deps: update V8 to 7.1.302.28

PR-URL: https://github.com/nodejs/node/pull/23423
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Gus Caplan <me@gus.host>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
This commit is contained in:
Michaël Zasso 2018-12-04 08:20:37 +01:00
parent b8fbe69db1
commit 9b4bf7de6c
No known key found for this signature in database
GPG Key ID: 770F7A9A5AE15600
1526 changed files with 78560 additions and 41829 deletions

20
deps/v8/.clang-tidy vendored Normal file
View File

@ -0,0 +1,20 @@
---
---
Checks: '-*,
modernize-redundant-void-arg,
modernize-replace-random-shuffle,
modernize-shrink-to-fit,
modernize-use-auto,
modernize-use-bool-literals,
modernize-use-equals-default,
modernize-use-equals-delete,
modernize-use-nullptr,
modernize-use-override,
google-build-explicit-make-pair,
google-explicit-constructor,
google-readability-casting'
WarningsAsErrors: ''
HeaderFilterRegex: ''
AnalyzeTemporaryDtors: false
...

View File

@ -3,3 +3,5 @@
# Do not modify line endings for binary files (which are sometimes auto # Do not modify line endings for binary files (which are sometimes auto
# detected as text files by git). # detected as text files by git).
*.png binary *.png binary
# Don't include minified JS in git grep/diff output
test/mjsunit/asm/sqlite3/*.js -diff

1
deps/v8/.gitignore vendored
View File

@ -73,7 +73,6 @@
/tools/clang /tools/clang
/tools/gcmole/gcmole-tools /tools/gcmole/gcmole-tools
/tools/gcmole/gcmole-tools.tar.gz /tools/gcmole/gcmole-tools.tar.gz
/tools/gyp
/tools/jsfunfuzz/jsfunfuzz /tools/jsfunfuzz/jsfunfuzz
/tools/jsfunfuzz/jsfunfuzz.tar.gz /tools/jsfunfuzz/jsfunfuzz.tar.gz
/tools/luci-go /tools/luci-go

2
deps/v8/AUTHORS vendored
View File

@ -132,6 +132,7 @@ Peter Rybin <peter.rybin@gmail.com>
Peter Varga <pvarga@inf.u-szeged.hu> Peter Varga <pvarga@inf.u-szeged.hu>
Peter Wong <peter.wm.wong@gmail.com> Peter Wong <peter.wm.wong@gmail.com>
Paul Lind <plind44@gmail.com> Paul Lind <plind44@gmail.com>
PhistucK <phistuck@gmail.com>
Qingyan Li <qingyan.liqy@alibaba-inc.com> Qingyan Li <qingyan.liqy@alibaba-inc.com>
Qiuyi Zhang <qiuyi.zqy@alibaba-inc.com> Qiuyi Zhang <qiuyi.zqy@alibaba-inc.com>
Rafal Krypa <rafal@krypa.net> Rafal Krypa <rafal@krypa.net>
@ -162,6 +163,7 @@ Vladimir Krivosheev <develar@gmail.com>
Vladimir Shutoff <vovan@shutoff.ru> Vladimir Shutoff <vovan@shutoff.ru>
Wiktor Garbacz <wiktor.garbacz@gmail.com> Wiktor Garbacz <wiktor.garbacz@gmail.com>
Xiaoyin Liu <xiaoyin.l@outlook.com> Xiaoyin Liu <xiaoyin.l@outlook.com>
Yannic Bonenberger <contact@yannic-bonenberger.com>
Yong Wang <ccyongwang@tencent.com> Yong Wang <ccyongwang@tencent.com>
Yu Yin <xwafish@gmail.com> Yu Yin <xwafish@gmail.com>
Zac Hansen <xaxxon@gmail.com> Zac Hansen <xaxxon@gmail.com>

198
deps/v8/BUILD.gn vendored
View File

@ -76,9 +76,6 @@ declare_args() {
v8_enable_embedded_builtins = v8_use_snapshot && v8_current_cpu != "x86" && v8_enable_embedded_builtins = v8_use_snapshot && v8_current_cpu != "x86" &&
!is_aix && (!is_win || is_clang) !is_aix && (!is_win || is_clang)
# Enable embedded bytecode handlers.
v8_enable_embedded_bytecode_handlers = false
# Enable code-generation-time checking of types in the CodeStubAssembler. # Enable code-generation-time checking of types in the CodeStubAssembler.
v8_enable_verify_csa = false v8_enable_verify_csa = false
@ -164,6 +161,17 @@ declare_args() {
# setting the "check_v8_header_includes" gclient variable to run a # setting the "check_v8_header_includes" gclient variable to run a
# specific hook). # specific hook).
v8_check_header_includes = false v8_check_header_includes = false
# We reuse the snapshot toolchain for building torque and other generators to
# avoid building v8_libbase on the host more than once. On mips with big
# endian, the snapshot toolchain is the target toolchain and, hence, can't be
# used.
}
v8_generator_toolchain = v8_snapshot_toolchain
if (host_cpu == "x64" &&
(v8_current_cpu == "mips" || v8_current_cpu == "mips64")) {
v8_generator_toolchain = "//build/toolchain/linux:clang_x64"
} }
# Derived defaults. # Derived defaults.
@ -197,9 +205,6 @@ assert(
!v8_untrusted_code_mitigations, !v8_untrusted_code_mitigations,
"Embedded builtins on ia32 and untrusted code mitigations are incompatible") "Embedded builtins on ia32 and untrusted code mitigations are incompatible")
assert(!v8_enable_embedded_bytecode_handlers || v8_enable_embedded_builtins,
"Embedded bytecode handlers only work with embedded builtins")
# Specifies if the target build is a simulator build. Comparing target cpu # Specifies if the target build is a simulator build. Comparing target cpu
# with v8 target cpu to not affect simulator builds for making cross-compile # with v8 target cpu to not affect simulator builds for making cross-compile
# snapshots. # snapshots.
@ -377,10 +382,10 @@ config("features") {
defines += [ "V8_CHECK_MICROTASKS_SCOPES_CONSISTENCY" ] defines += [ "V8_CHECK_MICROTASKS_SCOPES_CONSISTENCY" ]
} }
if (v8_enable_embedded_builtins) { if (v8_enable_embedded_builtins) {
defines += [ "V8_EMBEDDED_BUILTINS" ] defines += [
} "V8_EMBEDDED_BUILTINS",
if (v8_enable_embedded_bytecode_handlers) { "V8_EMBEDDED_BYTECODE_HANDLERS",
defines += [ "V8_EMBEDDED_BYTECODE_HANDLERS" ] ]
} }
if (v8_use_multi_snapshots) { if (v8_use_multi_snapshots) {
defines += [ "V8_MULTI_SNAPSHOTS" ] defines += [ "V8_MULTI_SNAPSHOTS" ]
@ -849,6 +854,8 @@ action("postmortem-metadata") {
sources = [ sources = [
"src/objects.h", "src/objects.h",
"src/objects-inl.h", "src/objects-inl.h",
"src/objects/allocation-site-inl.h",
"src/objects/allocation-site.h",
"src/objects/code-inl.h", "src/objects/code-inl.h",
"src/objects/code.h", "src/objects/code.h",
"src/objects/data-handler.h", "src/objects/data-handler.h",
@ -859,6 +866,8 @@ action("postmortem-metadata") {
"src/objects/js-array.h", "src/objects/js-array.h",
"src/objects/js-array-buffer-inl.h", "src/objects/js-array-buffer-inl.h",
"src/objects/js-array-buffer.h", "src/objects/js-array-buffer.h",
"src/objects/js-objects-inl.h",
"src/objects/js-objects.h",
"src/objects/js-regexp-inl.h", "src/objects/js-regexp-inl.h",
"src/objects/js-regexp.h", "src/objects/js-regexp.h",
"src/objects/js-regexp-string-iterator-inl.h", "src/objects/js-regexp-string-iterator-inl.h",
@ -889,7 +898,10 @@ torque_files = [
"src/builtins/array.tq", "src/builtins/array.tq",
"src/builtins/array-copywithin.tq", "src/builtins/array-copywithin.tq",
"src/builtins/array-foreach.tq", "src/builtins/array-foreach.tq",
"src/builtins/array-lastindexof.tq",
"src/builtins/array-reverse.tq", "src/builtins/array-reverse.tq",
"src/builtins/array-splice.tq",
"src/builtins/array-unshift.tq",
"src/builtins/typed-array.tq", "src/builtins/typed-array.tq",
"src/builtins/data-view.tq", "src/builtins/data-view.tq",
"test/torque/test-torque.tq", "test/torque/test-torque.tq",
@ -911,17 +923,8 @@ action("run_torque") {
"test/cctest/:*", "test/cctest/:*",
] ]
# We reuse the snapshot toolchain for building torque to not build v8_libbase
# on the host more than once. On mips with big endian, the snapshot toolchain
# is the target toolchain and, hence, can't be used.
v8_torque_toolchain = v8_snapshot_toolchain
if (host_cpu == "x64" &&
(v8_current_cpu == "mips" || v8_current_cpu == "mips64")) {
v8_torque_toolchain = "//build/toolchain/linux:clang_x64"
}
deps = [ deps = [
":torque($v8_torque_toolchain)", ":torque($v8_generator_toolchain)",
] ]
script = "tools/run.py" script = "tools/run.py"
@ -939,7 +942,7 @@ action("run_torque") {
} }
args = [ args = [
"./" + rebase_path(get_label_info(":torque($v8_torque_toolchain)", "./" + rebase_path(get_label_info(":torque($v8_generator_toolchain)",
"root_out_dir") + "/torque", "root_out_dir") + "/torque",
root_build_dir), root_build_dir),
"-o", "-o",
@ -969,6 +972,7 @@ v8_source_set("torque_generated_initializers") {
visibility = [ ":*" ] # Only targets in this file can depend on this. visibility = [ ":*" ] # Only targets in this file can depend on this.
deps = [ deps = [
":generate_bytecode_builtins_list",
":run_torque", ":run_torque",
] ]
@ -989,6 +993,24 @@ v8_source_set("torque_generated_initializers") {
configs = [ ":internal_config" ] configs = [ ":internal_config" ]
} }
action("generate_bytecode_builtins_list") {
script = "tools/run.py"
outputs = [
"$target_gen_dir/builtins-generated/bytecodes-builtins-list.h",
]
deps = [
":bytecode_builtins_list_generator($v8_generator_toolchain)",
]
args = [
"./" + rebase_path(
get_label_info(
":bytecode_builtins_list_generator($v8_generator_toolchain)",
"root_out_dir") + "/bytecode_builtins_list_generator",
root_build_dir),
rebase_path("$target_gen_dir/builtins-generated/bytecodes-builtins-list.h"),
]
}
# Template to generate different V8 snapshots based on different runtime flags. # Template to generate different V8 snapshots based on different runtime flags.
# Can be invoked with run_mksnapshot(<name>). The target will resolve to # Can be invoked with run_mksnapshot(<name>). The target will resolve to
# run_mksnapshot_<name>. If <name> is "default", no file suffixes will be used. # run_mksnapshot_<name>. If <name> is "default", no file suffixes will be used.
@ -1382,8 +1404,6 @@ v8_source_set("v8_initializers") {
"src/interpreter/interpreter-generator.h", "src/interpreter/interpreter-generator.h",
"src/interpreter/interpreter-intrinsics-generator.cc", "src/interpreter/interpreter-intrinsics-generator.cc",
"src/interpreter/interpreter-intrinsics-generator.h", "src/interpreter/interpreter-intrinsics-generator.h",
"src/interpreter/setup-interpreter-internal.cc",
"src/interpreter/setup-interpreter.h",
] ]
if (use_jumbo_build == true) { if (use_jumbo_build == true) {
@ -1485,6 +1505,7 @@ v8_header_set("v8_headers") {
configs = [ ":internal_config" ] configs = [ ":internal_config" ]
sources = [ sources = [
"include/v8-internal.h",
"include/v8.h", "include/v8.h",
"include/v8config.h", "include/v8config.h",
] ]
@ -1504,8 +1525,10 @@ v8_source_set("v8_base") {
"//base/trace_event/common/trace_event_common.h", "//base/trace_event/common/trace_event_common.h",
### gcmole(all) ### ### gcmole(all) ###
"$target_gen_dir/builtins-generated/bytecodes-builtins-list.h",
"include/v8-inspector-protocol.h", "include/v8-inspector-protocol.h",
"include/v8-inspector.h", "include/v8-inspector.h",
"include/v8-internal.h",
"include/v8-platform.h", "include/v8-platform.h",
"include/v8-profiler.h", "include/v8-profiler.h",
"include/v8-testing.h", "include/v8-testing.h",
@ -1516,6 +1539,7 @@ v8_source_set("v8_base") {
"src/accessors.h", "src/accessors.h",
"src/address-map.cc", "src/address-map.cc",
"src/address-map.h", "src/address-map.h",
"src/allocation-site-scopes-inl.h",
"src/allocation-site-scopes.h", "src/allocation-site-scopes.h",
"src/allocation.cc", "src/allocation.cc",
"src/allocation.h", "src/allocation.h",
@ -1562,6 +1586,7 @@ v8_source_set("v8_base") {
"src/ast/modules.h", "src/ast/modules.h",
"src/ast/prettyprinter.cc", "src/ast/prettyprinter.cc",
"src/ast/prettyprinter.h", "src/ast/prettyprinter.h",
"src/ast/scopes-inl.h",
"src/ast/scopes.cc", "src/ast/scopes.cc",
"src/ast/scopes.h", "src/ast/scopes.h",
"src/ast/variables.cc", "src/ast/variables.cc",
@ -1599,7 +1624,6 @@ v8_source_set("v8_base") {
"src/builtins/builtins-internal.cc", "src/builtins/builtins-internal.cc",
"src/builtins/builtins-interpreter.cc", "src/builtins/builtins-interpreter.cc",
"src/builtins/builtins-intl.cc", "src/builtins/builtins-intl.cc",
"src/builtins/builtins-intl.h",
"src/builtins/builtins-json.cc", "src/builtins/builtins-json.cc",
"src/builtins/builtins-math.cc", "src/builtins/builtins-math.cc",
"src/builtins/builtins-number.cc", "src/builtins/builtins-number.cc",
@ -1813,6 +1837,7 @@ v8_source_set("v8_base") {
"src/compiler/operator.h", "src/compiler/operator.h",
"src/compiler/osr.cc", "src/compiler/osr.cc",
"src/compiler/osr.h", "src/compiler/osr.h",
"src/compiler/per-isolate-compiler-cache.h",
"src/compiler/persistent-map.h", "src/compiler/persistent-map.h",
"src/compiler/pipeline-statistics.cc", "src/compiler/pipeline-statistics.cc",
"src/compiler/pipeline-statistics.h", "src/compiler/pipeline-statistics.h",
@ -1824,6 +1849,8 @@ v8_source_set("v8_base") {
"src/compiler/raw-machine-assembler.h", "src/compiler/raw-machine-assembler.h",
"src/compiler/redundancy-elimination.cc", "src/compiler/redundancy-elimination.cc",
"src/compiler/redundancy-elimination.h", "src/compiler/redundancy-elimination.h",
"src/compiler/refs-map.cc",
"src/compiler/refs-map.h",
"src/compiler/register-allocator-verifier.cc", "src/compiler/register-allocator-verifier.cc",
"src/compiler/register-allocator-verifier.h", "src/compiler/register-allocator-verifier.h",
"src/compiler/register-allocator.cc", "src/compiler/register-allocator.cc",
@ -2140,6 +2167,8 @@ v8_source_set("v8_base") {
"src/macro-assembler.h", "src/macro-assembler.h",
"src/map-updater.cc", "src/map-updater.cc",
"src/map-updater.h", "src/map-updater.h",
"src/math-random.cc",
"src/math-random.h",
"src/maybe-handles-inl.h", "src/maybe-handles-inl.h",
"src/maybe-handles.h", "src/maybe-handles.h",
"src/messages.cc", "src/messages.cc",
@ -2158,6 +2187,7 @@ v8_source_set("v8_base") {
"src/objects/arguments.h", "src/objects/arguments.h",
"src/objects/bigint.cc", "src/objects/bigint.cc",
"src/objects/bigint.h", "src/objects/bigint.h",
"src/objects/builtin-function-id.h",
"src/objects/code-inl.h", "src/objects/code-inl.h",
"src/objects/code.h", "src/objects/code.h",
"src/objects/compilation-cache-inl.h", "src/objects/compilation-cache-inl.h",
@ -2181,11 +2211,17 @@ v8_source_set("v8_base") {
"src/objects/js-array-buffer.h", "src/objects/js-array-buffer.h",
"src/objects/js-array-inl.h", "src/objects/js-array-inl.h",
"src/objects/js-array.h", "src/objects/js-array.h",
"src/objects/js-break-iterator-inl.h",
"src/objects/js-break-iterator.cc",
"src/objects/js-break-iterator.h",
"src/objects/js-collator-inl.h", "src/objects/js-collator-inl.h",
"src/objects/js-collator.cc", "src/objects/js-collator.cc",
"src/objects/js-collator.h", "src/objects/js-collator.h",
"src/objects/js-collection-inl.h", "src/objects/js-collection-inl.h",
"src/objects/js-collection.h", "src/objects/js-collection.h",
"src/objects/js-date-time-format-inl.h",
"src/objects/js-date-time-format.cc",
"src/objects/js-date-time-format.h",
"src/objects/js-generator-inl.h", "src/objects/js-generator-inl.h",
"src/objects/js-generator.h", "src/objects/js-generator.h",
"src/objects/js-list-format-inl.h", "src/objects/js-list-format-inl.h",
@ -2194,6 +2230,11 @@ v8_source_set("v8_base") {
"src/objects/js-locale-inl.h", "src/objects/js-locale-inl.h",
"src/objects/js-locale.cc", "src/objects/js-locale.cc",
"src/objects/js-locale.h", "src/objects/js-locale.h",
"src/objects/js-number-format-inl.h",
"src/objects/js-number-format.cc",
"src/objects/js-number-format.h",
"src/objects/js-objects-inl.h",
"src/objects/js-objects.h",
"src/objects/js-plural-rules-inl.h", "src/objects/js-plural-rules-inl.h",
"src/objects/js-plural-rules.cc", "src/objects/js-plural-rules.cc",
"src/objects/js-plural-rules.h", "src/objects/js-plural-rules.h",
@ -2208,6 +2249,9 @@ v8_source_set("v8_base") {
"src/objects/js-relative-time-format-inl.h", "src/objects/js-relative-time-format-inl.h",
"src/objects/js-relative-time-format.cc", "src/objects/js-relative-time-format.cc",
"src/objects/js-relative-time-format.h", "src/objects/js-relative-time-format.h",
"src/objects/js-segmenter-inl.h",
"src/objects/js-segmenter.cc",
"src/objects/js-segmenter.h",
"src/objects/literal-objects-inl.h", "src/objects/literal-objects-inl.h",
"src/objects/literal-objects.cc", "src/objects/literal-objects.cc",
"src/objects/literal-objects.h", "src/objects/literal-objects.h",
@ -2218,6 +2262,9 @@ v8_source_set("v8_base") {
"src/objects/maybe-object-inl.h", "src/objects/maybe-object-inl.h",
"src/objects/maybe-object.h", "src/objects/maybe-object.h",
"src/objects/microtask-inl.h", "src/objects/microtask-inl.h",
"src/objects/microtask-queue-inl.h",
"src/objects/microtask-queue.cc",
"src/objects/microtask-queue.h",
"src/objects/microtask.h", "src/objects/microtask.h",
"src/objects/module-inl.h", "src/objects/module-inl.h",
"src/objects/module.cc", "src/objects/module.cc",
@ -2231,6 +2278,8 @@ v8_source_set("v8_base") {
"src/objects/ordered-hash-table.h", "src/objects/ordered-hash-table.h",
"src/objects/promise-inl.h", "src/objects/promise-inl.h",
"src/objects/promise.h", "src/objects/promise.h",
"src/objects/property-array-inl.h",
"src/objects/property-array.h",
"src/objects/property-descriptor-object-inl.h", "src/objects/property-descriptor-object-inl.h",
"src/objects/property-descriptor-object.h", "src/objects/property-descriptor-object.h",
"src/objects/prototype-info-inl.h", "src/objects/prototype-info-inl.h",
@ -2242,6 +2291,8 @@ v8_source_set("v8_base") {
"src/objects/script.h", "src/objects/script.h",
"src/objects/shared-function-info-inl.h", "src/objects/shared-function-info-inl.h",
"src/objects/shared-function-info.h", "src/objects/shared-function-info.h",
"src/objects/stack-frame-info-inl.h",
"src/objects/stack-frame-info.h",
"src/objects/string-inl.h", "src/objects/string-inl.h",
"src/objects/string-table.h", "src/objects/string-table.h",
"src/objects/string.h", "src/objects/string.h",
@ -2267,6 +2318,7 @@ v8_source_set("v8_base") {
"src/parsing/parsing.cc", "src/parsing/parsing.cc",
"src/parsing/parsing.h", "src/parsing/parsing.h",
"src/parsing/pattern-rewriter.cc", "src/parsing/pattern-rewriter.cc",
"src/parsing/preparsed-scope-data-impl.h",
"src/parsing/preparsed-scope-data.cc", "src/parsing/preparsed-scope-data.cc",
"src/parsing/preparsed-scope-data.h", "src/parsing/preparsed-scope-data.h",
"src/parsing/preparser-logger.h", "src/parsing/preparser-logger.h",
@ -2323,6 +2375,8 @@ v8_source_set("v8_base") {
"src/regexp/jsregexp-inl.h", "src/regexp/jsregexp-inl.h",
"src/regexp/jsregexp.cc", "src/regexp/jsregexp.cc",
"src/regexp/jsregexp.h", "src/regexp/jsregexp.h",
"src/regexp/property-sequences.cc",
"src/regexp/property-sequences.h",
"src/regexp/regexp-ast.cc", "src/regexp/regexp-ast.cc",
"src/regexp/regexp-ast.h", "src/regexp/regexp-ast.h",
"src/regexp/regexp-macro-assembler-irregexp-inl.h", "src/regexp/regexp-macro-assembler-irregexp-inl.h",
@ -2344,6 +2398,7 @@ v8_source_set("v8_base") {
"src/reloc-info.cc", "src/reloc-info.cc",
"src/reloc-info.h", "src/reloc-info.h",
"src/roots-inl.h", "src/roots-inl.h",
"src/roots.cc",
"src/roots.h", "src/roots.h",
"src/runtime-profiler.cc", "src/runtime-profiler.cc",
"src/runtime-profiler.h", "src/runtime-profiler.h",
@ -2363,7 +2418,6 @@ v8_source_set("v8_base") {
"src/runtime/runtime-interpreter.cc", "src/runtime/runtime-interpreter.cc",
"src/runtime/runtime-intl.cc", "src/runtime/runtime-intl.cc",
"src/runtime/runtime-literals.cc", "src/runtime/runtime-literals.cc",
"src/runtime/runtime-maths.cc",
"src/runtime/runtime-module.cc", "src/runtime/runtime-module.cc",
"src/runtime/runtime-numbers.cc", "src/runtime/runtime-numbers.cc",
"src/runtime/runtime-object.cc", "src/runtime/runtime-object.cc",
@ -2395,8 +2449,6 @@ v8_source_set("v8_base") {
"src/snapshot/builtin-serializer-allocator.h", "src/snapshot/builtin-serializer-allocator.h",
"src/snapshot/builtin-serializer.cc", "src/snapshot/builtin-serializer.cc",
"src/snapshot/builtin-serializer.h", "src/snapshot/builtin-serializer.h",
"src/snapshot/builtin-snapshot-utils.cc",
"src/snapshot/builtin-snapshot-utils.h",
"src/snapshot/code-serializer.cc", "src/snapshot/code-serializer.cc",
"src/snapshot/code-serializer.h", "src/snapshot/code-serializer.h",
"src/snapshot/default-deserializer-allocator.cc", "src/snapshot/default-deserializer-allocator.cc",
@ -2439,6 +2491,8 @@ v8_source_set("v8_base") {
"src/string-builder.cc", "src/string-builder.cc",
"src/string-case.cc", "src/string-case.cc",
"src/string-case.h", "src/string-case.h",
"src/string-constants.cc",
"src/string-constants.h",
"src/string-hasher-inl.h", "src/string-hasher-inl.h",
"src/string-hasher.h", "src/string-hasher.h",
"src/string-search.h", "src/string-search.h",
@ -2447,6 +2501,7 @@ v8_source_set("v8_base") {
"src/strtod.cc", "src/strtod.cc",
"src/strtod.h", "src/strtod.h",
"src/third_party/utf8-decoder/utf8-decoder.h", "src/third_party/utf8-decoder/utf8-decoder.h",
"src/torque-assembler.h",
"src/tracing/trace-event.cc", "src/tracing/trace-event.cc",
"src/tracing/trace-event.h", "src/tracing/trace-event.h",
"src/tracing/traced-value.cc", "src/tracing/traced-value.cc",
@ -2518,6 +2573,7 @@ v8_source_set("v8_base") {
"src/wasm/module-compiler.h", "src/wasm/module-compiler.h",
"src/wasm/module-decoder.cc", "src/wasm/module-decoder.cc",
"src/wasm/module-decoder.h", "src/wasm/module-decoder.h",
"src/wasm/object-access.h",
"src/wasm/signature-map.cc", "src/wasm/signature-map.cc",
"src/wasm/signature-map.h", "src/wasm/signature-map.h",
"src/wasm/streaming-decoder.cc", "src/wasm/streaming-decoder.cc",
@ -2869,6 +2925,7 @@ v8_source_set("v8_base") {
defines = [] defines = []
deps = [ deps = [
":generate_bytecode_builtins_list",
":torque_generated_core", ":torque_generated_core",
":v8_headers", ":v8_headers",
":v8_libbase", ":v8_libbase",
@ -2886,28 +2943,39 @@ v8_source_set("v8_base") {
} else { } else {
sources -= [ sources -= [
"src/builtins/builtins-intl.cc", "src/builtins/builtins-intl.cc",
"src/builtins/builtins-intl.h",
"src/char-predicates.cc", "src/char-predicates.cc",
"src/intl.cc", "src/intl.cc",
"src/intl.h", "src/intl.h",
"src/objects/intl-objects-inl.h", "src/objects/intl-objects-inl.h",
"src/objects/intl-objects.cc", "src/objects/intl-objects.cc",
"src/objects/intl-objects.h", "src/objects/intl-objects.h",
"src/objects/js-break-iterator-inl.h",
"src/objects/js-break-iterator.cc",
"src/objects/js-break-iterator.h",
"src/objects/js-collator-inl.h", "src/objects/js-collator-inl.h",
"src/objects/js-collator.cc", "src/objects/js-collator.cc",
"src/objects/js-collator.h", "src/objects/js-collator.h",
"src/objects/js-date-time-format-inl.h",
"src/objects/js-date-time-format.cc",
"src/objects/js-date-time-format.h",
"src/objects/js-list-format-inl.h", "src/objects/js-list-format-inl.h",
"src/objects/js-list-format.cc", "src/objects/js-list-format.cc",
"src/objects/js-list-format.h", "src/objects/js-list-format.h",
"src/objects/js-locale-inl.h", "src/objects/js-locale-inl.h",
"src/objects/js-locale.cc", "src/objects/js-locale.cc",
"src/objects/js-locale.h", "src/objects/js-locale.h",
"src/objects/js-number-format-inl.h",
"src/objects/js-number-format.cc",
"src/objects/js-number-format.h",
"src/objects/js-plural-rules-inl.h", "src/objects/js-plural-rules-inl.h",
"src/objects/js-plural-rules.cc", "src/objects/js-plural-rules.cc",
"src/objects/js-plural-rules.h", "src/objects/js-plural-rules.h",
"src/objects/js-relative-time-format-inl.h", "src/objects/js-relative-time-format-inl.h",
"src/objects/js-relative-time-format.cc", "src/objects/js-relative-time-format.cc",
"src/objects/js-relative-time-format.h", "src/objects/js-relative-time-format.h",
"src/objects/js-segmenter-inl.h",
"src/objects/js-segmenter.cc",
"src/objects/js-segmenter.h",
"src/runtime/runtime-intl.cc", "src/runtime/runtime-intl.cc",
] ]
} }
@ -2916,6 +2984,15 @@ v8_source_set("v8_base") {
sources += [ "$target_gen_dir/debug-support.cc" ] sources += [ "$target_gen_dir/debug-support.cc" ]
deps += [ ":postmortem-metadata" ] deps += [ ":postmortem-metadata" ]
} }
# Platforms that don't have CAS support need to link atomic library
# to implement atomic memory access
if (v8_current_cpu == "mips" || v8_current_cpu == "mipsel" ||
v8_current_cpu == "mips64" || v8_current_cpu == "mips64el" ||
v8_current_cpu == "ppc" || v8_current_cpu == "ppc64" ||
v8_current_cpu == "s390" || v8_current_cpu == "s390x") {
libs = [ "atomic" ]
}
} }
v8_source_set("torque_base") { v8_source_set("torque_base") {
@ -2923,7 +3000,11 @@ v8_source_set("torque_base") {
sources = [ sources = [
"src/torque/ast.h", "src/torque/ast.h",
"src/torque/cfg.cc",
"src/torque/cfg.h",
"src/torque/contextual.h", "src/torque/contextual.h",
"src/torque/csa-generator.cc",
"src/torque/csa-generator.h",
"src/torque/declarable.cc", "src/torque/declarable.cc",
"src/torque/declarable.h", "src/torque/declarable.h",
"src/torque/declaration-visitor.cc", "src/torque/declaration-visitor.cc",
@ -2937,6 +3018,8 @@ v8_source_set("torque_base") {
"src/torque/global-context.h", "src/torque/global-context.h",
"src/torque/implementation-visitor.cc", "src/torque/implementation-visitor.cc",
"src/torque/implementation-visitor.h", "src/torque/implementation-visitor.h",
"src/torque/instructions.cc",
"src/torque/instructions.h",
"src/torque/scope.cc", "src/torque/scope.cc",
"src/torque/scope.h", "src/torque/scope.h",
"src/torque/source-positions.cc", "src/torque/source-positions.cc",
@ -2956,11 +3039,15 @@ v8_source_set("torque_base") {
] ]
configs = [ ":internal_config" ] configs = [ ":internal_config" ]
if (is_win && is_asan) {
remove_configs = [ "//build/config/sanitizers:default_sanitizer_flags" ]
}
} }
v8_component("v8_libbase") { v8_component("v8_libbase") {
sources = [ sources = [
"src/base/adapters.h", "src/base/adapters.h",
"src/base/address-region.h",
"src/base/atomic-utils.h", "src/base/atomic-utils.h",
"src/base/atomicops.h", "src/base/atomicops.h",
"src/base/atomicops_internals_atomicword_compat.h", "src/base/atomicops_internals_atomicword_compat.h",
@ -2969,6 +3056,8 @@ v8_component("v8_libbase") {
"src/base/base-export.h", "src/base/base-export.h",
"src/base/bits.cc", "src/base/bits.cc",
"src/base/bits.h", "src/base/bits.h",
"src/base/bounded-page-allocator.cc",
"src/base/bounded-page-allocator.h",
"src/base/build_config.h", "src/base/build_config.h",
"src/base/compiler-specific.h", "src/base/compiler-specific.h",
"src/base/cpu.cc", "src/base/cpu.cc",
@ -2994,6 +3083,8 @@ v8_component("v8_libbase") {
"src/base/list.h", "src/base/list.h",
"src/base/logging.cc", "src/base/logging.cc",
"src/base/logging.h", "src/base/logging.h",
"src/base/lsan-page-allocator.cc",
"src/base/lsan-page-allocator.h",
"src/base/macros.h", "src/base/macros.h",
"src/base/once.cc", "src/base/once.cc",
"src/base/once.h", "src/base/once.h",
@ -3010,6 +3101,8 @@ v8_component("v8_libbase") {
"src/base/platform/semaphore.h", "src/base/platform/semaphore.h",
"src/base/platform/time.cc", "src/base/platform/time.cc",
"src/base/platform/time.h", "src/base/platform/time.h",
"src/base/region-allocator.cc",
"src/base/region-allocator.h",
"src/base/ring-buffer.h", "src/base/ring-buffer.h",
"src/base/safe_conversions.h", "src/base/safe_conversions.h",
"src/base/safe_conversions_impl.h", "src/base/safe_conversions_impl.h",
@ -3237,6 +3330,29 @@ if (v8_monolithic) {
# Executables # Executables
# #
if (current_toolchain == v8_generator_toolchain) {
v8_executable("bytecode_builtins_list_generator") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
include_dirs = [ "." ]
sources = [
"src/builtins/generate-bytecodes-builtins-list.cc",
"src/interpreter/bytecode-operands.cc",
"src/interpreter/bytecode-operands.h",
"src/interpreter/bytecodes.cc",
"src/interpreter/bytecodes.h",
]
configs = [ ":internal_config" ]
deps = [
":v8_libbase",
"//build/win:default_exe_manifest",
]
}
}
if (v8_use_snapshot && current_toolchain == v8_snapshot_toolchain) { if (v8_use_snapshot && current_toolchain == v8_snapshot_toolchain) {
v8_executable("mksnapshot") { v8_executable("mksnapshot") {
visibility = [ ":*" ] # Only targets in this file can depend on this. visibility = [ ":*" ] # Only targets in this file can depend on this.
@ -3272,6 +3388,9 @@ if (current_toolchain == v8_snapshot_toolchain) {
] ]
configs = [ ":internal_config" ] configs = [ ":internal_config" ]
if (is_win && is_asan) {
remove_configs = [ "//build/config/sanitizers:default_sanitizer_flags" ]
}
} }
} }
@ -3325,11 +3444,32 @@ group("v8_clusterfuzz") {
} }
group("v8_archive") { group("v8_archive") {
testonly = true
deps = [ deps = [
":d8", ":d8",
"test/cctest:cctest",
] ]
} }
# TODO(dglazkov): Remove the "!build_with_chromium" condition once this clause
# is removed from Chromium.
if (is_fuchsia && !build_with_chromium) {
import("//build/config/fuchsia/rules.gni")
fuchsia_package("d8_fuchsia_pkg") {
testonly = true
binary = ":d8"
package_name_override = "d8"
}
fuchsia_package_runner("d8_fuchsia") {
testonly = true
package = ":d8_fuchsia_pkg"
package_name_override = "d8"
}
}
group("v8_fuzzers") { group("v8_fuzzers") {
testonly = true testonly = true
data_deps = [ data_deps = [
@ -3636,6 +3776,7 @@ v8_source_set("wasm_module_runner") {
] ]
deps = [ deps = [
":generate_bytecode_builtins_list",
":torque_generated_core", ":torque_generated_core",
] ]
@ -3719,6 +3860,7 @@ v8_source_set("lib_wasm_fuzzer_common") {
] ]
deps = [ deps = [
":generate_bytecode_builtins_list",
":torque_generated_core", ":torque_generated_core",
] ]

1640
deps/v8/ChangeLog vendored

File diff suppressed because it is too large Load Diff

62
deps/v8/DEPS vendored
View File

@ -13,15 +13,13 @@ vars = {
deps = { deps = {
'v8/build': 'v8/build':
Var('chromium_url') + '/chromium/src/build.git' + '@' + 'dd6b994b32b498e9e766ce60c44da0aec3a2a188', Var('chromium_url') + '/chromium/src/build.git' + '@' + 'a7674eacc34947257c78fe6ba5cf0da17f60696c',
'v8/tools/gyp':
Var('chromium_url') + '/external/gyp.git' + '@' + 'd61a9397e668fa9843c4aa7da9e79460fe590bfb',
'v8/third_party/depot_tools': 'v8/third_party/depot_tools':
Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'aaf2cc09c6874e394c6c1e4692360cc400d6b388', Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '71e3be7a50c21faeee91ed99a8d5addfb7594e7c',
'v8/third_party/icu': 'v8/third_party/icu':
Var('chromium_url') + '/chromium/deps/icu.git' + '@' + 'a191af9d025859e8368b8b469120d78006e9f5f6', Var('chromium_url') + '/chromium/deps/icu.git' + '@' + 'c52a2a250d6c5f5cbdd015dff36af7c5d0ae1150',
'v8/third_party/instrumented_libraries': 'v8/third_party/instrumented_libraries':
Var('chromium_url') + '/chromium/src/third_party/instrumented_libraries.git' + '@' + 'd8cf40c4592dcec7fb01fcbdf1f6d4958b3fbf11', Var('chromium_url') + '/chromium/src/third_party/instrumented_libraries.git' + '@' + 'a90cbf3b4216430a437991fb53ede8e048dea454',
'v8/buildtools': 'v8/buildtools':
Var('chromium_url') + '/chromium/buildtools.git' + '@' + '2dff9c9c74e9d732e6fe57c84ef7fd044cc45d96', Var('chromium_url') + '/chromium/buildtools.git' + '@' + '2dff9c9c74e9d732e6fe57c84ef7fd044cc45d96',
'v8/base/trace_event/common': 'v8/base/trace_event/common':
@ -35,7 +33,7 @@ deps = {
'condition': 'checkout_android', 'condition': 'checkout_android',
}, },
'v8/third_party/catapult': { 'v8/third_party/catapult': {
'url': Var('chromium_url') + '/catapult.git' + '@' + 'bc2c0a9307285fa36e03e7cdb6bf8623390ff855', 'url': Var('chromium_url') + '/catapult.git' + '@' + '9ec8468cfde0868ce5f3893e819087278c5af988',
'condition': 'checkout_android', 'condition': 'checkout_android',
}, },
'v8/third_party/colorama/src': { 'v8/third_party/colorama/src': {
@ -43,17 +41,15 @@ deps = {
'condition': 'checkout_android', 'condition': 'checkout_android',
}, },
'v8/third_party/fuchsia-sdk': { 'v8/third_party/fuchsia-sdk': {
'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-sdk.git' + '@' + '3ec92c896bcbddc46e2a073ebfdd25aa1194656e', 'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-sdk.git' + '@' + '6e1868c9083769d489d3fc25657339d50c22b1d8',
'condition': 'checkout_fuchsia', 'condition': 'checkout_fuchsia',
}, },
'v8/third_party/googletest/src': 'v8/third_party/googletest/src':
Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + 'd5266326752f0a1dadbd310932d8f4fd8c3c5e7d', Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + '2e68926a9d4929e9289373cd49e40ddcb9a628f7',
'v8/third_party/jinja2': 'v8/third_party/jinja2':
Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + 'b41863e42637544c2941b574c7877d3e1f663e25', Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + 'b41863e42637544c2941b574c7877d3e1f663e25',
'v8/third_party/markupsafe': 'v8/third_party/markupsafe':
Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '8f45f5cfa0009d2a70589bcda0349b8cb2b72783', Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '8f45f5cfa0009d2a70589bcda0349b8cb2b72783',
'v8/third_party/proguard':
Var('chromium_url') + '/chromium/src/third_party/proguard.git' + '@' + 'a3729bea473bb5ffc5eaf289f5733bc5e2861c07',
'v8/tools/swarming_client': 'v8/tools/swarming_client':
Var('chromium_url') + '/infra/luci/client-py.git' + '@' + '486c9b53c4d54dd4b95bb6ce0e31160e600dfc11', Var('chromium_url') + '/infra/luci/client-py.git' + '@' + '486c9b53c4d54dd4b95bb6ce0e31160e600dfc11',
'v8/test/benchmarks/data': 'v8/test/benchmarks/data':
@ -61,25 +57,35 @@ deps = {
'v8/test/mozilla/data': 'v8/test/mozilla/data':
Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be', Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be',
'v8/test/test262/data': 'v8/test/test262/data':
Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + 'a6c1d05ac4fed084fa047e4c52ab2a8c9c2a8aef', Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '00cfe1628cc03164dcf03f01ba9c84376e9be735',
'v8/test/test262/harness': 'v8/test/test262/harness':
Var('chromium_url') + '/external/github.com/test262-utils/test262-harness-py.git' + '@' + '0f2acdd882c84cff43b9d60df7574a1901e2cdcd', Var('chromium_url') + '/external/github.com/test262-utils/test262-harness-py.git' + '@' + '0f2acdd882c84cff43b9d60df7574a1901e2cdcd',
'v8/third_party/qemu': { 'v8/third_party/qemu-linux-x64': {
'packages': [ 'packages': [
{ {
'package': 'fuchsia/qemu/linux-amd64', 'package': 'fuchsia/qemu/linux-amd64',
'version': '9cc486c5b18a0be515c39a280ca9a309c54cf994' 'version': '9cc486c5b18a0be515c39a280ca9a309c54cf994'
}, },
], ],
'condition': 'checkout_fuchsia', 'condition': 'host_os == "linux" and checkout_fuchsia',
'dep_type': 'cipd',
},
'v8/third_party/qemu-mac-x64': {
'packages': [
{
'package': 'fuchsia/qemu/mac-amd64',
'version': '2d3358ae9a569b2d4a474f498b32b202a152134f'
},
],
'condition': 'host_os == "mac" and checkout_fuchsia',
'dep_type': 'cipd', 'dep_type': 'cipd',
}, },
'v8/tools/clang': 'v8/tools/clang':
Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + 'bb4146fb8a9dde405b71914657bb461dc93912ab', Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '7792d28b069af6dd3a86d1ba83b7f5c4ede605dc',
'v8/tools/luci-go': 'v8/tools/luci-go':
Var('chromium_url') + '/chromium/src/tools/luci-go.git' + '@' + '445d7c4b6a4f10e188edb395b132e3996b127691', Var('chromium_url') + '/chromium/src/tools/luci-go.git' + '@' + '445d7c4b6a4f10e188edb395b132e3996b127691',
'v8/test/wasm-js': 'v8/test/wasm-js':
Var('chromium_url') + '/external/github.com/WebAssembly/spec.git' + '@' + '240ea673de6e75d78ae472f66127301ecab22a99', Var('chromium_url') + '/external/github.com/WebAssembly/spec.git' + '@' + 'db9cd40808a90ecc5f4a23e88fb375c8f60b8d52',
} }
recursedeps = [ recursedeps = [
@ -344,13 +350,6 @@ hooks = [
'condition': 'checkout_win', 'condition': 'checkout_win',
'action': ['python', 'v8/build/vs_toolchain.py', 'update'], 'action': ['python', 'v8/build/vs_toolchain.py', 'update'],
}, },
{
# Update the Mac toolchain if necessary.
'name': 'mac_toolchain',
'pattern': '.',
'condition': 'checkout_mac',
'action': ['python', 'v8/build/mac_toolchain.py'],
},
# Pull binutils for linux, enabled debug fission for faster linking / # Pull binutils for linux, enabled debug fission for faster linking /
# debugging when used with clang on Ubuntu Precise. # debugging when used with clang on Ubuntu Precise.
# https://code.google.com/p/chromium/issues/detail?id=352046 # https://code.google.com/p/chromium/issues/detail?id=352046
@ -387,6 +386,23 @@ hooks = [
'v8/build/fuchsia/update_sdk.py', 'v8/build/fuchsia/update_sdk.py',
], ],
}, },
{
# Mac doesn't use lld so it's not included in the default clang bundle
# there. However, lld is need in Fuchsia cross builds, so
# download it there.
# Should run after the clang hook.
'name': 'lld/mac',
'pattern': '.',
'condition': 'host_os == "mac" and checkout_fuchsia',
'action': ['python', 'v8/tools/clang/scripts/download_lld_mac.py'],
},
{
# Mac does not have llvm-objdump, download it for cross builds in Fuchsia.
'name': 'llvm-objdump',
'pattern': '.',
'condition': 'host_os == "mac" and checkout_fuchsia',
'action': ['python', 'v8/tools/clang/scripts/download_objdump.py'],
},
{ {
'name': 'mips_toolchain', 'name': 'mips_toolchain',
'pattern': '.', 'pattern': '.',

16
deps/v8/PRESUBMIT.py vendored
View File

@ -73,9 +73,10 @@ def _V8PresubmitChecks(input_api, output_api):
import sys import sys
sys.path.append(input_api.os_path.join( sys.path.append(input_api.os_path.join(
input_api.PresubmitLocalPath(), 'tools')) input_api.PresubmitLocalPath(), 'tools'))
from presubmit import CppLintProcessor from v8_presubmit import CppLintProcessor
from presubmit import SourceProcessor from v8_presubmit import TorqueFormatProcessor
from presubmit import StatusFilesProcessor from v8_presubmit import SourceProcessor
from v8_presubmit import StatusFilesProcessor
def FilterFile(affected_file): def FilterFile(affected_file):
return input_api.FilterSourceFile( return input_api.FilterSourceFile(
@ -83,10 +84,19 @@ def _V8PresubmitChecks(input_api, output_api):
white_list=None, white_list=None,
black_list=_NO_LINT_PATHS) black_list=_NO_LINT_PATHS)
def FilterTorqueFile(affected_file):
return input_api.FilterSourceFile(
affected_file,
white_list=(r'.+\.tq'))
results = [] results = []
if not CppLintProcessor().RunOnFiles( if not CppLintProcessor().RunOnFiles(
input_api.AffectedFiles(file_filter=FilterFile, include_deletes=False)): input_api.AffectedFiles(file_filter=FilterFile, include_deletes=False)):
results.append(output_api.PresubmitError("C++ lint check failed")) results.append(output_api.PresubmitError("C++ lint check failed"))
if not TorqueFormatProcessor().RunOnFiles(
input_api.AffectedFiles(file_filter=FilterTorqueFile,
include_deletes=False)):
results.append(output_api.PresubmitError("Torque format check failed"))
if not SourceProcessor().RunOnFiles( if not SourceProcessor().RunOnFiles(
input_api.AffectedFiles(include_deletes=False)): input_api.AffectedFiles(include_deletes=False)):
results.append(output_api.PresubmitError( results.append(output_api.PresubmitError(

10
deps/v8/gni/v8.gni vendored
View File

@ -143,7 +143,15 @@ template("v8_source_set") {
} }
} }
target(link_target_type, target_name) { target(link_target_type, target_name) {
forward_variables_from(invoker, "*", [ "configs" ]) forward_variables_from(invoker,
"*",
[
"configs",
"remove_configs",
])
if (defined(invoker.remove_configs)) {
configs -= invoker.remove_configs
}
configs += invoker.configs configs += invoker.configs
configs -= v8_remove_configs configs -= v8_remove_configs
configs += v8_add_configs configs += v8_add_configs

View File

@ -35,7 +35,7 @@ class V8_PLATFORM_EXPORT TraceObject {
const char* as_string; const char* as_string;
}; };
TraceObject() {} TraceObject() = default;
~TraceObject(); ~TraceObject();
void Initialize( void Initialize(
char phase, const uint8_t* category_enabled_flag, const char* name, char phase, const uint8_t* category_enabled_flag, const char* name,
@ -106,8 +106,8 @@ class V8_PLATFORM_EXPORT TraceObject {
class V8_PLATFORM_EXPORT TraceWriter { class V8_PLATFORM_EXPORT TraceWriter {
public: public:
TraceWriter() {} TraceWriter() = default;
virtual ~TraceWriter() {} virtual ~TraceWriter() = default;
virtual void AppendTraceEvent(TraceObject* trace_event) = 0; virtual void AppendTraceEvent(TraceObject* trace_event) = 0;
virtual void Flush() = 0; virtual void Flush() = 0;
@ -147,8 +147,8 @@ class V8_PLATFORM_EXPORT TraceBufferChunk {
class V8_PLATFORM_EXPORT TraceBuffer { class V8_PLATFORM_EXPORT TraceBuffer {
public: public:
TraceBuffer() {} TraceBuffer() = default;
virtual ~TraceBuffer() {} virtual ~TraceBuffer() = default;
virtual TraceObject* AddTraceEvent(uint64_t* handle) = 0; virtual TraceObject* AddTraceEvent(uint64_t* handle) = 0;
virtual TraceObject* GetEventByHandle(uint64_t handle) = 0; virtual TraceObject* GetEventByHandle(uint64_t handle) = 0;

View File

@ -62,7 +62,7 @@ class V8_EXPORT StringView {
class V8_EXPORT StringBuffer { class V8_EXPORT StringBuffer {
public: public:
virtual ~StringBuffer() {} virtual ~StringBuffer() = default;
virtual const StringView& string() = 0; virtual const StringView& string() = 0;
// This method copies contents. // This method copies contents.
static std::unique_ptr<StringBuffer> create(const StringView&); static std::unique_ptr<StringBuffer> create(const StringView&);
@ -107,7 +107,7 @@ class V8_EXPORT V8StackTrace {
virtual StringView topScriptId() const = 0; virtual StringView topScriptId() const = 0;
virtual StringView topFunctionName() const = 0; virtual StringView topFunctionName() const = 0;
virtual ~V8StackTrace() {} virtual ~V8StackTrace() = default;
virtual std::unique_ptr<protocol::Runtime::API::StackTrace> virtual std::unique_ptr<protocol::Runtime::API::StackTrace>
buildInspectorObject() const = 0; buildInspectorObject() const = 0;
virtual std::unique_ptr<StringBuffer> toString() const = 0; virtual std::unique_ptr<StringBuffer> toString() const = 0;
@ -118,13 +118,13 @@ class V8_EXPORT V8StackTrace {
class V8_EXPORT V8InspectorSession { class V8_EXPORT V8InspectorSession {
public: public:
virtual ~V8InspectorSession() {} virtual ~V8InspectorSession() = default;
// Cross-context inspectable values (DOM nodes in different worlds, etc.). // Cross-context inspectable values (DOM nodes in different worlds, etc.).
class V8_EXPORT Inspectable { class V8_EXPORT Inspectable {
public: public:
virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0; virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0;
virtual ~Inspectable() {} virtual ~Inspectable() = default;
}; };
virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0; virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0;
@ -162,7 +162,7 @@ class V8_EXPORT V8InspectorSession {
class V8_EXPORT V8InspectorClient { class V8_EXPORT V8InspectorClient {
public: public:
virtual ~V8InspectorClient() {} virtual ~V8InspectorClient() = default;
virtual void runMessageLoopOnPause(int contextGroupId) {} virtual void runMessageLoopOnPause(int contextGroupId) {}
virtual void quitMessageLoopOnPause() {} virtual void quitMessageLoopOnPause() {}
@ -239,7 +239,7 @@ struct V8_EXPORT V8StackTraceId {
class V8_EXPORT V8Inspector { class V8_EXPORT V8Inspector {
public: public:
static std::unique_ptr<V8Inspector> create(v8::Isolate*, V8InspectorClient*); static std::unique_ptr<V8Inspector> create(v8::Isolate*, V8InspectorClient*);
virtual ~V8Inspector() {} virtual ~V8Inspector() = default;
// Contexts instrumentation. // Contexts instrumentation.
virtual void contextCreated(const V8ContextInfo&) = 0; virtual void contextCreated(const V8ContextInfo&) = 0;
@ -277,7 +277,7 @@ class V8_EXPORT V8Inspector {
// Connection. // Connection.
class V8_EXPORT Channel { class V8_EXPORT Channel {
public: public:
virtual ~Channel() {} virtual ~Channel() = default;
virtual void sendResponse(int callId, virtual void sendResponse(int callId,
std::unique_ptr<StringBuffer> message) = 0; std::unique_ptr<StringBuffer> message) = 0;
virtual void sendNotification(std::unique_ptr<StringBuffer> message) = 0; virtual void sendNotification(std::unique_ptr<StringBuffer> message) = 0;

316
deps/v8/include/v8-internal.h vendored Normal file
View File

@ -0,0 +1,316 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_
#include <stddef.h>
#include <stdint.h>
#include <type_traits>
#include "v8-version.h" // NOLINT(build/include)
#include "v8config.h" // NOLINT(build/include)
namespace v8 {
class Context;
class Data;
class Isolate;
namespace internal {
class Object;
/**
* Configuration of tagging scheme.
*/
const int kApiPointerSize = sizeof(void*); // NOLINT
const int kApiDoubleSize = sizeof(double); // NOLINT
const int kApiIntSize = sizeof(int); // NOLINT
const int kApiInt64Size = sizeof(int64_t); // NOLINT
// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
template <size_t tagged_ptr_size>
struct SmiTagging;
template <int kSmiShiftSize>
V8_INLINE internal::Object* IntToSmi(int value) {
int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
intptr_t tagged_value =
(static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
return reinterpret_cast<internal::Object*>(tagged_value);
}
// Smi constants for systems where tagged pointer is a 32-bit value.
template <>
struct SmiTagging<4> {
enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
static int SmiShiftSize() { return kSmiShiftSize; }
static int SmiValueSize() { return kSmiValueSize; }
V8_INLINE static int SmiToInt(const internal::Object* value) {
int shift_bits = kSmiTagSize + kSmiShiftSize;
// Throw away top 32 bits and shift down (requires >> to be sign extending).
return static_cast<int>(reinterpret_cast<intptr_t>(value)) >> shift_bits;
}
V8_INLINE static internal::Object* IntToSmi(int value) {
return internal::IntToSmi<kSmiShiftSize>(value);
}
V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
// To be representable as an tagged small integer, the two
// most-significant bits of 'value' must be either 00 or 11 due to
// sign-extension. To check this we add 01 to the two
// most-significant bits, and check if the most-significant bit is 0
//
// CAUTION: The original code below:
// bool result = ((value + 0x40000000) & 0x80000000) == 0;
// may lead to incorrect results according to the C language spec, and
// in fact doesn't work correctly with gcc4.1.1 in some cases: The
// compiler may produce undefined results in case of signed integer
// overflow. The computation must be done w/ unsigned ints.
return static_cast<uintptr_t>(value) + 0x40000000U < 0x80000000U;
}
};
// Smi constants for systems where tagged pointer is a 64-bit value.
template <>
struct SmiTagging<8> {
enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
static int SmiShiftSize() { return kSmiShiftSize; }
static int SmiValueSize() { return kSmiValueSize; }
V8_INLINE static int SmiToInt(const internal::Object* value) {
int shift_bits = kSmiTagSize + kSmiShiftSize;
// Shift down and throw away top 32 bits.
return static_cast<int>(reinterpret_cast<intptr_t>(value) >> shift_bits);
}
V8_INLINE static internal::Object* IntToSmi(int value) {
return internal::IntToSmi<kSmiShiftSize>(value);
}
V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
// To be representable as a long smi, the value must be a 32-bit integer.
return (value == static_cast<int32_t>(value));
}
};
#if V8_COMPRESS_POINTERS
static_assert(
kApiPointerSize == kApiInt64Size,
"Pointer compression can be enabled only for 64-bit architectures");
typedef SmiTagging<4> PlatformSmiTagging;
#else
typedef SmiTagging<kApiPointerSize> PlatformSmiTagging;
#endif
const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
const int kSmiMinValue = (static_cast<unsigned int>(-1)) << (kSmiValueSize - 1);
const int kSmiMaxValue = -(kSmiMinValue + 1);
constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
/**
* This class exports constants and functionality from within v8 that
* is necessary to implement inline functions in the v8 api. Don't
* depend on functions and constants defined here.
*/
class Internals {
public:
// These values match non-compiler-dependent values defined within
// the implementation of v8.
static const int kHeapObjectMapOffset = 0;
static const int kMapInstanceTypeOffset = 1 * kApiPointerSize + kApiIntSize;
static const int kStringResourceOffset =
1 * kApiPointerSize + 2 * kApiIntSize;
static const int kOddballKindOffset = 4 * kApiPointerSize + kApiDoubleSize;
static const int kForeignAddressOffset = kApiPointerSize;
static const int kJSObjectHeaderSize = 3 * kApiPointerSize;
static const int kFixedArrayHeaderSize = 2 * kApiPointerSize;
static const int kContextHeaderSize = 2 * kApiPointerSize;
static const int kContextEmbedderDataIndex = 5;
static const int kFullStringRepresentationMask = 0x0f;
static const int kStringEncodingMask = 0x8;
static const int kExternalTwoByteRepresentationTag = 0x02;
static const int kExternalOneByteRepresentationTag = 0x0a;
static const int kIsolateEmbedderDataOffset = 0 * kApiPointerSize;
static const int kExternalMemoryOffset = 4 * kApiPointerSize;
static const int kExternalMemoryLimitOffset =
kExternalMemoryOffset + kApiInt64Size;
static const int kExternalMemoryAtLastMarkCompactOffset =
kExternalMemoryLimitOffset + kApiInt64Size;
static const int kIsolateRootsOffset = kExternalMemoryLimitOffset +
kApiInt64Size + kApiInt64Size +
kApiPointerSize + kApiPointerSize;
static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5;
static const int kNullValueRootIndex = 6;
static const int kTrueValueRootIndex = 7;
static const int kFalseValueRootIndex = 8;
static const int kEmptyStringRootIndex = 9;
static const int kNodeClassIdOffset = 1 * kApiPointerSize;
static const int kNodeFlagsOffset = 1 * kApiPointerSize + 3;
static const int kNodeStateMask = 0x7;
static const int kNodeStateIsWeakValue = 2;
static const int kNodeStateIsPendingValue = 3;
static const int kNodeStateIsNearDeathValue = 4;
static const int kNodeIsIndependentShift = 3;
static const int kNodeIsActiveShift = 4;
static const int kFirstNonstringType = 0x80;
static const int kOddballType = 0x83;
static const int kForeignType = 0x87;
static const int kJSSpecialApiObjectType = 0x410;
static const int kJSApiObjectType = 0x420;
static const int kJSObjectType = 0x421;
static const int kUndefinedOddballKind = 5;
static const int kNullOddballKind = 3;
static const uint32_t kNumIsolateDataSlots = 4;
// Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
// incremental GC once the external memory reaches this limit.
static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
CheckInitializedImpl(isolate);
#endif
}
V8_INLINE static bool HasHeapObjectTag(const internal::Object* value) {
return ((reinterpret_cast<intptr_t>(value) & kHeapObjectTagMask) ==
kHeapObjectTag);
}
V8_INLINE static int SmiValue(const internal::Object* value) {
return PlatformSmiTagging::SmiToInt(value);
}
V8_INLINE static internal::Object* IntToSmi(int value) {
return PlatformSmiTagging::IntToSmi(value);
}
V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
return PlatformSmiTagging::IsValidSmi(value);
}
V8_INLINE static int GetInstanceType(const internal::Object* obj) {
typedef internal::Object O;
O* map = ReadField<O*>(obj, kHeapObjectMapOffset);
return ReadField<uint16_t>(map, kMapInstanceTypeOffset);
}
V8_INLINE static int GetOddballKind(const internal::Object* obj) {
typedef internal::Object O;
return SmiValue(ReadField<O*>(obj, kOddballKindOffset));
}
V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
int representation = (instance_type & kFullStringRepresentationMask);
return representation == kExternalTwoByteRepresentationTag;
}
V8_INLINE static uint8_t GetNodeFlag(internal::Object** obj, int shift) {
uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
return *addr & static_cast<uint8_t>(1U << shift);
}
V8_INLINE static void UpdateNodeFlag(internal::Object** obj, bool value,
int shift) {
uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
uint8_t mask = static_cast<uint8_t>(1U << shift);
*addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
}
V8_INLINE static uint8_t GetNodeState(internal::Object** obj) {
uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
return *addr & kNodeStateMask;
}
V8_INLINE static void UpdateNodeState(internal::Object** obj, uint8_t value) {
uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
*addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
}
V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
void* data) {
uint8_t* addr = reinterpret_cast<uint8_t*>(isolate) +
kIsolateEmbedderDataOffset + slot * kApiPointerSize;
*reinterpret_cast<void**>(addr) = data;
}
V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
uint32_t slot) {
const uint8_t* addr = reinterpret_cast<const uint8_t*>(isolate) +
kIsolateEmbedderDataOffset + slot * kApiPointerSize;
return *reinterpret_cast<void* const*>(addr);
}
V8_INLINE static internal::Object** GetRoot(v8::Isolate* isolate, int index) {
uint8_t* addr = reinterpret_cast<uint8_t*>(isolate) + kIsolateRootsOffset;
return reinterpret_cast<internal::Object**>(addr + index * kApiPointerSize);
}
template <typename T>
V8_INLINE static T ReadField(const internal::Object* ptr, int offset) {
const uint8_t* addr =
reinterpret_cast<const uint8_t*>(ptr) + offset - kHeapObjectTag;
return *reinterpret_cast<const T*>(addr);
}
template <typename T>
V8_INLINE static T ReadEmbedderData(const v8::Context* context, int index) {
typedef internal::Object O;
typedef internal::Internals I;
O* ctx = *reinterpret_cast<O* const*>(context);
int embedder_data_offset =
I::kContextHeaderSize +
(internal::kApiPointerSize * I::kContextEmbedderDataIndex);
O* embedder_data = I::ReadField<O*>(ctx, embedder_data_offset);
int value_offset =
I::kFixedArrayHeaderSize + (internal::kApiPointerSize * index);
return I::ReadField<T>(embedder_data, value_offset);
}
};
// Only perform cast check for types derived from v8::Data since
// other types do not implement the Cast method.
template <bool PerformCheck>
struct CastCheck {
template <class T>
static void Perform(T* data);
};
template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
T::Cast(data);
}
template <>
template <class T>
void CastCheck<false>::Perform(T* data) {}
template <class T>
V8_INLINE void PerformCastCheck(T* data) {
CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
}
} // namespace internal
} // namespace v8
#endif // INCLUDE_V8_INTERNAL_H_

View File

@ -322,7 +322,9 @@ class Platform {
* |isolate|. Tasks posted for the same isolate should be execute in order of * |isolate|. Tasks posted for the same isolate should be execute in order of
* scheduling. The definition of "foreground" is opaque to V8. * scheduling. The definition of "foreground" is opaque to V8.
*/ */
virtual void CallOnForegroundThread(Isolate* isolate, Task* task) = 0; V8_DEPRECATE_SOON(
"Use a taskrunner acquired by GetForegroundTaskRunner instead.",
virtual void CallOnForegroundThread(Isolate* isolate, Task* task)) = 0;
/** /**
* Schedules a task to be invoked on a foreground thread wrt a specific * Schedules a task to be invoked on a foreground thread wrt a specific
@ -330,8 +332,10 @@ class Platform {
* Tasks posted for the same isolate should be execute in order of * Tasks posted for the same isolate should be execute in order of
* scheduling. The definition of "foreground" is opaque to V8. * scheduling. The definition of "foreground" is opaque to V8.
*/ */
virtual void CallDelayedOnForegroundThread(Isolate* isolate, Task* task, V8_DEPRECATE_SOON(
double delay_in_seconds) = 0; "Use a taskrunner acquired by GetForegroundTaskRunner instead.",
virtual void CallDelayedOnForegroundThread(Isolate* isolate, Task* task,
double delay_in_seconds)) = 0;
/** /**
* Schedules a task to be invoked on a foreground thread wrt a specific * Schedules a task to be invoked on a foreground thread wrt a specific
@ -341,7 +345,10 @@ class Platform {
* starved for an arbitrarily long time if no idle time is available. * starved for an arbitrarily long time if no idle time is available.
* The definition of "foreground" is opaque to V8. * The definition of "foreground" is opaque to V8.
*/ */
virtual void CallIdleOnForegroundThread(Isolate* isolate, IdleTask* task) { V8_DEPRECATE_SOON(
"Use a taskrunner acquired by GetForegroundTaskRunner instead.",
virtual void CallIdleOnForegroundThread(Isolate* isolate,
IdleTask* task)) {
// This must be overriden if |IdleTasksEnabled()|. // This must be overriden if |IdleTasksEnabled()|.
abort(); abort();
} }

View File

@ -341,12 +341,6 @@ class V8_EXPORT CpuProfiler {
V8_DEPRECATED("Use Isolate::SetIdle(bool) instead.", V8_DEPRECATED("Use Isolate::SetIdle(bool) instead.",
void SetIdle(bool is_idle)); void SetIdle(bool is_idle));
/**
* Generate more detailed source positions to code objects. This results in
* better results when mapping profiling samples to script source.
*/
static void UseDetailedSourcePositionsForProfiling(Isolate* isolate);
private: private:
CpuProfiler(); CpuProfiler();
~CpuProfiler(); ~CpuProfiler();
@ -451,7 +445,7 @@ class V8_EXPORT OutputStream { // NOLINT
kContinue = 0, kContinue = 0,
kAbort = 1 kAbort = 1
}; };
virtual ~OutputStream() {} virtual ~OutputStream() = default;
/** Notify about the end of stream. */ /** Notify about the end of stream. */
virtual void EndOfStream() = 0; virtual void EndOfStream() = 0;
/** Get preferred output chunk size. Called only once. */ /** Get preferred output chunk size. Called only once. */
@ -545,7 +539,7 @@ class V8_EXPORT ActivityControl { // NOLINT
kContinue = 0, kContinue = 0,
kAbort = 1 kAbort = 1
}; };
virtual ~ActivityControl() {} virtual ~ActivityControl() = default;
/** /**
* Notify about current progress. The activity can be stopped by * Notify about current progress. The activity can be stopped by
* returning kAbort as the callback result. * returning kAbort as the callback result.
@ -631,7 +625,7 @@ class V8_EXPORT AllocationProfile {
*/ */
virtual Node* GetRootNode() = 0; virtual Node* GetRootNode() = 0;
virtual ~AllocationProfile() {} virtual ~AllocationProfile() = default;
static const int kNoLineNumberInfo = Message::kNoLineNumberInfo; static const int kNoLineNumberInfo = Message::kNoLineNumberInfo;
static const int kNoColumnNumberInfo = Message::kNoColumnInfo; static const int kNoColumnNumberInfo = Message::kNoColumnInfo;
@ -799,15 +793,15 @@ class V8_EXPORT HeapProfiler {
virtual const char* GetName(Local<Object> object) = 0; virtual const char* GetName(Local<Object> object) = 0;
protected: protected:
virtual ~ObjectNameResolver() {} virtual ~ObjectNameResolver() = default;
}; };
/** /**
* Takes a heap snapshot and returns it. * Takes a heap snapshot and returns it.
*/ */
const HeapSnapshot* TakeHeapSnapshot( const HeapSnapshot* TakeHeapSnapshot(
ActivityControl* control = NULL, ActivityControl* control = nullptr,
ObjectNameResolver* global_object_name_resolver = NULL); ObjectNameResolver* global_object_name_resolver = nullptr);
/** /**
* Starts tracking of heap objects population statistics. After calling * Starts tracking of heap objects population statistics. After calling
@ -834,7 +828,7 @@ class V8_EXPORT HeapProfiler {
* method. * method.
*/ */
SnapshotObjectId GetHeapStats(OutputStream* stream, SnapshotObjectId GetHeapStats(OutputStream* stream,
int64_t* timestamp_us = NULL); int64_t* timestamp_us = nullptr);
/** /**
* Stops tracking of heap objects population statistics, cleans up all * Stops tracking of heap objects population statistics, cleans up all
@ -991,8 +985,8 @@ class V8_EXPORT RetainedObjectInfo { // NOLINT
virtual intptr_t GetSizeInBytes() { return -1; } virtual intptr_t GetSizeInBytes() { return -1; }
protected: protected:
RetainedObjectInfo() {} RetainedObjectInfo() = default;
virtual ~RetainedObjectInfo() {} virtual ~RetainedObjectInfo() = default;
private: private:
RetainedObjectInfo(const RetainedObjectInfo&); RetainedObjectInfo(const RetainedObjectInfo&);

View File

@ -94,11 +94,11 @@ class DefaultPersistentValueMapTraits : public StdMapTraits<K, V> {
static WeakCallbackDataType* WeakCallbackParameter( static WeakCallbackDataType* WeakCallbackParameter(
MapType* map, const K& key, Local<V> value) { MapType* map, const K& key, Local<V> value) {
return NULL; return nullptr;
} }
static MapType* MapFromWeakCallbackInfo( static MapType* MapFromWeakCallbackInfo(
const WeakCallbackInfo<WeakCallbackDataType>& data) { const WeakCallbackInfo<WeakCallbackDataType>& data) {
return NULL; return nullptr;
} }
static K KeyFromWeakCallbackInfo( static K KeyFromWeakCallbackInfo(
const WeakCallbackInfo<WeakCallbackDataType>& data) { const WeakCallbackInfo<WeakCallbackDataType>& data) {
@ -302,7 +302,7 @@ class PersistentValueMapBase {
static PersistentContainerValue ClearAndLeak(Global<V>* persistent) { static PersistentContainerValue ClearAndLeak(Global<V>* persistent) {
V* v = persistent->val_; V* v = persistent->val_;
persistent->val_ = 0; persistent->val_ = nullptr;
return reinterpret_cast<PersistentContainerValue>(v); return reinterpret_cast<PersistentContainerValue>(v);
} }
@ -633,7 +633,7 @@ class PersistentValueVector {
private: private:
static PersistentContainerValue ClearAndLeak(Global<V>* persistent) { static PersistentContainerValue ClearAndLeak(Global<V>* persistent) {
V* v = persistent->val_; V* v = persistent->val_;
persistent->val_ = 0; persistent->val_ = nullptr;
return reinterpret_cast<PersistentContainerValue>(v); return reinterpret_cast<PersistentContainerValue>(v);
} }

View File

@ -9,9 +9,9 @@
// NOTE these macros are used by some of the tool scripts and the build // NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts. // system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 7 #define V8_MAJOR_VERSION 7
#define V8_MINOR_VERSION 0 #define V8_MINOR_VERSION 1
#define V8_BUILD_NUMBER 276 #define V8_BUILD_NUMBER 302
#define V8_PATCH_LEVEL 38 #define V8_PATCH_LEVEL 28
// Use 1 for candidates and 0 otherwise. // Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.) // (Boolean macro values are not supported by all preprocessors.)

764
deps/v8/include/v8.h vendored

File diff suppressed because it is too large Load Diff

View File

@ -420,6 +420,36 @@ namespace v8 { template <typename T> class AlignOfHelper { char c; T t; }; }
#define V8_WARN_UNUSED_RESULT /* NOT SUPPORTED */ #define V8_WARN_UNUSED_RESULT /* NOT SUPPORTED */
#endif #endif
#ifdef V8_OS_WIN
// Setup for Windows DLL export/import. When building the V8 DLL the
// BUILDING_V8_SHARED needs to be defined. When building a program which uses
// the V8 DLL USING_V8_SHARED needs to be defined. When either building the V8
// static library or building a program which uses the V8 static library neither
// BUILDING_V8_SHARED nor USING_V8_SHARED should be defined.
#ifdef BUILDING_V8_SHARED
# define V8_EXPORT __declspec(dllexport)
#elif USING_V8_SHARED
# define V8_EXPORT __declspec(dllimport)
#else
# define V8_EXPORT
#endif // BUILDING_V8_SHARED
#else // V8_OS_WIN
// Setup for Linux shared library export.
#if V8_HAS_ATTRIBUTE_VISIBILITY
# ifdef BUILDING_V8_SHARED
# define V8_EXPORT __attribute__ ((visibility("default")))
# else
# define V8_EXPORT
# endif
#else
# define V8_EXPORT
#endif
#endif // V8_OS_WIN
// clang-format on // clang-format on
#endif // V8CONFIG_H_ #endif // V8CONFIG_H_

View File

@ -108,6 +108,23 @@ verifiers {
triggered_by: "v8_win_rel_ng" triggered_by: "v8_win_rel_ng"
} }
} }
# TODO(machenbach): Remove after testing in practice and migrate to
# PRESUBMIT.py scripts.
buckets {
name: "luci.chromium.try"
builders {
name: "cast_shell_android"
experiment_percentage: 20
}
builders {
name: "cast_shell_linux"
experiment_percentage: 20
}
builders {
name: "linux-chromeos-rel"
experiment_percentage: 20
}
}
} }
} }

View File

@ -27,24 +27,12 @@
'mips64el.debug': 'default_debug_mips64el', 'mips64el.debug': 'default_debug_mips64el',
'mips64el.optdebug': 'default_optdebug_mips64el', 'mips64el.optdebug': 'default_optdebug_mips64el',
'mips64el.release': 'default_release_mips64el', 'mips64el.release': 'default_release_mips64el',
'ppc.debug': 'default_debug_ppc',
'ppc.optdebug': 'default_optdebug_ppc',
'ppc.release': 'default_release_ppc',
'ppc.debug.sim': 'default_debug_ppc_sim',
'ppc.optdebug.sim': 'default_optdebug_ppc_sim',
'ppc.release.sim': 'default_release_ppc_sim',
'ppc64.debug': 'default_debug_ppc64', 'ppc64.debug': 'default_debug_ppc64',
'ppc64.optdebug': 'default_optdebug_ppc64', 'ppc64.optdebug': 'default_optdebug_ppc64',
'ppc64.release': 'default_release_ppc64', 'ppc64.release': 'default_release_ppc64',
'ppc64.debug.sim': 'default_debug_ppc64_sim', 'ppc64.debug.sim': 'default_debug_ppc64_sim',
'ppc64.optdebug.sim': 'default_optdebug_ppc64_sim', 'ppc64.optdebug.sim': 'default_optdebug_ppc64_sim',
'ppc64.release.sim': 'default_release_ppc64_sim', 'ppc64.release.sim': 'default_release_ppc64_sim',
's390.debug': 'default_debug_s390',
's390.optdebug': 'default_optdebug_s390',
's390.release': 'default_release_s390',
's390.debug.sim': 'default_debug_s390_sim',
's390.optdebug.sim': 'default_optdebug_s390_sim',
's390.release.sim': 'default_release_s390_sim',
's390x.debug': 'default_debug_s390x', 's390x.debug': 'default_debug_s390x',
's390x.optdebug': 'default_optdebug_s390x', 's390x.optdebug': 'default_optdebug_s390x',
's390x.release': 'default_release_s390x', 's390x.release': 'default_release_s390x',
@ -54,6 +42,7 @@
'x64.debug': 'default_debug_x64', 'x64.debug': 'default_debug_x64',
'x64.optdebug': 'default_optdebug_x64', 'x64.optdebug': 'default_optdebug_x64',
'x64.release': 'default_release_x64', 'x64.release': 'default_release_x64',
'x64.release.sample': 'release_x64_sample',
}, },
'client.dynamorio': { 'client.dynamorio': {
'linux-v8-dr': 'release_x64', 'linux-v8-dr': 'release_x64',
@ -102,6 +91,8 @@
'V8 Linux gcc 4.8': 'release_x86_gcc', 'V8 Linux gcc 4.8': 'release_x86_gcc',
'V8 Linux64 gcc 4.8 - debug': 'debug_x64_gcc', 'V8 Linux64 gcc 4.8 - debug': 'debug_x64_gcc',
# FYI. # FYI.
'V8 Linux - embedded builtins': 'release_x86_embedded_builtins',
'V8 Linux - embedded builtins - debug': 'debug_x86_embedded_builtins',
'V8 Fuchsia': 'release_x64_fuchsia', 'V8 Fuchsia': 'release_x64_fuchsia',
'V8 Fuchsia - debug': 'debug_x64_fuchsia', 'V8 Fuchsia - debug': 'debug_x64_fuchsia',
'V8 Linux64 - cfi': 'release_x64_cfi', 'V8 Linux64 - cfi': 'release_x64_cfi',
@ -138,8 +129,6 @@
'debug_simulate_arm64_asan_edge', 'debug_simulate_arm64_asan_edge',
'V8 Clusterfuzz Linux ASAN arm - debug builder': 'V8 Clusterfuzz Linux ASAN arm - debug builder':
'debug_simulate_arm_asan_edge', 'debug_simulate_arm_asan_edge',
'V8 Clusterfuzz Linux ASAN mipsel - debug builder':
'debug_simulate_mipsel_asan_edge',
'V8 Clusterfuzz Linux64 CFI - release builder': 'V8 Clusterfuzz Linux64 CFI - release builder':
'release_x64_cfi_clusterfuzz', 'release_x64_cfi_clusterfuzz',
'V8 Clusterfuzz Linux MSAN no origins': 'V8 Clusterfuzz Linux MSAN no origins':
@ -169,11 +158,8 @@
'V8 Mips - builder': 'release_mips_no_snap_no_i18n', 'V8 Mips - builder': 'release_mips_no_snap_no_i18n',
'V8 Linux - mipsel - sim - builder': 'release_simulate_mipsel', 'V8 Linux - mipsel - sim - builder': 'release_simulate_mipsel',
'V8 Linux - mips64el - sim - builder': 'release_simulate_mips64el', 'V8 Linux - mips64el - sim - builder': 'release_simulate_mips64el',
# PPC. # IBM.
'V8 Linux - ppc - sim': 'release_simulate_ppc',
'V8 Linux - ppc64 - sim': 'release_simulate_ppc64', 'V8 Linux - ppc64 - sim': 'release_simulate_ppc64',
# S390.
'V8 Linux - s390 - sim': 'release_simulate_s390',
'V8 Linux - s390x - sim': 'release_simulate_s390x', 'V8 Linux - s390x - sim': 'release_simulate_s390x',
}, },
'client.v8.branches': { 'client.v8.branches': {
@ -193,12 +179,8 @@
'V8 mips64el - sim - stable branch': 'release_simulate_mips64el', 'V8 mips64el - sim - stable branch': 'release_simulate_mips64el',
'V8 mipsel - sim - beta branch': 'release_simulate_mipsel', 'V8 mipsel - sim - beta branch': 'release_simulate_mipsel',
'V8 mipsel - sim - stable branch': 'release_simulate_mipsel', 'V8 mipsel - sim - stable branch': 'release_simulate_mipsel',
'V8 ppc - sim - beta branch': 'release_simulate_ppc',
'V8 ppc - sim - stable branch': 'release_simulate_ppc',
'V8 ppc64 - sim - beta branch': 'release_simulate_ppc64', 'V8 ppc64 - sim - beta branch': 'release_simulate_ppc64',
'V8 ppc64 - sim - stable branch': 'release_simulate_ppc64', 'V8 ppc64 - sim - stable branch': 'release_simulate_ppc64',
'V8 s390 - sim - beta branch': 'release_simulate_s390',
'V8 s390 - sim - stable branch': 'release_simulate_s390',
'V8 s390x - sim - beta branch': 'release_simulate_s390x', 'V8 s390x - sim - beta branch': 'release_simulate_s390x',
'V8 s390x - sim - stable branch': 'release_simulate_s390x', 'V8 s390x - sim - stable branch': 'release_simulate_s390x',
}, },
@ -207,7 +189,9 @@
'v8_android_arm64_compile_dbg': 'debug_android_arm64', 'v8_android_arm64_compile_dbg': 'debug_android_arm64',
'v8_android_arm64_n5x_rel_ng': 'release_android_arm64', 'v8_android_arm64_n5x_rel_ng': 'release_android_arm64',
'v8_fuchsia_rel_ng': 'release_x64_fuchsia_trybot', 'v8_fuchsia_rel_ng': 'release_x64_fuchsia_trybot',
'v8_linux_embedded_builtins_rel_ng': 'release_x86_embedded_builtins_trybot',
'v8_linux_rel_ng': 'release_x86_gcmole_trybot', 'v8_linux_rel_ng': 'release_x86_gcmole_trybot',
'v8_linux_optional_rel_ng': 'release_x86_trybot',
'v8_linux_verify_csa_rel_ng': 'release_x86_verify_csa', 'v8_linux_verify_csa_rel_ng': 'release_x86_verify_csa',
'v8_linux_nodcheck_rel_ng': 'release_x86_minimal_symbols', 'v8_linux_nodcheck_rel_ng': 'release_x86_minimal_symbols',
'v8_linux_dbg_ng': 'debug_x86_trybot', 'v8_linux_dbg_ng': 'debug_x86_trybot',
@ -218,6 +202,7 @@
'v8_linux_gcc_compile_rel': 'release_x86_gcc_minimal_symbols', 'v8_linux_gcc_compile_rel': 'release_x86_gcc_minimal_symbols',
'v8_linux_gcc_rel': 'release_x86_gcc_minimal_symbols', 'v8_linux_gcc_rel': 'release_x86_gcc_minimal_symbols',
'v8_linux_shared_compile_rel': 'release_x86_shared_verify_heap', 'v8_linux_shared_compile_rel': 'release_x86_shared_verify_heap',
'v8_linux64_compile_rel_xg': 'release_x64_test_features_trybot',
'v8_linux64_dbg_ng': 'debug_x64_trybot', 'v8_linux64_dbg_ng': 'debug_x64_trybot',
'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc', 'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc',
'v8_linux64_header_includes_dbg': 'debug_x64_header_includes', 'v8_linux64_header_includes_dbg': 'debug_x64_header_includes',
@ -235,6 +220,7 @@
'v8_linux64_tsan_isolates_rel_ng': 'v8_linux64_tsan_isolates_rel_ng':
'release_x64_tsan_minimal_symbols', 'release_x64_tsan_minimal_symbols',
'v8_linux64_ubsan_rel_ng': 'release_x64_ubsan_vptr_minimal_symbols', 'v8_linux64_ubsan_rel_ng': 'release_x64_ubsan_vptr_minimal_symbols',
'v8_odroid_arm_rel_ng': 'release_arm',
# TODO(machenbach): Remove after switching to x64 on infra side. # TODO(machenbach): Remove after switching to x64 on infra side.
'v8_win_dbg': 'debug_x86_trybot', 'v8_win_dbg': 'debug_x86_trybot',
'v8_win_compile_dbg': 'debug_x86_trybot', 'v8_win_compile_dbg': 'debug_x86_trybot',
@ -280,7 +266,7 @@
'default_optdebug_android_arm': [ 'default_optdebug_android_arm': [
'debug', 'arm', 'android', 'v8_enable_slow_dchecks' ], 'debug', 'arm', 'android', 'v8_enable_slow_dchecks' ],
'default_release_android_arm': [ 'default_release_android_arm': [
'release', 'arm', 'android'], 'release', 'arm', 'android', 'android_strip_outputs'],
'default_debug_arm64': [ 'default_debug_arm64': [
'debug', 'simulate_arm64', 'v8_enable_slow_dchecks', 'v8_full_debug'], 'debug', 'simulate_arm64', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_arm64': [ 'default_optdebug_arm64': [
@ -299,18 +285,6 @@
'debug', 'simulate_mips64el', 'v8_enable_slow_dchecks'], 'debug', 'simulate_mips64el', 'v8_enable_slow_dchecks'],
'default_release_mips64el': [ 'default_release_mips64el': [
'release', 'simulate_mips64el'], 'release', 'simulate_mips64el'],
'default_debug_ppc': [
'debug', 'ppc', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_ppc': [
'debug', 'ppc', 'v8_enable_slow_dchecks'],
'default_release_ppc': [
'release', 'ppc'],
'default_debug_ppc_sim': [
'debug', 'simulate_ppc', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_ppc_sim': [
'debug', 'simulate_ppc', 'v8_enable_slow_dchecks'],
'default_release_ppc_sim': [
'release', 'simulate_ppc'],
'default_debug_ppc64': [ 'default_debug_ppc64': [
'debug', 'ppc64', 'gcc', 'v8_enable_slow_dchecks', 'v8_full_debug'], 'debug', 'ppc64', 'gcc', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_ppc64': [ 'default_optdebug_ppc64': [
@ -323,18 +297,6 @@
'debug', 'simulate_ppc64', 'v8_enable_slow_dchecks'], 'debug', 'simulate_ppc64', 'v8_enable_slow_dchecks'],
'default_release_ppc64_sim': [ 'default_release_ppc64_sim': [
'release', 'simulate_ppc64'], 'release', 'simulate_ppc64'],
'default_debug_s390': [
'debug', 's390', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_s390': [
'debug', 's390', 'v8_enable_slow_dchecks'],
'default_release_s390': [
'release', 's390'],
'default_debug_s390_sim': [
'debug', 'simulate_s390', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_s390_sim': [
'debug', 'simulate_s390', 'v8_enable_slow_dchecks'],
'default_release_s390_sim': [
'release', 'simulate_s390'],
'default_debug_s390x': [ 'default_debug_s390x': [
'debug', 's390x', 'v8_enable_slow_dchecks', 'v8_full_debug'], 'debug', 's390x', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_s390x': [ 'default_optdebug_s390x': [
@ -353,6 +315,8 @@
'debug', 'x64', 'v8_enable_slow_dchecks'], 'debug', 'x64', 'v8_enable_slow_dchecks'],
'default_release_x64': [ 'default_release_x64': [
'release', 'x64'], 'release', 'x64'],
'release_x64_sample': [
'release', 'x64', 'sample'],
'default_debug_x86': [ 'default_debug_x86': [
'debug', 'x86', 'v8_enable_slow_dchecks', 'v8_full_debug'], 'debug', 'x86', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_x86': [ 'default_optdebug_x86': [
@ -373,8 +337,6 @@
'debug_simulate_arm64_no_snap': [ 'debug_simulate_arm64_no_snap': [
'debug', 'simulate_arm64', 'shared', 'goma', 'v8_optimized_debug', 'debug', 'simulate_arm64', 'shared', 'goma', 'v8_optimized_debug',
'v8_snapshot_none'], 'v8_snapshot_none'],
'debug_simulate_mipsel_asan_edge': [
'debug_bot', 'simulate_mipsel', 'asan', 'edge'],
# Release configs for simulators. # Release configs for simulators.
'release_simulate_arm': [ 'release_simulate_arm': [
@ -397,12 +359,8 @@
'release_bot', 'simulate_mipsel'], 'release_bot', 'simulate_mipsel'],
'release_simulate_mips64el': [ 'release_simulate_mips64el': [
'release_bot', 'simulate_mips64el'], 'release_bot', 'simulate_mips64el'],
'release_simulate_ppc': [
'release_bot', 'simulate_ppc'],
'release_simulate_ppc64': [ 'release_simulate_ppc64': [
'release_bot', 'simulate_ppc64'], 'release_bot', 'simulate_ppc64'],
'release_simulate_s390': [
'release_bot', 'simulate_s390'],
'release_simulate_s390x': [ 'release_simulate_s390x': [
'release_bot', 'simulate_s390x'], 'release_bot', 'simulate_s390x'],
@ -416,9 +374,11 @@
'release_arm': [ 'release_arm': [
'release_bot', 'arm', 'hard_float'], 'release_bot', 'arm', 'hard_float'],
'release_android_arm': [ 'release_android_arm': [
'release_bot', 'arm', 'android', 'minimal_symbols'], 'release_bot', 'arm', 'android', 'minimal_symbols',
'android_strip_outputs'],
'release_android_arm64': [ 'release_android_arm64': [
'release_bot', 'arm64', 'android', 'minimal_symbols'], 'release_bot', 'arm64', 'android', 'minimal_symbols',
'android_strip_outputs'],
# Release configs for x64. # Release configs for x64.
'release_x64': [ 'release_x64': [
@ -519,6 +479,9 @@
# Debug configs for x86. # Debug configs for x86.
'debug_x86': [ 'debug_x86': [
'debug_bot', 'x86'], 'debug_bot', 'x86'],
'debug_x86_embedded_builtins': [
'debug_bot', 'x86', 'v8_enable_embedded_builtins',
'v8_no_untrusted_code_mitigations'],
'debug_x86_minimal_symbols': [ 'debug_x86_minimal_symbols': [
'debug_bot', 'x86', 'minimal_symbols'], 'debug_bot', 'x86', 'minimal_symbols'],
'debug_x86_no_i18n': [ 'debug_x86_no_i18n': [
@ -538,6 +501,12 @@
# Release configs for x86. # Release configs for x86.
'release_x86': [ 'release_x86': [
'release_bot', 'x86'], 'release_bot', 'x86'],
'release_x86_embedded_builtins': [
'release_bot', 'x86', 'v8_enable_embedded_builtins',
'v8_no_untrusted_code_mitigations'],
'release_x86_embedded_builtins_trybot': [
'release_trybot', 'x86', 'v8_enable_embedded_builtins',
'v8_no_untrusted_code_mitigations'],
'release_x86_gcc': [ 'release_x86_gcc': [
'release_bot', 'x86', 'gcc'], 'release_bot', 'x86', 'gcc'],
'release_x86_gcc_minimal_symbols': [ 'release_x86_gcc_minimal_symbols': [
@ -580,6 +549,10 @@
'gn_args': 'target_os="android" v8_android_log_stdout=true', 'gn_args': 'target_os="android" v8_android_log_stdout=true',
}, },
'android_strip_outputs': {
'gn_args': 'android_unstripped_runtime_outputs=false',
},
'arm': { 'arm': {
'gn_args': 'target_cpu="arm"', 'gn_args': 'target_cpu="arm"',
}, },
@ -625,14 +598,10 @@
'gn_args': 'is_debug=true v8_enable_backtrace=true', 'gn_args': 'is_debug=true v8_enable_backtrace=true',
}, },
'v8_use_multi_snapshots': {
'gn_args': 'v8_use_multi_snapshots=true',
},
'debug_bot': { 'debug_bot': {
'mixins': [ 'mixins': [
'debug', 'shared', 'goma', 'v8_enable_slow_dchecks', 'debug', 'shared', 'goma', 'v8_enable_slow_dchecks',
'v8_use_multi_snapshots', 'v8_optimized_debug'], 'v8_optimized_debug'],
}, },
'debug_trybot': { 'debug_trybot': {
@ -715,11 +684,11 @@
}, },
'release': { 'release': {
'gn_args': 'is_debug=false android_unstripped_runtime_outputs=false', 'gn_args': 'is_debug=false',
}, },
'release_bot': { 'release_bot': {
'mixins': ['release', 'static', 'goma', 'v8_use_multi_snapshots'], 'mixins': ['release', 'static', 'goma'],
}, },
'release_trybot': { 'release_trybot': {
@ -747,18 +716,10 @@
'gn_args': 'target_cpu="x64" v8_target_cpu="mips64el"', 'gn_args': 'target_cpu="x64" v8_target_cpu="mips64el"',
}, },
'simulate_ppc': {
'gn_args': 'target_cpu="x86" v8_target_cpu="ppc"',
},
'simulate_ppc64': { 'simulate_ppc64': {
'gn_args': 'target_cpu="x64" v8_target_cpu="ppc64"', 'gn_args': 'target_cpu="x64" v8_target_cpu="ppc64"',
}, },
'simulate_s390': {
'gn_args': 'target_cpu="x86" v8_target_cpu="s390"',
},
'simulate_s390x': { 'simulate_s390x': {
'gn_args': 'target_cpu="x64" v8_target_cpu="s390x"', 'gn_args': 'target_cpu="x64" v8_target_cpu="s390x"',
}, },
@ -808,6 +769,10 @@
'gn_args': 'v8_correctness_fuzzer=true v8_multi_arch_build=true', 'gn_args': 'v8_correctness_fuzzer=true v8_multi_arch_build=true',
}, },
'v8_enable_embedded_builtins': {
'gn_args': 'v8_enable_embedded_builtins=true',
},
'v8_enable_slow_dchecks': { 'v8_enable_slow_dchecks': {
'gn_args': 'v8_enable_slow_dchecks=true', 'gn_args': 'v8_enable_slow_dchecks=true',
}, },
@ -853,6 +818,10 @@
'gn_args': 'v8_use_snapshot=false', 'gn_args': 'v8_use_snapshot=false',
}, },
'v8_no_untrusted_code_mitigations': {
'gn_args': 'v8_untrusted_code_mitigations=false',
},
'v8_verify_heap': { 'v8_verify_heap': {
'gn_args': 'v8_enable_verify_heap=true', 'gn_args': 'v8_enable_verify_heap=true',
}, },
@ -861,18 +830,10 @@
'gn_args': 'v8_enable_verify_csa=true', 'gn_args': 'v8_enable_verify_csa=true',
}, },
's390': {
'gn_args': 'target_cpu="s390x" v8_target_cpu="s390"',
},
's390x': { 's390x': {
'gn_args': 'target_cpu="s390x" v8_target_cpu="s390x"', 'gn_args': 'target_cpu="s390x" v8_target_cpu="s390x"',
}, },
'ppc': {
'gn_args': 'target_cpu="ppc"',
},
'ppc64': { 'ppc64': {
'gn_args': 'target_cpu="ppc64" use_custom_libcxx=false', 'gn_args': 'target_cpu="ppc64" use_custom_libcxx=false',
}, },
@ -885,5 +846,9 @@
'gn_args': 'target_cpu="x86"', 'gn_args': 'target_cpu="x86"',
}, },
'sample': {
'gn_args': 'v8_monolithic=true is_component_build=false '
'v8_use_external_startup_data=false use_custom_libcxx=false',
},
}, },
} }

2
deps/v8/src/DEPS vendored
View File

@ -13,6 +13,7 @@ include_rules = [
"+src/heap/heap.h", "+src/heap/heap.h",
"+src/heap/heap-inl.h", "+src/heap/heap-inl.h",
"+src/heap/heap-write-barrier-inl.h", "+src/heap/heap-write-barrier-inl.h",
"+src/heap/heap-write-barrier.h",
"-src/inspector", "-src/inspector",
"-src/interpreter", "-src/interpreter",
"+src/interpreter/bytecode-array-accessor.h", "+src/interpreter/bytecode-array-accessor.h",
@ -30,6 +31,7 @@ include_rules = [
"+testing/gtest/include/gtest/gtest_prod.h", "+testing/gtest/include/gtest/gtest_prod.h",
"-src/libplatform", "-src/libplatform",
"-include/libplatform", "-include/libplatform",
"+builtins-generated",
"+torque-generated" "+torque-generated"
] ]

View File

@ -31,7 +31,8 @@ Handle<AccessorInfo> Accessors::MakeAccessor(
info->set_is_special_data_property(true); info->set_is_special_data_property(true);
info->set_is_sloppy(false); info->set_is_sloppy(false);
info->set_replace_on_access(false); info->set_replace_on_access(false);
info->set_has_no_side_effect(false); info->set_getter_side_effect_type(SideEffectType::kHasSideEffect);
info->set_setter_side_effect_type(SideEffectType::kHasSideEffect);
name = factory->InternalizeName(name); name = factory->InternalizeName(name);
info->set_name(*name); info->set_name(*name);
Handle<Object> get = v8::FromCData(isolate, getter); Handle<Object> get = v8::FromCData(isolate, getter);
@ -70,7 +71,7 @@ bool Accessors::IsJSObjectFieldAccessor(Isolate* isolate, Handle<Map> map,
default: default:
if (map->instance_type() < FIRST_NONSTRING_TYPE) { if (map->instance_type() < FIRST_NONSTRING_TYPE) {
return CheckForName(isolate, name, isolate->factory()->length_string(), return CheckForName(isolate, name, isolate->factory()->length_string(),
String::kLengthOffset, FieldIndex::kTagged, index); String::kLengthOffset, FieldIndex::kWord32, index);
} }
return false; return false;

View File

@ -22,27 +22,28 @@ class JavaScriptFrame;
// The list of accessor descriptors. This is a second-order macro // The list of accessor descriptors. This is a second-order macro
// taking a macro to be applied to all accessor descriptor names. // taking a macro to be applied to all accessor descriptor names.
#define ACCESSOR_INFO_LIST(V) \ // V(accessor_name, AccessorName, GetterSideEffectType, SetterSideEffectType)
V(arguments_iterator, ArgumentsIterator) \ #define ACCESSOR_INFO_LIST_GENERATOR(V, _) \
V(array_length, ArrayLength) \ V(_, arguments_iterator, ArgumentsIterator, kHasNoSideEffect, \
V(bound_function_length, BoundFunctionLength) \ kHasSideEffectToReceiver) \
V(bound_function_name, BoundFunctionName) \ V(_, array_length, ArrayLength, kHasNoSideEffect, kHasSideEffectToReceiver) \
V(error_stack, ErrorStack) \ V(_, bound_function_length, BoundFunctionLength, kHasNoSideEffect, \
V(function_arguments, FunctionArguments) \ kHasSideEffectToReceiver) \
V(function_caller, FunctionCaller) \ V(_, bound_function_name, BoundFunctionName, kHasNoSideEffect, \
V(function_name, FunctionName) \ kHasSideEffectToReceiver) \
V(function_length, FunctionLength) \ V(_, error_stack, ErrorStack, kHasSideEffectToReceiver, \
V(function_prototype, FunctionPrototype) \ kHasSideEffectToReceiver) \
V(string_length, StringLength) V(_, function_arguments, FunctionArguments, kHasNoSideEffect, \
kHasSideEffectToReceiver) \
#define SIDE_EFFECT_FREE_ACCESSOR_INFO_LIST(V) \ V(_, function_caller, FunctionCaller, kHasNoSideEffect, \
V(ArrayLength) \ kHasSideEffectToReceiver) \
V(BoundFunctionLength) \ V(_, function_name, FunctionName, kHasNoSideEffect, \
V(BoundFunctionName) \ kHasSideEffectToReceiver) \
V(FunctionName) \ V(_, function_length, FunctionLength, kHasNoSideEffect, \
V(FunctionLength) \ kHasSideEffectToReceiver) \
V(FunctionPrototype) \ V(_, function_prototype, FunctionPrototype, kHasNoSideEffect, \
V(StringLength) kHasSideEffectToReceiver) \
V(_, string_length, StringLength, kHasNoSideEffect, kHasSideEffectToReceiver)
#define ACCESSOR_SETTER_LIST(V) \ #define ACCESSOR_SETTER_LIST(V) \
V(ArrayLengthSetter) \ V(ArrayLengthSetter) \
@ -55,11 +56,11 @@ class JavaScriptFrame;
class Accessors : public AllStatic { class Accessors : public AllStatic {
public: public:
#define ACCESSOR_GETTER_DECLARATION(accessor_name, AccessorName) \ #define ACCESSOR_GETTER_DECLARATION(_, accessor_name, AccessorName, ...) \
static void AccessorName##Getter( \ static void AccessorName##Getter( \
v8::Local<v8::Name> name, \ v8::Local<v8::Name> name, \
const v8::PropertyCallbackInfo<v8::Value>& info); const v8::PropertyCallbackInfo<v8::Value>& info);
ACCESSOR_INFO_LIST(ACCESSOR_GETTER_DECLARATION) ACCESSOR_INFO_LIST_GENERATOR(ACCESSOR_GETTER_DECLARATION, /* not used */)
#undef ACCESSOR_GETTER_DECLARATION #undef ACCESSOR_GETTER_DECLARATION
#define ACCESSOR_SETTER_DECLARATION(accessor_name) \ #define ACCESSOR_SETTER_DECLARATION(accessor_name) \
@ -71,7 +72,7 @@ class Accessors : public AllStatic {
static constexpr int kAccessorInfoCount = static constexpr int kAccessorInfoCount =
#define COUNT_ACCESSOR(...) +1 #define COUNT_ACCESSOR(...) +1
ACCESSOR_INFO_LIST(COUNT_ACCESSOR); ACCESSOR_INFO_LIST_GENERATOR(COUNT_ACCESSOR, /* not used */);
#undef COUNT_ACCESSOR #undef COUNT_ACCESSOR
static constexpr int kAccessorSetterCount = static constexpr int kAccessorSetterCount =
@ -118,9 +119,9 @@ class Accessors : public AllStatic {
AccessorNameBooleanSetterCallback setter); AccessorNameBooleanSetterCallback setter);
private: private:
#define ACCESSOR_INFO_DECLARATION(accessor_name, AccessorName) \ #define ACCESSOR_INFO_DECLARATION(_, accessor_name, AccessorName, ...) \
static Handle<AccessorInfo> Make##AccessorName##Info(Isolate* isolate); static Handle<AccessorInfo> Make##AccessorName##Info(Isolate* isolate);
ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION) ACCESSOR_INFO_LIST_GENERATOR(ACCESSOR_INFO_DECLARATION, /* not used */)
#undef ACCESSOR_INFO_DECLARATION #undef ACCESSOR_INFO_DECLARATION
friend class Heap; friend class Heap;

View File

@ -14,8 +14,8 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
map_ = isolate->root_index_map(); map_ = isolate->root_index_map();
if (map_ != nullptr) return; if (map_ != nullptr) return;
map_ = new HeapObjectToIndexHashMap(); map_ = new HeapObjectToIndexHashMap();
for (uint32_t i = 0; i < Heap::kStrongRootListLength; i++) { for (RootIndex root_index = RootIndex::kFirstStrongRoot;
Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i); root_index <= RootIndex::kLastStrongRoot; ++root_index) {
Object* root = isolate->heap()->root(root_index); Object* root = isolate->heap()->root(root_index);
if (!root->IsHeapObject()) continue; if (!root->IsHeapObject()) continue;
// Omit root entries that can be written after initialization. They must // Omit root entries that can be written after initialization. They must
@ -25,11 +25,12 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
if (isolate->heap()->RootCanBeTreatedAsConstant(root_index)) { if (isolate->heap()->RootCanBeTreatedAsConstant(root_index)) {
HeapObject* heap_object = HeapObject::cast(root); HeapObject* heap_object = HeapObject::cast(root);
Maybe<uint32_t> maybe_index = map_->Get(heap_object); Maybe<uint32_t> maybe_index = map_->Get(heap_object);
uint32_t index = static_cast<uint32_t>(root_index);
if (maybe_index.IsJust()) { if (maybe_index.IsJust()) {
// Some are initialized to a previous value in the root list. // Some are initialized to a previous value in the root list.
DCHECK_LT(maybe_index.FromJust(), i); DCHECK_LT(maybe_index.FromJust(), index);
} else { } else {
map_->Set(heap_object, i); map_->Set(heap_object, index);
} }
} else { } else {
// Immortal immovable root objects are constant and allocated on the first // Immortal immovable root objects are constant and allocated on the first

View File

@ -56,11 +56,14 @@ class RootIndexMap {
public: public:
explicit RootIndexMap(Isolate* isolate); explicit RootIndexMap(Isolate* isolate);
static const int kInvalidRootIndex = -1; // Returns true on successful lookup and sets *|out_root_list|.
bool Lookup(HeapObject* obj, RootIndex* out_root_list) {
int Lookup(HeapObject* obj) {
Maybe<uint32_t> maybe_index = map_->Get(obj); Maybe<uint32_t> maybe_index = map_->Get(obj);
return maybe_index.IsJust() ? maybe_index.FromJust() : kInvalidRootIndex; if (maybe_index.IsJust()) {
*out_root_list = static_cast<RootIndex>(maybe_index.FromJust());
return true;
}
return false;
} }
private: private:

View File

@ -0,0 +1,52 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_ALLOCATION_SITE_SCOPES_INL_H_
#define V8_ALLOCATION_SITE_SCOPES_INL_H_
#include "src/allocation-site-scopes.h"
#include "src/objects/allocation-site-inl.h"
namespace v8 {
namespace internal {
Handle<AllocationSite> AllocationSiteUsageContext::EnterNewScope() {
if (top().is_null()) {
InitializeTraversal(top_site_);
} else {
// Advance current site
Object* nested_site = current()->nested_site();
// Something is wrong if we advance to the end of the list here.
update_current_site(AllocationSite::cast(nested_site));
}
return Handle<AllocationSite>(*current(), isolate());
}
void AllocationSiteUsageContext::ExitScope(Handle<AllocationSite> scope_site,
Handle<JSObject> object) {
// This assert ensures that we are pointing at the right sub-object in a
// recursive walk of a nested literal.
DCHECK(object.is_null() || *object == scope_site->boilerplate());
}
bool AllocationSiteUsageContext::ShouldCreateMemento(Handle<JSObject> object) {
if (activated_ && AllocationSite::CanTrack(object->map()->instance_type())) {
if (FLAG_allocation_site_pretenuring ||
AllocationSite::ShouldTrack(object->GetElementsKind())) {
if (FLAG_trace_creation_allocation_sites) {
PrintF("*** Creating Memento for %s %p\n",
object->IsJSArray() ? "JSArray" : "JSObject",
static_cast<void*>(*object));
}
return true;
}
}
return false;
}
} // namespace internal
} // namespace v8
#endif // V8_ALLOCATION_SITE_SCOPES_INL_H_

View File

@ -56,40 +56,12 @@ class AllocationSiteUsageContext : public AllocationSiteContext {
top_site_(site), top_site_(site),
activated_(activated) { } activated_(activated) { }
inline Handle<AllocationSite> EnterNewScope() { inline Handle<AllocationSite> EnterNewScope();
if (top().is_null()) {
InitializeTraversal(top_site_);
} else {
// Advance current site
Object* nested_site = current()->nested_site();
// Something is wrong if we advance to the end of the list here.
update_current_site(AllocationSite::cast(nested_site));
}
return Handle<AllocationSite>(*current(), isolate());
}
inline void ExitScope(Handle<AllocationSite> scope_site, inline void ExitScope(Handle<AllocationSite> scope_site,
Handle<JSObject> object) { Handle<JSObject> object);
// This assert ensures that we are pointing at the right sub-object in a
// recursive walk of a nested literal.
DCHECK(object.is_null() || *object == scope_site->boilerplate());
}
bool ShouldCreateMemento(Handle<JSObject> object) { inline bool ShouldCreateMemento(Handle<JSObject> object);
if (activated_ &&
AllocationSite::CanTrack(object->map()->instance_type())) {
if (FLAG_allocation_site_pretenuring ||
AllocationSite::ShouldTrack(object->GetElementsKind())) {
if (FLAG_trace_creation_allocation_sites) {
PrintF("*** Creating Memento for %s %p\n",
object->IsJSArray() ? "JSArray" : "JSObject",
static_cast<void*>(*object));
}
return true;
}
}
return false;
}
static const bool kCopying = true; static const bool kCopying = true;

View File

@ -8,6 +8,7 @@
#include "src/base/bits.h" #include "src/base/bits.h"
#include "src/base/lazy-instance.h" #include "src/base/lazy-instance.h"
#include "src/base/logging.h" #include "src/base/logging.h"
#include "src/base/lsan-page-allocator.h"
#include "src/base/page-allocator.h" #include "src/base/page-allocator.h"
#include "src/base/platform/platform.h" #include "src/base/platform/platform.h"
#include "src/utils.h" #include "src/utils.h"
@ -17,10 +18,6 @@
#include <malloc.h> // NOLINT #include <malloc.h> // NOLINT
#endif #endif
#if defined(LEAK_SANITIZER)
#include <sanitizer/lsan_interface.h>
#endif
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -51,21 +48,29 @@ struct InitializePageAllocator {
static v8::base::PageAllocator default_allocator; static v8::base::PageAllocator default_allocator;
page_allocator = &default_allocator; page_allocator = &default_allocator;
} }
#if defined(LEAK_SANITIZER)
{
static v8::base::LsanPageAllocator lsan_allocator(page_allocator);
page_allocator = &lsan_allocator;
}
#endif
*page_allocator_ptr = page_allocator; *page_allocator_ptr = page_allocator;
} }
}; };
static base::LazyInstance<v8::PageAllocator*, InitializePageAllocator>::type static base::LazyInstance<v8::PageAllocator*, InitializePageAllocator>::type
page_allocator = LAZY_INSTANCE_INITIALIZER; page_allocator = LAZY_INSTANCE_INITIALIZER;
v8::PageAllocator* GetPageAllocator() { return page_allocator.Get(); }
// We will attempt allocation this many times. After each failure, we call // We will attempt allocation this many times. After each failure, we call
// OnCriticalMemoryPressure to try to free some memory. // OnCriticalMemoryPressure to try to free some memory.
const int kAllocationTries = 2; const int kAllocationTries = 2;
} // namespace } // namespace
v8::PageAllocator* GetPlatformPageAllocator() {
DCHECK_NOT_NULL(page_allocator.Get());
return page_allocator.Get();
}
void* Malloced::New(size_t size) { void* Malloced::New(size_t size) {
void* result = AllocWithRetry(size); void* result = AllocWithRetry(size);
if (result == nullptr) { if (result == nullptr) {
@ -131,68 +136,62 @@ void AlignedFree(void *ptr) {
#endif #endif
} }
size_t AllocatePageSize() { return GetPageAllocator()->AllocatePageSize(); } size_t AllocatePageSize() {
return GetPlatformPageAllocator()->AllocatePageSize();
}
size_t CommitPageSize() { return GetPageAllocator()->CommitPageSize(); } size_t CommitPageSize() { return GetPlatformPageAllocator()->CommitPageSize(); }
void SetRandomMmapSeed(int64_t seed) { void SetRandomMmapSeed(int64_t seed) {
GetPageAllocator()->SetRandomMmapSeed(seed); GetPlatformPageAllocator()->SetRandomMmapSeed(seed);
} }
void* GetRandomMmapAddr() { return GetPageAllocator()->GetRandomMmapAddr(); } void* GetRandomMmapAddr() {
return GetPlatformPageAllocator()->GetRandomMmapAddr();
}
void* AllocatePages(void* address, size_t size, size_t alignment, void* AllocatePages(v8::PageAllocator* page_allocator, void* address,
size_t size, size_t alignment,
PageAllocator::Permission access) { PageAllocator::Permission access) {
DCHECK_NOT_NULL(page_allocator);
DCHECK_EQ(address, AlignedAddress(address, alignment)); DCHECK_EQ(address, AlignedAddress(address, alignment));
DCHECK_EQ(0UL, size & (GetPageAllocator()->AllocatePageSize() - 1)); DCHECK_EQ(0UL, size & (page_allocator->AllocatePageSize() - 1));
void* result = nullptr; void* result = nullptr;
for (int i = 0; i < kAllocationTries; ++i) { for (int i = 0; i < kAllocationTries; ++i) {
result = result = page_allocator->AllocatePages(address, size, alignment, access);
GetPageAllocator()->AllocatePages(address, size, alignment, access);
if (result != nullptr) break; if (result != nullptr) break;
size_t request_size = size + alignment - AllocatePageSize(); size_t request_size = size + alignment - page_allocator->AllocatePageSize();
if (!OnCriticalMemoryPressure(request_size)) break; if (!OnCriticalMemoryPressure(request_size)) break;
} }
#if defined(LEAK_SANITIZER)
if (result != nullptr) {
__lsan_register_root_region(result, size);
}
#endif
return result; return result;
} }
bool FreePages(void* address, const size_t size) { bool FreePages(v8::PageAllocator* page_allocator, void* address,
DCHECK_EQ(0UL, size & (GetPageAllocator()->AllocatePageSize() - 1)); const size_t size) {
bool result = GetPageAllocator()->FreePages(address, size); DCHECK_NOT_NULL(page_allocator);
#if defined(LEAK_SANITIZER) DCHECK_EQ(0UL, size & (page_allocator->AllocatePageSize() - 1));
if (result) { return page_allocator->FreePages(address, size);
__lsan_unregister_root_region(address, size);
}
#endif
return result;
} }
bool ReleasePages(void* address, size_t size, size_t new_size) { bool ReleasePages(v8::PageAllocator* page_allocator, void* address, size_t size,
size_t new_size) {
DCHECK_NOT_NULL(page_allocator);
DCHECK_LT(new_size, size); DCHECK_LT(new_size, size);
bool result = GetPageAllocator()->ReleasePages(address, size, new_size); return page_allocator->ReleasePages(address, size, new_size);
#if defined(LEAK_SANITIZER)
if (result) {
__lsan_unregister_root_region(address, size);
__lsan_register_root_region(address, new_size);
}
#endif
return result;
} }
bool SetPermissions(void* address, size_t size, bool SetPermissions(v8::PageAllocator* page_allocator, void* address,
PageAllocator::Permission access) { size_t size, PageAllocator::Permission access) {
return GetPageAllocator()->SetPermissions(address, size, access); DCHECK_NOT_NULL(page_allocator);
return page_allocator->SetPermissions(address, size, access);
} }
byte* AllocatePage(void* address, size_t* allocated) { byte* AllocatePage(v8::PageAllocator* page_allocator, void* address,
size_t page_size = AllocatePageSize(); size_t* allocated) {
void* result = DCHECK_NOT_NULL(page_allocator);
AllocatePages(address, page_size, page_size, PageAllocator::kReadWrite); size_t page_size = page_allocator->AllocatePageSize();
void* result = AllocatePages(page_allocator, address, page_size, page_size,
PageAllocator::kReadWrite);
if (result != nullptr) *allocated = page_size; if (result != nullptr) *allocated = page_size;
return static_cast<byte*>(result); return static_cast<byte*>(result);
} }
@ -206,16 +205,17 @@ bool OnCriticalMemoryPressure(size_t length) {
return true; return true;
} }
VirtualMemory::VirtualMemory() : address_(kNullAddress), size_(0) {} VirtualMemory::VirtualMemory(v8::PageAllocator* page_allocator, size_t size,
void* hint, size_t alignment)
VirtualMemory::VirtualMemory(size_t size, void* hint, size_t alignment) : page_allocator_(page_allocator) {
: address_(kNullAddress), size_(0) { DCHECK_NOT_NULL(page_allocator);
size_t page_size = AllocatePageSize(); size_t page_size = page_allocator_->AllocatePageSize();
size_t alloc_size = RoundUp(size, page_size); alignment = RoundUp(alignment, page_size);
address_ = reinterpret_cast<Address>( size = RoundUp(size, page_size);
AllocatePages(hint, alloc_size, alignment, PageAllocator::kNoAccess)); Address address = reinterpret_cast<Address>(AllocatePages(
if (address_ != kNullAddress) { page_allocator_, hint, size, alignment, PageAllocator::kNoAccess));
size_ = alloc_size; if (address != kNullAddress) {
region_ = base::AddressRegion(address, size);
} }
} }
@ -226,30 +226,31 @@ VirtualMemory::~VirtualMemory() {
} }
void VirtualMemory::Reset() { void VirtualMemory::Reset() {
address_ = kNullAddress; page_allocator_ = nullptr;
size_ = 0; region_ = base::AddressRegion();
} }
bool VirtualMemory::SetPermissions(Address address, size_t size, bool VirtualMemory::SetPermissions(Address address, size_t size,
PageAllocator::Permission access) { PageAllocator::Permission access) {
CHECK(InVM(address, size)); CHECK(InVM(address, size));
bool result = v8::internal::SetPermissions(address, size, access); bool result =
v8::internal::SetPermissions(page_allocator_, address, size, access);
DCHECK(result); DCHECK(result);
return result; return result;
} }
size_t VirtualMemory::Release(Address free_start) { size_t VirtualMemory::Release(Address free_start) {
DCHECK(IsReserved()); DCHECK(IsReserved());
DCHECK(IsAddressAligned(free_start, CommitPageSize())); DCHECK(IsAddressAligned(free_start, page_allocator_->CommitPageSize()));
// Notice: Order is important here. The VirtualMemory object might live // Notice: Order is important here. The VirtualMemory object might live
// inside the allocated region. // inside the allocated region.
const size_t free_size = size_ - (free_start - address_);
const size_t old_size = region_.size();
const size_t free_size = old_size - (free_start - region_.begin());
CHECK(InVM(free_start, free_size)); CHECK(InVM(free_start, free_size));
DCHECK_LT(address_, free_start); region_.set_size(old_size - free_size);
DCHECK_LT(free_start, address_ + size_); CHECK(ReleasePages(page_allocator_, reinterpret_cast<void*>(region_.begin()),
CHECK(ReleasePages(reinterpret_cast<void*>(address_), size_, old_size, region_.size()));
size_ - free_size));
size_ -= free_size;
return free_size; return free_size;
} }
@ -257,41 +258,21 @@ void VirtualMemory::Free() {
DCHECK(IsReserved()); DCHECK(IsReserved());
// Notice: Order is important here. The VirtualMemory object might live // Notice: Order is important here. The VirtualMemory object might live
// inside the allocated region. // inside the allocated region.
Address address = address_; v8::PageAllocator* page_allocator = page_allocator_;
size_t size = size_; base::AddressRegion region = region_;
CHECK(InVM(address, size));
Reset(); Reset();
// FreePages expects size to be aligned to allocation granularity. Trimming // FreePages expects size to be aligned to allocation granularity however
// may leave size at only commit granularity. Align it here. // ReleasePages may leave size at only commit granularity. Align it here.
CHECK(FreePages(reinterpret_cast<void*>(address), CHECK(FreePages(page_allocator, reinterpret_cast<void*>(region.begin()),
RoundUp(size, AllocatePageSize()))); RoundUp(region.size(), page_allocator->AllocatePageSize())));
} }
void VirtualMemory::TakeControl(VirtualMemory* from) { void VirtualMemory::TakeControl(VirtualMemory* from) {
DCHECK(!IsReserved()); DCHECK(!IsReserved());
address_ = from->address_; page_allocator_ = from->page_allocator_;
size_ = from->size_; region_ = from->region_;
from->Reset(); from->Reset();
} }
bool AllocVirtualMemory(size_t size, void* hint, VirtualMemory* result) {
VirtualMemory vm(size, hint);
if (vm.IsReserved()) {
result->TakeControl(&vm);
return true;
}
return false;
}
bool AlignedAllocVirtualMemory(size_t size, size_t alignment, void* hint,
VirtualMemory* result) {
VirtualMemory vm(size, hint, alignment);
if (vm.IsReserved()) {
result->TakeControl(&vm);
return true;
}
return false;
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

View File

@ -6,6 +6,7 @@
#define V8_ALLOCATION_H_ #define V8_ALLOCATION_H_
#include "include/v8-platform.h" #include "include/v8-platform.h"
#include "src/base/address-region.h"
#include "src/base/compiler-specific.h" #include "src/base/compiler-specific.h"
#include "src/base/platform/platform.h" #include "src/base/platform/platform.h"
#include "src/globals.h" #include "src/globals.h"
@ -82,6 +83,9 @@ void* AllocWithRetry(size_t size);
void* AlignedAlloc(size_t size, size_t alignment); void* AlignedAlloc(size_t size, size_t alignment);
void AlignedFree(void *ptr); void AlignedFree(void *ptr);
// Returns platfrom page allocator instance. Guaranteed to be a valid pointer.
V8_EXPORT_PRIVATE v8::PageAllocator* GetPlatformPageAllocator();
// Gets the page granularity for AllocatePages and FreePages. Addresses returned // Gets the page granularity for AllocatePages and FreePages. Addresses returned
// by AllocatePages and AllocatePage are aligned to this size. // by AllocatePages and AllocatePage are aligned to this size.
V8_EXPORT_PRIVATE size_t AllocatePageSize(); V8_EXPORT_PRIVATE size_t AllocatePageSize();
@ -101,14 +105,16 @@ V8_EXPORT_PRIVATE void* GetRandomMmapAddr();
// AllocatePageSize(). Returns the address of the allocated memory, with the // AllocatePageSize(). Returns the address of the allocated memory, with the
// specified size and alignment, or nullptr on failure. // specified size and alignment, or nullptr on failure.
V8_EXPORT_PRIVATE V8_EXPORT_PRIVATE
V8_WARN_UNUSED_RESULT void* AllocatePages(void* address, size_t size, V8_WARN_UNUSED_RESULT void* AllocatePages(v8::PageAllocator* page_allocator,
void* address, size_t size,
size_t alignment, size_t alignment,
PageAllocator::Permission access); PageAllocator::Permission access);
// Frees memory allocated by a call to AllocatePages. |address| and |size| must // Frees memory allocated by a call to AllocatePages. |address| and |size| must
// be multiples of AllocatePageSize(). Returns true on success, otherwise false. // be multiples of AllocatePageSize(). Returns true on success, otherwise false.
V8_EXPORT_PRIVATE V8_EXPORT_PRIVATE
V8_WARN_UNUSED_RESULT bool FreePages(void* address, const size_t size); V8_WARN_UNUSED_RESULT bool FreePages(v8::PageAllocator* page_allocator,
void* address, const size_t size);
// Releases memory that is no longer needed. The range specified by |address| // Releases memory that is no longer needed. The range specified by |address|
// and |size| must be an allocated memory region. |size| and |new_size| must be // and |size| must be an allocated memory region. |size| and |new_size| must be
@ -116,7 +122,8 @@ V8_WARN_UNUSED_RESULT bool FreePages(void* address, const size_t size);
// Released memory is left in an undefined state, so it should not be accessed. // Released memory is left in an undefined state, so it should not be accessed.
// Returns true on success, otherwise false. // Returns true on success, otherwise false.
V8_EXPORT_PRIVATE V8_EXPORT_PRIVATE
V8_WARN_UNUSED_RESULT bool ReleasePages(void* address, size_t size, V8_WARN_UNUSED_RESULT bool ReleasePages(v8::PageAllocator* page_allocator,
void* address, size_t size,
size_t new_size); size_t new_size);
// Sets permissions according to |access|. |address| and |size| must be // Sets permissions according to |access|. |address| and |size| must be
@ -124,18 +131,21 @@ V8_WARN_UNUSED_RESULT bool ReleasePages(void* address, size_t size,
// cause the memory contents to be lost. Returns true on success, otherwise // cause the memory contents to be lost. Returns true on success, otherwise
// false. // false.
V8_EXPORT_PRIVATE V8_EXPORT_PRIVATE
V8_WARN_UNUSED_RESULT bool SetPermissions(void* address, size_t size, V8_WARN_UNUSED_RESULT bool SetPermissions(v8::PageAllocator* page_allocator,
void* address, size_t size,
PageAllocator::Permission access); PageAllocator::Permission access);
inline bool SetPermissions(Address address, size_t size, inline bool SetPermissions(v8::PageAllocator* page_allocator, Address address,
PageAllocator::Permission access) { size_t size, PageAllocator::Permission access) {
return SetPermissions(reinterpret_cast<void*>(address), size, access); return SetPermissions(page_allocator, reinterpret_cast<void*>(address), size,
access);
} }
// Convenience function that allocates a single system page with read and write // Convenience function that allocates a single system page with read and write
// permissions. |address| is a hint. Returns the base address of the memory and // permissions. |address| is a hint. Returns the base address of the memory and
// the page size via |allocated| on success. Returns nullptr on failure. // the page size via |allocated| on success. Returns nullptr on failure.
V8_EXPORT_PRIVATE V8_EXPORT_PRIVATE
V8_WARN_UNUSED_RESULT byte* AllocatePage(void* address, size_t* allocated); V8_WARN_UNUSED_RESULT byte* AllocatePage(v8::PageAllocator* page_allocator,
void* address, size_t* allocated);
// Function that may release reserved memory regions to allow failed allocations // Function that may release reserved memory regions to allow failed allocations
// to succeed. |length| is the amount of memory needed. Returns |true| if memory // to succeed. |length| is the amount of memory needed. Returns |true| if memory
@ -143,50 +153,67 @@ V8_WARN_UNUSED_RESULT byte* AllocatePage(void* address, size_t* allocated);
V8_EXPORT_PRIVATE bool OnCriticalMemoryPressure(size_t length); V8_EXPORT_PRIVATE bool OnCriticalMemoryPressure(size_t length);
// Represents and controls an area of reserved memory. // Represents and controls an area of reserved memory.
class V8_EXPORT_PRIVATE VirtualMemory { class V8_EXPORT_PRIVATE VirtualMemory final {
public: public:
// Empty VirtualMemory object, controlling no reserved memory. // Empty VirtualMemory object, controlling no reserved memory.
VirtualMemory(); VirtualMemory() = default;
// Reserves virtual memory containing an area of the given size that is // Reserves virtual memory containing an area of the given size that is
// aligned per alignment. This may not be at the position returned by // aligned per |alignment| rounded up to the |page_allocator|'s allocate page
// address(). // size.
VirtualMemory(size_t size, void* hint, size_t alignment = AllocatePageSize()); // This may not be at the position returned by address().
VirtualMemory(v8::PageAllocator* page_allocator, size_t size, void* hint,
size_t alignment = 1);
// Construct a virtual memory by assigning it some already mapped address // Construct a virtual memory by assigning it some already mapped address
// and size. // and size.
VirtualMemory(Address address, size_t size) VirtualMemory(v8::PageAllocator* page_allocator, Address address, size_t size)
: address_(address), size_(size) {} : page_allocator_(page_allocator), region_(address, size) {
DCHECK_NOT_NULL(page_allocator);
}
// Releases the reserved memory, if any, controlled by this VirtualMemory // Releases the reserved memory, if any, controlled by this VirtualMemory
// object. // object.
~VirtualMemory(); ~VirtualMemory();
// Move constructor.
VirtualMemory(VirtualMemory&& other) V8_NOEXCEPT { TakeControl(&other); }
// Move assignment operator.
VirtualMemory& operator=(VirtualMemory&& other) V8_NOEXCEPT {
TakeControl(&other);
return *this;
}
// Returns whether the memory has been reserved. // Returns whether the memory has been reserved.
bool IsReserved() const { return address_ != kNullAddress; } bool IsReserved() const { return region_.begin() != kNullAddress; }
// Initialize or resets an embedded VirtualMemory object. // Initialize or resets an embedded VirtualMemory object.
void Reset(); void Reset();
v8::PageAllocator* page_allocator() { return page_allocator_; }
const base::AddressRegion& region() const { return region_; }
// Returns the start address of the reserved memory. // Returns the start address of the reserved memory.
// If the memory was reserved with an alignment, this address is not // If the memory was reserved with an alignment, this address is not
// necessarily aligned. The user might need to round it up to a multiple of // necessarily aligned. The user might need to round it up to a multiple of
// the alignment to get the start of the aligned block. // the alignment to get the start of the aligned block.
Address address() const { Address address() const {
DCHECK(IsReserved()); DCHECK(IsReserved());
return address_; return region_.begin();
} }
Address end() const { Address end() const {
DCHECK(IsReserved()); DCHECK(IsReserved());
return address_ + size_; return region_.end();
} }
// Returns the size of the reserved memory. The returned value is only // Returns the size of the reserved memory. The returned value is only
// meaningful when IsReserved() returns true. // meaningful when IsReserved() returns true.
// If the memory was reserved with an alignment, this size may be larger // If the memory was reserved with an alignment, this size may be larger
// than the requested size. // than the requested size.
size_t size() const { return size_; } size_t size() const { return region_.size(); }
// Sets permissions according to the access argument. address and size must be // Sets permissions according to the access argument. address and size must be
// multiples of CommitPageSize(). Returns true on success, otherwise false. // multiples of CommitPageSize(). Returns true on success, otherwise false.
@ -204,17 +231,16 @@ class V8_EXPORT_PRIVATE VirtualMemory {
void TakeControl(VirtualMemory* from); void TakeControl(VirtualMemory* from);
bool InVM(Address address, size_t size) { bool InVM(Address address, size_t size) {
return (address_ <= address) && ((address_ + size_) >= (address + size)); return region_.contains(address, size);
} }
private: private:
Address address_; // Start address of the virtual memory. // Page allocator that controls the virtual memory.
size_t size_; // Size of the virtual memory. v8::PageAllocator* page_allocator_ = nullptr;
}; base::AddressRegion region_;
bool AllocVirtualMemory(size_t size, void* hint, VirtualMemory* result); DISALLOW_COPY_AND_ASSIGN(VirtualMemory);
bool AlignedAllocVirtualMemory(size_t size, size_t alignment, void* hint, };
VirtualMemory* result);
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

View File

@ -8,6 +8,7 @@
#include "src/api-arguments.h" #include "src/api-arguments.h"
#include "src/api-inl.h" #include "src/api-inl.h"
#include "src/debug/debug.h"
#include "src/objects/api-callbacks.h" #include "src/objects/api-callbacks.h"
#include "src/tracing/trace-event.h" #include "src/tracing/trace-event.h"
#include "src/vm-state-inl.h" #include "src/vm-state-inl.h"
@ -34,6 +35,10 @@ inline JSObject* PropertyCallbackArguments::holder() {
return JSObject::cast(this->begin()[T::kHolderIndex]); return JSObject::cast(this->begin()[T::kHolderIndex]);
} }
inline Object* PropertyCallbackArguments::receiver() {
return Object::cast(this->begin()[T::kThisIndex]);
}
inline JSObject* FunctionCallbackArguments::holder() { inline JSObject* FunctionCallbackArguments::holder() {
return JSObject::cast(this->begin()[T::kHolderIndex]); return JSObject::cast(this->begin()[T::kHolderIndex]);
} }
@ -47,14 +52,24 @@ inline JSObject* FunctionCallbackArguments::holder() {
DCHECK(!name->IsPrivate()); \ DCHECK(!name->IsPrivate()); \
DCHECK_IMPLIES(name->IsSymbol(), interceptor->can_intercept_symbols()); DCHECK_IMPLIES(name->IsSymbol(), interceptor->can_intercept_symbols());
#define PREPARE_CALLBACK_INFO(ISOLATE, F, RETURN_VALUE, API_RETURN_TYPE, \ #define PREPARE_CALLBACK_INFO(ISOLATE, F, RETURN_VALUE, API_RETURN_TYPE, \
CALLBACK_INFO) \ CALLBACK_INFO, RECEIVER, ACCESSOR_KIND) \
if (ISOLATE->debug_execution_mode() == DebugInfo::kSideEffects && \ if (ISOLATE->debug_execution_mode() == DebugInfo::kSideEffects && \
!ISOLATE->debug()->PerformSideEffectCheckForCallback(CALLBACK_INFO)) { \ !ISOLATE->debug()->PerformSideEffectCheckForCallback( \
return RETURN_VALUE(); \ CALLBACK_INFO, RECEIVER, Debug::k##ACCESSOR_KIND)) { \
} \ return RETURN_VALUE(); \
VMState<EXTERNAL> state(ISOLATE); \ } \
ExternalCallbackScope call_scope(ISOLATE, FUNCTION_ADDR(F)); \ VMState<EXTERNAL> state(ISOLATE); \
ExternalCallbackScope call_scope(ISOLATE, FUNCTION_ADDR(F)); \
PropertyCallbackInfo<API_RETURN_TYPE> callback_info(begin());
#define PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(ISOLATE, F, RETURN_VALUE, \
API_RETURN_TYPE) \
if (ISOLATE->debug_execution_mode() == DebugInfo::kSideEffects) { \
return RETURN_VALUE(); \
} \
VMState<EXTERNAL> state(ISOLATE); \
ExternalCallbackScope call_scope(ISOLATE, FUNCTION_ADDR(F)); \
PropertyCallbackInfo<API_RETURN_TYPE> callback_info(begin()); PropertyCallbackInfo<API_RETURN_TYPE> callback_info(begin());
#define CREATE_NAMED_CALLBACK(FUNCTION, TYPE, RETURN_TYPE, API_RETURN_TYPE, \ #define CREATE_NAMED_CALLBACK(FUNCTION, TYPE, RETURN_TYPE, API_RETURN_TYPE, \
@ -65,11 +80,13 @@ inline JSObject* FunctionCallbackArguments::holder() {
Isolate* isolate = this->isolate(); \ Isolate* isolate = this->isolate(); \
RuntimeCallTimerScope timer( \ RuntimeCallTimerScope timer( \
isolate, RuntimeCallCounterId::kNamed##FUNCTION##Callback); \ isolate, RuntimeCallCounterId::kNamed##FUNCTION##Callback); \
Handle<Object> receiver_check_unsupported; \
GenericNamedProperty##FUNCTION##Callback f = \ GenericNamedProperty##FUNCTION##Callback f = \
ToCData<GenericNamedProperty##FUNCTION##Callback>( \ ToCData<GenericNamedProperty##FUNCTION##Callback>( \
interceptor->TYPE()); \ interceptor->TYPE()); \
PREPARE_CALLBACK_INFO(isolate, f, Handle<RETURN_TYPE>, API_RETURN_TYPE, \ PREPARE_CALLBACK_INFO(isolate, f, Handle<RETURN_TYPE>, API_RETURN_TYPE, \
INFO_FOR_SIDE_EFFECT); \ INFO_FOR_SIDE_EFFECT, receiver_check_unsupported, \
NotAccessor); \
LOG(isolate, \ LOG(isolate, \
ApiNamedPropertyAccess("interceptor-named-" #TYPE, holder(), *name)); \ ApiNamedPropertyAccess("interceptor-named-" #TYPE, holder(), *name)); \
f(v8::Utils::ToLocal(name), callback_info); \ f(v8::Utils::ToLocal(name), callback_info); \
@ -87,10 +104,12 @@ FOR_EACH_CALLBACK(CREATE_NAMED_CALLBACK)
Isolate* isolate = this->isolate(); \ Isolate* isolate = this->isolate(); \
RuntimeCallTimerScope timer( \ RuntimeCallTimerScope timer( \
isolate, RuntimeCallCounterId::kIndexed##FUNCTION##Callback); \ isolate, RuntimeCallCounterId::kIndexed##FUNCTION##Callback); \
Handle<Object> receiver_check_unsupported; \
IndexedProperty##FUNCTION##Callback f = \ IndexedProperty##FUNCTION##Callback f = \
ToCData<IndexedProperty##FUNCTION##Callback>(interceptor->TYPE()); \ ToCData<IndexedProperty##FUNCTION##Callback>(interceptor->TYPE()); \
PREPARE_CALLBACK_INFO(isolate, f, Handle<RETURN_TYPE>, API_RETURN_TYPE, \ PREPARE_CALLBACK_INFO(isolate, f, Handle<RETURN_TYPE>, API_RETURN_TYPE, \
INFO_FOR_SIDE_EFFECT); \ INFO_FOR_SIDE_EFFECT, receiver_check_unsupported, \
NotAccessor); \
LOG(isolate, ApiIndexedPropertyAccess("interceptor-indexed-" #TYPE, \ LOG(isolate, ApiIndexedPropertyAccess("interceptor-indexed-" #TYPE, \
holder(), index)); \ holder(), index)); \
f(index, callback_info); \ f(index, callback_info); \
@ -108,9 +127,11 @@ Handle<Object> FunctionCallbackArguments::Call(CallHandlerInfo* handler) {
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kFunctionCallback); RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kFunctionCallback);
v8::FunctionCallback f = v8::FunctionCallback f =
v8::ToCData<v8::FunctionCallback>(handler->callback()); v8::ToCData<v8::FunctionCallback>(handler->callback());
Handle<Object> receiver_check_unsupported;
if (isolate->debug_execution_mode() == DebugInfo::kSideEffects && if (isolate->debug_execution_mode() == DebugInfo::kSideEffects &&
!isolate->debug()->PerformSideEffectCheckForCallback( !isolate->debug()->PerformSideEffectCheckForCallback(
handle(handler, isolate))) { handle(handler, isolate), receiver_check_unsupported,
Debug::kNotAccessor)) {
return Handle<Object>(); return Handle<Object>();
} }
VMState<EXTERNAL> state(isolate); VMState<EXTERNAL> state(isolate);
@ -167,10 +188,11 @@ Handle<Object> PropertyCallbackArguments::CallNamedDescriptor(
Handle<Object> PropertyCallbackArguments::BasicCallNamedGetterCallback( Handle<Object> PropertyCallbackArguments::BasicCallNamedGetterCallback(
GenericNamedPropertyGetterCallback f, Handle<Name> name, GenericNamedPropertyGetterCallback f, Handle<Name> name,
Handle<Object> info) { Handle<Object> info, Handle<Object> receiver) {
DCHECK(!name->IsPrivate()); DCHECK(!name->IsPrivate());
Isolate* isolate = this->isolate(); Isolate* isolate = this->isolate();
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, info); PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, info, receiver,
Getter);
f(v8::Utils::ToLocal(name), callback_info); f(v8::Utils::ToLocal(name), callback_info);
return GetReturnValue<Object>(isolate); return GetReturnValue<Object>(isolate);
} }
@ -184,9 +206,8 @@ Handle<Object> PropertyCallbackArguments::CallNamedSetter(
Isolate* isolate = this->isolate(); Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate, RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kNamedSetterCallback); RuntimeCallCounterId::kNamedSetterCallback);
Handle<Object> side_effect_check_not_supported; PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, v8::Value);
side_effect_check_not_supported);
LOG(isolate, LOG(isolate,
ApiNamedPropertyAccess("interceptor-named-set", holder(), *name)); ApiNamedPropertyAccess("interceptor-named-set", holder(), *name));
f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info); f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info);
@ -202,9 +223,8 @@ Handle<Object> PropertyCallbackArguments::CallNamedDefiner(
RuntimeCallCounterId::kNamedDefinerCallback); RuntimeCallCounterId::kNamedDefinerCallback);
GenericNamedPropertyDefinerCallback f = GenericNamedPropertyDefinerCallback f =
ToCData<GenericNamedPropertyDefinerCallback>(interceptor->definer()); ToCData<GenericNamedPropertyDefinerCallback>(interceptor->definer());
Handle<Object> side_effect_check_not_supported; PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, v8::Value);
side_effect_check_not_supported);
LOG(isolate, LOG(isolate,
ApiNamedPropertyAccess("interceptor-named-define", holder(), *name)); ApiNamedPropertyAccess("interceptor-named-define", holder(), *name));
f(v8::Utils::ToLocal(name), desc, callback_info); f(v8::Utils::ToLocal(name), desc, callback_info);
@ -219,9 +239,8 @@ Handle<Object> PropertyCallbackArguments::CallIndexedSetter(
RuntimeCallCounterId::kIndexedSetterCallback); RuntimeCallCounterId::kIndexedSetterCallback);
IndexedPropertySetterCallback f = IndexedPropertySetterCallback f =
ToCData<IndexedPropertySetterCallback>(interceptor->setter()); ToCData<IndexedPropertySetterCallback>(interceptor->setter());
Handle<Object> side_effect_check_not_supported; PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, v8::Value);
side_effect_check_not_supported);
LOG(isolate, LOG(isolate,
ApiIndexedPropertyAccess("interceptor-indexed-set", holder(), index)); ApiIndexedPropertyAccess("interceptor-indexed-set", holder(), index));
f(index, v8::Utils::ToLocal(value), callback_info); f(index, v8::Utils::ToLocal(value), callback_info);
@ -237,9 +256,8 @@ Handle<Object> PropertyCallbackArguments::CallIndexedDefiner(
RuntimeCallCounterId::kIndexedDefinerCallback); RuntimeCallCounterId::kIndexedDefinerCallback);
IndexedPropertyDefinerCallback f = IndexedPropertyDefinerCallback f =
ToCData<IndexedPropertyDefinerCallback>(interceptor->definer()); ToCData<IndexedPropertyDefinerCallback>(interceptor->definer());
Handle<Object> side_effect_check_not_supported; PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, v8::Value);
side_effect_check_not_supported);
LOG(isolate, LOG(isolate,
ApiIndexedPropertyAccess("interceptor-indexed-define", holder(), index)); ApiIndexedPropertyAccess("interceptor-indexed-define", holder(), index));
f(index, desc, callback_info); f(index, desc, callback_info);
@ -275,7 +293,9 @@ Handle<Object> PropertyCallbackArguments::CallIndexedDescriptor(
Handle<Object> PropertyCallbackArguments::BasicCallIndexedGetterCallback( Handle<Object> PropertyCallbackArguments::BasicCallIndexedGetterCallback(
IndexedPropertyGetterCallback f, uint32_t index, Handle<Object> info) { IndexedPropertyGetterCallback f, uint32_t index, Handle<Object> info) {
Isolate* isolate = this->isolate(); Isolate* isolate = this->isolate();
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, info); Handle<Object> receiver_check_unsupported;
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, v8::Value, info,
receiver_check_unsupported, Getter);
f(index, callback_info); f(index, callback_info);
return GetReturnValue<Object>(isolate); return GetReturnValue<Object>(isolate);
} }
@ -287,7 +307,9 @@ Handle<JSObject> PropertyCallbackArguments::CallPropertyEnumerator(
v8::ToCData<IndexedPropertyEnumeratorCallback>(interceptor->enumerator()); v8::ToCData<IndexedPropertyEnumeratorCallback>(interceptor->enumerator());
// TODO(cbruni): assert same type for indexed and named callback. // TODO(cbruni): assert same type for indexed and named callback.
Isolate* isolate = this->isolate(); Isolate* isolate = this->isolate();
PREPARE_CALLBACK_INFO(isolate, f, Handle<JSObject>, v8::Array, interceptor); Handle<Object> receiver_check_unsupported;
PREPARE_CALLBACK_INFO(isolate, f, Handle<JSObject>, v8::Array, interceptor,
receiver_check_unsupported, NotAccessor);
f(callback_info); f(callback_info);
return GetReturnValue<JSObject>(isolate); return GetReturnValue<JSObject>(isolate);
} }
@ -303,7 +325,8 @@ Handle<Object> PropertyCallbackArguments::CallAccessorGetter(
LOG(isolate, ApiNamedPropertyAccess("accessor-getter", holder(), *name)); LOG(isolate, ApiNamedPropertyAccess("accessor-getter", holder(), *name));
AccessorNameGetterCallback f = AccessorNameGetterCallback f =
ToCData<AccessorNameGetterCallback>(info->getter()); ToCData<AccessorNameGetterCallback>(info->getter());
return BasicCallNamedGetterCallback(f, name, info); return BasicCallNamedGetterCallback(f, name, info,
handle(receiver(), isolate));
} }
Handle<Object> PropertyCallbackArguments::CallAccessorSetter( Handle<Object> PropertyCallbackArguments::CallAccessorSetter(
@ -314,15 +337,15 @@ Handle<Object> PropertyCallbackArguments::CallAccessorSetter(
RuntimeCallCounterId::kAccessorSetterCallback); RuntimeCallCounterId::kAccessorSetterCallback);
AccessorNameSetterCallback f = AccessorNameSetterCallback f =
ToCData<AccessorNameSetterCallback>(accessor_info->setter()); ToCData<AccessorNameSetterCallback>(accessor_info->setter());
Handle<Object> side_effect_check_not_supported; PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, void, accessor_info,
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, void, handle(receiver(), isolate), Setter);
side_effect_check_not_supported);
LOG(isolate, ApiNamedPropertyAccess("accessor-setter", holder(), *name)); LOG(isolate, ApiNamedPropertyAccess("accessor-setter", holder(), *name));
f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info); f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info);
return GetReturnValue<Object>(isolate); return GetReturnValue<Object>(isolate);
} }
#undef PREPARE_CALLBACK_INFO #undef PREPARE_CALLBACK_INFO
#undef PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

View File

@ -26,12 +26,12 @@ class CustomArguments : public CustomArgumentsBase {
public: public:
static const int kReturnValueOffset = T::kReturnValueIndex; static const int kReturnValueOffset = T::kReturnValueIndex;
~CustomArguments() { ~CustomArguments() override {
this->begin()[kReturnValueOffset] = this->begin()[kReturnValueOffset] =
reinterpret_cast<Object*>(kHandleZapValue); reinterpret_cast<Object*>(kHandleZapValue);
} }
virtual inline void IterateInstance(RootVisitor* v) { inline void IterateInstance(RootVisitor* v) override {
v->VisitRootPointers(Root::kRelocatable, nullptr, values_, v->VisitRootPointers(Root::kRelocatable, nullptr, values_,
values_ + T::kArgsLength); values_ + T::kArgsLength);
} }
@ -133,9 +133,10 @@ class PropertyCallbackArguments
IndexedPropertyGetterCallback f, uint32_t index, Handle<Object> info); IndexedPropertyGetterCallback f, uint32_t index, Handle<Object> info);
inline Handle<Object> BasicCallNamedGetterCallback( inline Handle<Object> BasicCallNamedGetterCallback(
GenericNamedPropertyGetterCallback f, Handle<Name> name, GenericNamedPropertyGetterCallback f, Handle<Name> name,
Handle<Object> info); Handle<Object> info, Handle<Object> receiver = Handle<Object>());
inline JSObject* holder(); inline JSObject* holder();
inline Object* receiver();
// Don't copy PropertyCallbackArguments, because they would both have the // Don't copy PropertyCallbackArguments, because they would both have the
// same prev_ pointer. // same prev_ pointer.

View File

@ -7,6 +7,7 @@
#include "src/api.h" #include "src/api.h"
#include "src/objects-inl.h" #include "src/objects-inl.h"
#include "src/objects/stack-frame-info.h"
namespace v8 { namespace v8 {

View File

@ -114,9 +114,8 @@ MaybeHandle<Object> DefineDataProperty(Isolate* isolate,
} }
#endif #endif
MAYBE_RETURN_NULL( MAYBE_RETURN_NULL(Object::AddDataProperty(
Object::AddDataProperty(&it, value, attributes, kThrowOnError, &it, value, attributes, kThrowOnError, StoreOrigin::kNamed));
Object::CERTAINLY_NOT_STORE_FROM_KEYED));
return value; return value;
} }
@ -403,8 +402,10 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
} }
Handle<JSObject> object; Handle<JSObject> object;
ASSIGN_RETURN_ON_EXCEPTION(isolate, object, ASSIGN_RETURN_ON_EXCEPTION(
JSObject::New(constructor, new_target), JSObject); isolate, object,
JSObject::New(constructor, new_target, Handle<AllocationSite>::null()),
JSObject);
if (is_prototype) JSObject::OptimizeAsPrototype(object); if (is_prototype) JSObject::OptimizeAsPrototype(object);
@ -495,8 +496,15 @@ MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
parent_prototype); parent_prototype);
} }
} }
InstanceType function_type =
(!data->needs_access_check() &&
data->named_property_handler()->IsUndefined(isolate) &&
data->indexed_property_handler()->IsUndefined(isolate))
? JS_API_OBJECT_TYPE
: JS_SPECIAL_API_OBJECT_TYPE;
Handle<JSFunction> function = ApiNatives::CreateApiFunction( Handle<JSFunction> function = ApiNatives::CreateApiFunction(
isolate, data, prototype, ApiNatives::JavaScriptObjectType, maybe_name); isolate, data, prototype, function_type, maybe_name);
if (serial_number) { if (serial_number) {
// Cache the function. // Cache the function.
CacheTemplateInstantiation(isolate, serial_number, CachingMode::kUnlimited, CacheTemplateInstantiation(isolate, serial_number, CachingMode::kUnlimited,
@ -625,8 +633,7 @@ void ApiNatives::AddNativeDataProperty(Isolate* isolate,
Handle<JSFunction> ApiNatives::CreateApiFunction( Handle<JSFunction> ApiNatives::CreateApiFunction(
Isolate* isolate, Handle<FunctionTemplateInfo> obj, Isolate* isolate, Handle<FunctionTemplateInfo> obj,
Handle<Object> prototype, ApiInstanceType instance_type, Handle<Object> prototype, InstanceType type, MaybeHandle<Name> maybe_name) {
MaybeHandle<Name> maybe_name) {
Handle<SharedFunctionInfo> shared = Handle<SharedFunctionInfo> shared =
FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(isolate, obj, FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(isolate, obj,
maybe_name); maybe_name);
@ -670,33 +677,10 @@ Handle<JSFunction> ApiNatives::CreateApiFunction(
immutable_proto = instance_template->immutable_proto(); immutable_proto = instance_template->immutable_proto();
} }
// TODO(svenpanne) Kill ApiInstanceType and refactor things by generalizing // JS_FUNCTION_TYPE requires information about the prototype slot.
// JSObject::GetHeaderSize. DCHECK_NE(JS_FUNCTION_TYPE, type);
int instance_size = kPointerSize * embedder_field_count; int instance_size =
InstanceType type; JSObject::GetHeaderSize(type) + kPointerSize * embedder_field_count;
switch (instance_type) {
case JavaScriptObjectType:
if (!obj->needs_access_check() &&
obj->named_property_handler()->IsUndefined(isolate) &&
obj->indexed_property_handler()->IsUndefined(isolate)) {
type = JS_API_OBJECT_TYPE;
} else {
type = JS_SPECIAL_API_OBJECT_TYPE;
}
instance_size += JSObject::kHeaderSize;
break;
case GlobalObjectType:
type = JS_GLOBAL_OBJECT_TYPE;
instance_size += JSGlobalObject::kSize;
break;
case GlobalProxyType:
type = JS_GLOBAL_PROXY_TYPE;
instance_size += JSGlobalProxy::kSize;
break;
default:
UNREACHABLE();
break;
}
Handle<Map> map = isolate->factory()->NewMap(type, instance_size, Handle<Map> map = isolate->factory()->NewMap(type, instance_size,
TERMINAL_FAST_ELEMENTS_KIND); TERMINAL_FAST_ELEMENTS_KIND);

View File

@ -9,6 +9,7 @@
#include "src/base/macros.h" #include "src/base/macros.h"
#include "src/handles.h" #include "src/handles.h"
#include "src/maybe-handles.h" #include "src/maybe-handles.h"
#include "src/objects.h"
#include "src/property-details.h" #include "src/property-details.h"
namespace v8 { namespace v8 {
@ -33,15 +34,9 @@ class ApiNatives {
V8_WARN_UNUSED_RESULT static MaybeHandle<JSObject> InstantiateRemoteObject( V8_WARN_UNUSED_RESULT static MaybeHandle<JSObject> InstantiateRemoteObject(
Handle<ObjectTemplateInfo> data); Handle<ObjectTemplateInfo> data);
enum ApiInstanceType {
JavaScriptObjectType,
GlobalObjectType,
GlobalProxyType
};
static Handle<JSFunction> CreateApiFunction( static Handle<JSFunction> CreateApiFunction(
Isolate* isolate, Handle<FunctionTemplateInfo> obj, Isolate* isolate, Handle<FunctionTemplateInfo> obj,
Handle<Object> prototype, ApiInstanceType instance_type, Handle<Object> prototype, InstanceType type,
MaybeHandle<Name> name = MaybeHandle<Name>()); MaybeHandle<Name> name = MaybeHandle<Name>());
static void AddDataProperty(Isolate* isolate, Handle<TemplateInfo> info, static void AddDataProperty(Isolate* isolate, Handle<TemplateInfo> info,

456
deps/v8/src/api.cc vendored
View File

@ -58,7 +58,9 @@
#include "src/objects/js-regexp-inl.h" #include "src/objects/js-regexp-inl.h"
#include "src/objects/module-inl.h" #include "src/objects/module-inl.h"
#include "src/objects/ordered-hash-table-inl.h" #include "src/objects/ordered-hash-table-inl.h"
#include "src/objects/stack-frame-info-inl.h"
#include "src/objects/templates.h" #include "src/objects/templates.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/parser.h" #include "src/parsing/parser.h"
#include "src/parsing/scanner-character-streams.h" #include "src/parsing/scanner-character-streams.h"
#include "src/pending-compilation-error-handler.h" #include "src/pending-compilation-error-handler.h"
@ -834,6 +836,7 @@ StartupData SnapshotCreator::CreateBlob(
} }
data->created_ = true; data->created_ = true;
DCHECK(i::Snapshot::VerifyChecksum(&result));
return result; return result;
} }
@ -876,12 +879,12 @@ void RegisteredExtension::UnregisterAll() {
namespace { namespace {
class ExtensionResource : public String::ExternalOneByteStringResource { class ExtensionResource : public String::ExternalOneByteStringResource {
public: public:
ExtensionResource() : data_(0), length_(0) {} ExtensionResource() : data_(nullptr), length_(0) {}
ExtensionResource(const char* data, size_t length) ExtensionResource(const char* data, size_t length)
: data_(data), length_(length) {} : data_(data), length_(length) {}
const char* data() const { return data_; } const char* data() const override { return data_; }
size_t length() const { return length_; } size_t length() const override { return length_; }
virtual void Dispose() {} void Dispose() override {}
private: private:
const char* data_; const char* data_;
@ -1391,7 +1394,7 @@ static Local<FunctionTemplate> FunctionTemplateNew(
next_serial_number = isolate->heap()->GetNextTemplateSerialNumber(); next_serial_number = isolate->heap()->GetNextTemplateSerialNumber();
} }
obj->set_serial_number(i::Smi::FromInt(next_serial_number)); obj->set_serial_number(i::Smi::FromInt(next_serial_number));
if (callback != 0) { if (callback != nullptr) {
Utils::ToLocal(obj)->SetCallHandler(callback, data, side_effect_type); Utils::ToLocal(obj)->SetCallHandler(callback, data, side_effect_type);
} }
obj->set_length(length); obj->set_length(length);
@ -1676,7 +1679,8 @@ static void TemplateSetAccessor(
Template* template_obj, v8::Local<Name> name, Getter getter, Setter setter, Template* template_obj, v8::Local<Name> name, Getter getter, Setter setter,
Data data, AccessControl settings, PropertyAttribute attribute, Data data, AccessControl settings, PropertyAttribute attribute,
v8::Local<AccessorSignature> signature, bool is_special_data_property, v8::Local<AccessorSignature> signature, bool is_special_data_property,
bool replace_on_access, SideEffectType getter_side_effect_type) { bool replace_on_access, SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
auto info = Utils::OpenHandle(template_obj); auto info = Utils::OpenHandle(template_obj);
auto isolate = info->GetIsolate(); auto isolate = info->GetIsolate();
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate);
@ -1686,8 +1690,8 @@ static void TemplateSetAccessor(
is_special_data_property, replace_on_access); is_special_data_property, replace_on_access);
accessor_info->set_initial_property_attributes( accessor_info->set_initial_property_attributes(
static_cast<i::PropertyAttributes>(attribute)); static_cast<i::PropertyAttributes>(attribute));
accessor_info->set_has_no_side_effect(getter_side_effect_type == accessor_info->set_getter_side_effect_type(getter_side_effect_type);
SideEffectType::kHasNoSideEffect); accessor_info->set_setter_side_effect_type(setter_side_effect_type);
i::ApiNatives::AddNativeDataProperty(isolate, info, accessor_info); i::ApiNatives::AddNativeDataProperty(isolate, info, accessor_info);
} }
@ -1695,29 +1699,34 @@ void Template::SetNativeDataProperty(
v8::Local<String> name, AccessorGetterCallback getter, v8::Local<String> name, AccessorGetterCallback getter,
AccessorSetterCallback setter, v8::Local<Value> data, AccessorSetterCallback setter, v8::Local<Value> data,
PropertyAttribute attribute, v8::Local<AccessorSignature> signature, PropertyAttribute attribute, v8::Local<AccessorSignature> signature,
AccessControl settings, SideEffectType getter_side_effect_type) { AccessControl settings, SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute, TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, true, false, getter_side_effect_type); signature, true, false, getter_side_effect_type,
setter_side_effect_type);
} }
void Template::SetNativeDataProperty( void Template::SetNativeDataProperty(
v8::Local<Name> name, AccessorNameGetterCallback getter, v8::Local<Name> name, AccessorNameGetterCallback getter,
AccessorNameSetterCallback setter, v8::Local<Value> data, AccessorNameSetterCallback setter, v8::Local<Value> data,
PropertyAttribute attribute, v8::Local<AccessorSignature> signature, PropertyAttribute attribute, v8::Local<AccessorSignature> signature,
AccessControl settings, SideEffectType getter_side_effect_type) { AccessControl settings, SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute, TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, true, false, getter_side_effect_type); signature, true, false, getter_side_effect_type,
setter_side_effect_type);
} }
void Template::SetLazyDataProperty(v8::Local<Name> name, void Template::SetLazyDataProperty(v8::Local<Name> name,
AccessorNameGetterCallback getter, AccessorNameGetterCallback getter,
v8::Local<Value> data, v8::Local<Value> data,
PropertyAttribute attribute, PropertyAttribute attribute,
SideEffectType getter_side_effect_type) { SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
TemplateSetAccessor(this, name, getter, TemplateSetAccessor(this, name, getter,
static_cast<AccessorNameSetterCallback>(nullptr), data, static_cast<AccessorNameSetterCallback>(nullptr), data,
DEFAULT, attribute, Local<AccessorSignature>(), true, DEFAULT, attribute, Local<AccessorSignature>(), true,
true, getter_side_effect_type); true, getter_side_effect_type, setter_side_effect_type);
} }
void Template::SetIntrinsicDataProperty(Local<Name> name, Intrinsic intrinsic, void Template::SetIntrinsicDataProperty(Local<Name> name, Intrinsic intrinsic,
@ -1737,10 +1746,11 @@ void ObjectTemplate::SetAccessor(v8::Local<String> name,
v8::Local<Value> data, AccessControl settings, v8::Local<Value> data, AccessControl settings,
PropertyAttribute attribute, PropertyAttribute attribute,
v8::Local<AccessorSignature> signature, v8::Local<AccessorSignature> signature,
SideEffectType getter_side_effect_type) { SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute, TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, i::FLAG_disable_old_api_accessors, false, signature, i::FLAG_disable_old_api_accessors, false,
getter_side_effect_type); getter_side_effect_type, setter_side_effect_type);
} }
void ObjectTemplate::SetAccessor(v8::Local<Name> name, void ObjectTemplate::SetAccessor(v8::Local<Name> name,
@ -1749,10 +1759,11 @@ void ObjectTemplate::SetAccessor(v8::Local<Name> name,
v8::Local<Value> data, AccessControl settings, v8::Local<Value> data, AccessControl settings,
PropertyAttribute attribute, PropertyAttribute attribute,
v8::Local<AccessorSignature> signature, v8::Local<AccessorSignature> signature,
SideEffectType getter_side_effect_type) { SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute, TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, i::FLAG_disable_old_api_accessors, false, signature, i::FLAG_disable_old_api_accessors, false,
getter_side_effect_type); getter_side_effect_type, setter_side_effect_type);
} }
template <typename Getter, typename Setter, typename Query, typename Descriptor, template <typename Getter, typename Setter, typename Query, typename Descriptor,
@ -1765,15 +1776,15 @@ static i::Handle<i::InterceptorInfo> CreateInterceptorInfo(
isolate->factory()->NewStruct(i::INTERCEPTOR_INFO_TYPE, i::TENURED)); isolate->factory()->NewStruct(i::INTERCEPTOR_INFO_TYPE, i::TENURED));
obj->set_flags(0); obj->set_flags(0);
if (getter != 0) SET_FIELD_WRAPPED(isolate, obj, set_getter, getter); if (getter != nullptr) SET_FIELD_WRAPPED(isolate, obj, set_getter, getter);
if (setter != 0) SET_FIELD_WRAPPED(isolate, obj, set_setter, setter); if (setter != nullptr) SET_FIELD_WRAPPED(isolate, obj, set_setter, setter);
if (query != 0) SET_FIELD_WRAPPED(isolate, obj, set_query, query); if (query != nullptr) SET_FIELD_WRAPPED(isolate, obj, set_query, query);
if (descriptor != 0) if (descriptor != nullptr)
SET_FIELD_WRAPPED(isolate, obj, set_descriptor, descriptor); SET_FIELD_WRAPPED(isolate, obj, set_descriptor, descriptor);
if (remover != 0) SET_FIELD_WRAPPED(isolate, obj, set_deleter, remover); if (remover != nullptr) SET_FIELD_WRAPPED(isolate, obj, set_deleter, remover);
if (enumerator != 0) if (enumerator != nullptr)
SET_FIELD_WRAPPED(isolate, obj, set_enumerator, enumerator); SET_FIELD_WRAPPED(isolate, obj, set_enumerator, enumerator);
if (definer != 0) SET_FIELD_WRAPPED(isolate, obj, set_definer, definer); if (definer != nullptr) SET_FIELD_WRAPPED(isolate, obj, set_definer, definer);
obj->set_can_intercept_symbols( obj->set_can_intercept_symbols(
!(static_cast<int>(flags) & !(static_cast<int>(flags) &
static_cast<int>(PropertyHandlerFlags::kOnlyInterceptStrings))); static_cast<int>(PropertyHandlerFlags::kOnlyInterceptStrings)));
@ -2001,24 +2012,15 @@ ScriptCompiler::CachedData::~CachedData() {
} }
} }
bool ScriptCompiler::ExternalSourceStream::SetBookmark() { return false; } bool ScriptCompiler::ExternalSourceStream::SetBookmark() { return false; }
void ScriptCompiler::ExternalSourceStream::ResetToBookmark() { UNREACHABLE(); } void ScriptCompiler::ExternalSourceStream::ResetToBookmark() { UNREACHABLE(); }
ScriptCompiler::StreamedSource::StreamedSource(ExternalSourceStream* stream, ScriptCompiler::StreamedSource::StreamedSource(ExternalSourceStream* stream,
Encoding encoding) Encoding encoding)
: impl_(new i::ScriptStreamingData(stream, encoding)) {} : impl_(new i::ScriptStreamingData(stream, encoding)) {}
ScriptCompiler::StreamedSource::~StreamedSource() { delete impl_; } ScriptCompiler::StreamedSource::~StreamedSource() = default;
const ScriptCompiler::CachedData*
ScriptCompiler::StreamedSource::GetCachedData() const {
return impl_->cached_data.get();
}
Local<Script> UnboundScript::BindToCurrentContext() { Local<Script> UnboundScript::BindToCurrentContext() {
auto function_info = auto function_info =
@ -2030,7 +2032,6 @@ Local<Script> UnboundScript::BindToCurrentContext() {
return ToApiHandle<Script>(function); return ToApiHandle<Script>(function);
} }
int UnboundScript::GetId() { int UnboundScript::GetId() {
auto function_info = auto function_info =
i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this)); i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
@ -2157,10 +2158,6 @@ int PrimitiveArray::Length() const {
return array->length(); return array->length();
} }
void PrimitiveArray::Set(int index, Local<Primitive> item) {
return Set(Isolate::GetCurrent(), index, item);
}
void PrimitiveArray::Set(Isolate* v8_isolate, int index, void PrimitiveArray::Set(Isolate* v8_isolate, int index,
Local<Primitive> item) { Local<Primitive> item) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
@ -2174,10 +2171,6 @@ void PrimitiveArray::Set(Isolate* v8_isolate, int index,
array->set(index, *i_item); array->set(index, *i_item);
} }
Local<Primitive> PrimitiveArray::Get(int index) {
return Get(Isolate::GetCurrent(), index);
}
Local<Primitive> PrimitiveArray::Get(Isolate* v8_isolate, int index) { Local<Primitive> PrimitiveArray::Get(Isolate* v8_isolate, int index) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
i::Handle<i::FixedArray> array = Utils::OpenHandle(this); i::Handle<i::FixedArray> array = Utils::OpenHandle(this);
@ -2534,6 +2527,7 @@ MaybeLocal<Function> ScriptCompiler::CompileFunctionInContext(
RETURN_ESCAPED(Utils::CallableToLocal(result)); RETURN_ESCAPED(Utils::CallableToLocal(result));
} }
void ScriptCompiler::ScriptStreamingTask::Run() { data_->task->Run(); }
ScriptCompiler::ScriptStreamingTask* ScriptCompiler::StartStreamingScript( ScriptCompiler::ScriptStreamingTask* ScriptCompiler::StartStreamingScript(
Isolate* v8_isolate, StreamedSource* source, CompileOptions options) { Isolate* v8_isolate, StreamedSource* source, CompileOptions options) {
@ -2544,10 +2538,13 @@ ScriptCompiler::ScriptStreamingTask* ScriptCompiler::StartStreamingScript(
// TODO(rmcilroy): remove CompileOptions from the API. // TODO(rmcilroy): remove CompileOptions from the API.
CHECK(options == ScriptCompiler::kNoCompileOptions); CHECK(options == ScriptCompiler::kNoCompileOptions);
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
return i::Compiler::NewBackgroundCompileTask(source->impl(), isolate); i::ScriptStreamingData* data = source->impl();
std::unique_ptr<i::BackgroundCompileTask> task =
base::make_unique<i::BackgroundCompileTask>(data, isolate);
data->task = std::move(task);
return new ScriptCompiler::ScriptStreamingTask(data);
} }
MaybeLocal<Script> ScriptCompiler::Compile(Local<Context> context, MaybeLocal<Script> ScriptCompiler::Compile(Local<Context> context,
StreamedSource* v8_source, StreamedSource* v8_source,
Local<String> full_source_string, Local<String> full_source_string,
@ -2562,11 +2559,11 @@ MaybeLocal<Script> ScriptCompiler::Compile(Local<Context> context,
isolate, origin.ResourceName(), origin.ResourceLineOffset(), isolate, origin.ResourceName(), origin.ResourceLineOffset(),
origin.ResourceColumnOffset(), origin.SourceMapUrl(), origin.ResourceColumnOffset(), origin.SourceMapUrl(),
origin.HostDefinedOptions()); origin.HostDefinedOptions());
i::ScriptStreamingData* streaming_data = v8_source->impl(); i::ScriptStreamingData* data = v8_source->impl();
i::MaybeHandle<i::SharedFunctionInfo> maybe_function_info = i::MaybeHandle<i::SharedFunctionInfo> maybe_function_info =
i::Compiler::GetSharedFunctionInfoForStreamedScript( i::Compiler::GetSharedFunctionInfoForStreamedScript(
isolate, str, script_details, origin.Options(), streaming_data); isolate, str, script_details, origin.Options(), data);
i::Handle<i::SharedFunctionInfo> result; i::Handle<i::SharedFunctionInfo> result;
has_pending_exception = !maybe_function_info.ToHandle(&result); has_pending_exception = !maybe_function_info.ToHandle(&result);
@ -2908,10 +2905,6 @@ void Message::PrintCurrentStackTrace(Isolate* isolate, FILE* out) {
// --- S t a c k T r a c e --- // --- S t a c k T r a c e ---
Local<StackFrame> StackTrace::GetFrame(uint32_t index) const {
return GetFrame(Isolate::GetCurrent(), index);
}
Local<StackFrame> StackTrace::GetFrame(Isolate* v8_isolate, Local<StackFrame> StackTrace::GetFrame(Isolate* v8_isolate,
uint32_t index) const { uint32_t index) const {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
@ -3572,17 +3565,20 @@ MaybeLocal<BigInt> Value::ToBigInt(Local<Context> context) const {
RETURN_ESCAPED(result); RETURN_ESCAPED(result);
} }
bool Value::BooleanValue(Isolate* v8_isolate) const {
return Utils::OpenHandle(this)->BooleanValue(
reinterpret_cast<i::Isolate*>(v8_isolate));
}
MaybeLocal<Boolean> Value::ToBoolean(Local<Context> context) const { MaybeLocal<Boolean> Value::ToBoolean(Local<Context> context) const {
auto obj = Utils::OpenHandle(this); return ToBoolean(context->GetIsolate());
if (obj->IsBoolean()) return ToApiHandle<Boolean>(obj);
auto isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate());
auto val = isolate->factory()->ToBoolean(obj->BooleanValue(isolate));
return ToApiHandle<Boolean>(val);
} }
Local<Boolean> Value::ToBoolean(Isolate* v8_isolate) const { Local<Boolean> Value::ToBoolean(Isolate* v8_isolate) const {
return ToBoolean(v8_isolate->GetCurrentContext()).ToLocalChecked(); auto isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
return ToApiHandle<Boolean>(
isolate->factory()->ToBoolean(BooleanValue(v8_isolate)));
} }
@ -3888,36 +3884,6 @@ void v8::RegExp::CheckCast(v8::Value* that) {
} }
bool Value::BooleanValue() const {
return BooleanValue(Isolate::GetCurrent()->GetCurrentContext())
.FromJust();
}
double Value::NumberValue() const {
return NumberValue(Isolate::GetCurrent()->GetCurrentContext())
.FromMaybe(std::numeric_limits<double>::quiet_NaN());
}
int64_t Value::IntegerValue() const {
return IntegerValue(Isolate::GetCurrent()->GetCurrentContext())
.FromMaybe(0);
}
uint32_t Value::Uint32Value() const {
return Uint32Value(Isolate::GetCurrent()->GetCurrentContext())
.FromMaybe(0);
}
int32_t Value::Int32Value() const {
return Int32Value(Isolate::GetCurrent()->GetCurrentContext())
.FromMaybe(0);
}
Maybe<bool> Value::BooleanValue(Local<Context> context) const { Maybe<bool> Value::BooleanValue(Local<Context> context) const {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate()); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate());
return Just(Utils::OpenHandle(this)->BooleanValue(isolate)); return Just(Utils::OpenHandle(this)->BooleanValue(isolate));
@ -4006,12 +3972,6 @@ MaybeLocal<Uint32> Value::ToArrayIndex(Local<Context> context) const {
} }
bool Value::Equals(Local<Value> that) const {
return Equals(Isolate::GetCurrent()->GetCurrentContext(), that)
.FromMaybe(false);
}
Maybe<bool> Value::Equals(Local<Context> context, Local<Value> that) const { Maybe<bool> Value::Equals(Local<Context> context, Local<Value> that) const {
i::Isolate* isolate = Utils::OpenHandle(*context)->GetIsolate(); i::Isolate* isolate = Utils::OpenHandle(*context)->GetIsolate();
auto self = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
@ -4063,7 +4023,8 @@ Maybe<bool> v8::Object::Set(v8::Local<v8::Context> context,
auto value_obj = Utils::OpenHandle(*value); auto value_obj = Utils::OpenHandle(*value);
has_pending_exception = has_pending_exception =
i::Runtime::SetObjectProperty(isolate, self, key_obj, value_obj, i::Runtime::SetObjectProperty(isolate, self, key_obj, value_obj,
i::LanguageMode::kSloppy) i::LanguageMode::kSloppy,
i::StoreOrigin::kMaybeKeyed)
.is_null(); .is_null();
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
return Just(true); return Just(true);
@ -4617,8 +4578,8 @@ static Maybe<bool> ObjectSetAccessor(
Local<Context> context, Object* self, Local<Name> name, Getter getter, Local<Context> context, Object* self, Local<Name> name, Getter getter,
Setter setter, Data data, AccessControl settings, Setter setter, Data data, AccessControl settings,
PropertyAttribute attributes, bool is_special_data_property, PropertyAttribute attributes, bool is_special_data_property,
bool replace_on_access, bool replace_on_access, SideEffectType getter_side_effect_type,
SideEffectType getter_side_effect_type = SideEffectType::kHasSideEffect) { SideEffectType setter_side_effect_type) {
auto isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate()); auto isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate());
ENTER_V8_NO_SCRIPT(isolate, context, Object, SetAccessor, Nothing<bool>(), ENTER_V8_NO_SCRIPT(isolate, context, Object, SetAccessor, Nothing<bool>(),
i::HandleScope); i::HandleScope);
@ -4629,8 +4590,8 @@ static Maybe<bool> ObjectSetAccessor(
i::Handle<i::AccessorInfo> info = i::Handle<i::AccessorInfo> info =
MakeAccessorInfo(isolate, name, getter, setter, data, settings, signature, MakeAccessorInfo(isolate, name, getter, setter, data, settings, signature,
is_special_data_property, replace_on_access); is_special_data_property, replace_on_access);
info->set_has_no_side_effect(getter_side_effect_type == info->set_getter_side_effect_type(getter_side_effect_type);
SideEffectType::kHasNoSideEffect); info->set_setter_side_effect_type(setter_side_effect_type);
if (info.is_null()) return Nothing<bool>(); if (info.is_null()) return Nothing<bool>();
bool fast = obj->HasFastProperties(); bool fast = obj->HasFastProperties();
i::Handle<i::Object> result; i::Handle<i::Object> result;
@ -4653,11 +4614,12 @@ Maybe<bool> Object::SetAccessor(Local<Context> context, Local<Name> name,
AccessorNameSetterCallback setter, AccessorNameSetterCallback setter,
MaybeLocal<Value> data, AccessControl settings, MaybeLocal<Value> data, AccessControl settings,
PropertyAttribute attribute, PropertyAttribute attribute,
SideEffectType getter_side_effect_type) { SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
return ObjectSetAccessor(context, this, name, getter, setter, return ObjectSetAccessor(context, this, name, getter, setter,
data.FromMaybe(Local<Value>()), settings, attribute, data.FromMaybe(Local<Value>()), settings, attribute,
i::FLAG_disable_old_api_accessors, false, i::FLAG_disable_old_api_accessors, false,
getter_side_effect_type); getter_side_effect_type, setter_side_effect_type);
} }
@ -4684,19 +4646,22 @@ Maybe<bool> Object::SetNativeDataProperty(
v8::Local<v8::Context> context, v8::Local<Name> name, v8::Local<v8::Context> context, v8::Local<Name> name,
AccessorNameGetterCallback getter, AccessorNameSetterCallback setter, AccessorNameGetterCallback getter, AccessorNameSetterCallback setter,
v8::Local<Value> data, PropertyAttribute attributes, v8::Local<Value> data, PropertyAttribute attributes,
SideEffectType getter_side_effect_type) { SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
return ObjectSetAccessor(context, this, name, getter, setter, data, DEFAULT, return ObjectSetAccessor(context, this, name, getter, setter, data, DEFAULT,
attributes, true, false, getter_side_effect_type); attributes, true, false, getter_side_effect_type,
setter_side_effect_type);
} }
Maybe<bool> Object::SetLazyDataProperty( Maybe<bool> Object::SetLazyDataProperty(
v8::Local<v8::Context> context, v8::Local<Name> name, v8::Local<v8::Context> context, v8::Local<Name> name,
AccessorNameGetterCallback getter, v8::Local<Value> data, AccessorNameGetterCallback getter, v8::Local<Value> data,
PropertyAttribute attributes, SideEffectType getter_side_effect_type) { PropertyAttribute attributes, SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
return ObjectSetAccessor(context, this, name, getter, return ObjectSetAccessor(context, this, name, getter,
static_cast<AccessorNameSetterCallback>(nullptr), static_cast<AccessorNameSetterCallback>(nullptr),
data, DEFAULT, attributes, true, true, data, DEFAULT, attributes, true, true,
getter_side_effect_type); getter_side_effect_type, setter_side_effect_type);
} }
Maybe<bool> v8::Object::HasOwnProperty(Local<Context> context, Maybe<bool> v8::Object::HasOwnProperty(Local<Context> context,
@ -5343,10 +5308,6 @@ bool String::ContainsOnlyOneByte() const {
return helper.Check(*str); return helper.Check(*str);
} }
int String::Utf8Length() const {
return Utf8Length(Isolate::GetCurrent());
}
int String::Utf8Length(Isolate* isolate) const { int String::Utf8Length(Isolate* isolate) const {
i::Handle<i::String> str = Utils::OpenHandle(this); i::Handle<i::String> str = Utils::OpenHandle(this);
str = i::String::Flatten(reinterpret_cast<i::Isolate*>(isolate), str); str = i::String::Flatten(reinterpret_cast<i::Isolate*>(isolate), str);
@ -5570,14 +5531,6 @@ static bool RecursivelySerializeToUtf8(i::String* current,
return true; return true;
} }
int String::WriteUtf8(char* buffer, int capacity,
int* nchars_ref, int options) const {
return WriteUtf8(Isolate::GetCurrent(),
buffer, capacity, nchars_ref, options);
}
int String::WriteUtf8(Isolate* v8_isolate, char* buffer, int capacity, int String::WriteUtf8(Isolate* v8_isolate, char* buffer, int capacity,
int* nchars_ref, int options) const { int* nchars_ref, int options) const {
i::Handle<i::String> str = Utils::OpenHandle(this); i::Handle<i::String> str = Utils::OpenHandle(this);
@ -5645,18 +5598,6 @@ static inline int WriteHelper(i::Isolate* isolate, const String* string,
} }
int String::WriteOneByte(uint8_t* buffer, int start,
int length, int options) const {
return WriteOneByte(Isolate::GetCurrent(), buffer, start, length, options);
}
int String::Write(uint16_t* buffer, int start, int length,
int options) const {
return Write(Isolate::GetCurrent(), buffer, start, length, options);
}
int String::WriteOneByte(Isolate* isolate, uint8_t* buffer, int start, int String::WriteOneByte(Isolate* isolate, uint8_t* buffer, int start,
int length, int options) const { int length, int options) const {
return WriteHelper(reinterpret_cast<i::Isolate*>(isolate), this, buffer, return WriteHelper(reinterpret_cast<i::Isolate*>(isolate), this, buffer,
@ -6010,16 +5951,16 @@ HeapStatistics::HeapStatistics()
malloced_memory_(0), malloced_memory_(0),
external_memory_(0), external_memory_(0),
peak_malloced_memory_(0), peak_malloced_memory_(0),
does_zap_garbage_(0), does_zap_garbage_(false),
number_of_native_contexts_(0), number_of_native_contexts_(0),
number_of_detached_contexts_(0) {} number_of_detached_contexts_(0) {}
HeapSpaceStatistics::HeapSpaceStatistics(): space_name_(0), HeapSpaceStatistics::HeapSpaceStatistics()
space_size_(0), : space_name_(nullptr),
space_used_size_(0), space_size_(0),
space_available_size_(0), space_used_size_(0),
physical_space_size_(0) { } space_available_size_(0),
physical_space_size_(0) {}
HeapObjectStatistics::HeapObjectStatistics() HeapObjectStatistics::HeapObjectStatistics()
: object_type_(nullptr), : object_type_(nullptr),
@ -6604,11 +6545,6 @@ MaybeLocal<String> String::NewFromTwoByte(Isolate* isolate,
return result; return result;
} }
Local<String> v8::String::Concat(Local<String> left,
Local<String> right) {
return Concat(Isolate::GetCurrent(), left, right);
}
Local<String> v8::String::Concat(Isolate* v8_isolate, Local<String> left, Local<String> v8::String::Concat(Isolate* v8_isolate, Local<String> left,
Local<String> right) { Local<String> right) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
@ -6793,7 +6729,6 @@ double v8::NumberObject::ValueOf() const {
} }
Local<v8::Value> v8::BigIntObject::New(Isolate* isolate, int64_t value) { Local<v8::Value> v8::BigIntObject::New(Isolate* isolate, int64_t value) {
CHECK(i::FLAG_harmony_bigint);
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
LOG_API(i_isolate, BigIntObject, New); LOG_API(i_isolate, BigIntObject, New);
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
@ -6835,11 +6770,6 @@ bool v8::BooleanObject::ValueOf() const {
} }
Local<v8::Value> v8::StringObject::New(Local<String> value) {
return New(Isolate::GetCurrent(), value);
}
Local<v8::Value> v8::StringObject::New(Isolate* v8_isolate, Local<v8::Value> v8::StringObject::New(Isolate* v8_isolate,
Local<String> value) { Local<String> value) {
i::Handle<i::String> string = Utils::OpenHandle(*value); i::Handle<i::String> string = Utils::OpenHandle(*value);
@ -6981,23 +6911,6 @@ Local<v8::Array> v8::Array::New(Isolate* isolate, int length) {
return Utils::ToLocal(obj); return Utils::ToLocal(obj);
} }
Local<v8::Array> v8::Array::New(Isolate* isolate, Local<Value>* elements,
size_t length) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::Factory* factory = i_isolate->factory();
LOG_API(i_isolate, Array, New);
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
int len = static_cast<int>(length);
i::Handle<i::FixedArray> result = factory->NewFixedArray(len);
for (int i = 0; i < len; i++) {
i::Handle<i::Object> element = Utils::OpenHandle(*elements[i]);
result->set(i, *element);
}
return Utils::ToLocal(
factory->NewJSArrayWithElements(result, i::PACKED_ELEMENTS, len));
}
uint32_t v8::Array::Length() const { uint32_t v8::Array::Length() const {
i::Handle<i::JSArray> obj = Utils::OpenHandle(this); i::Handle<i::JSArray> obj = Utils::OpenHandle(this);
@ -7103,30 +7016,30 @@ i::Handle<i::JSArray> MapAsArray(i::Isolate* isolate, i::Object* table_obj,
i::Factory* factory = isolate->factory(); i::Factory* factory = isolate->factory();
i::Handle<i::OrderedHashMap> table(i::OrderedHashMap::cast(table_obj), i::Handle<i::OrderedHashMap> table(i::OrderedHashMap::cast(table_obj),
isolate); isolate);
const bool collect_keys = if (offset >= table->NumberOfElements()) return factory->NewJSArray(0);
kind == MapAsArrayKind::kEntries || kind == MapAsArrayKind::kKeys; int length = (table->NumberOfElements() - offset) *
const bool collect_values = (kind == MapAsArrayKind::kEntries ? 2 : 1);
kind == MapAsArrayKind::kEntries || kind == MapAsArrayKind::kValues; i::Handle<i::FixedArray> result = factory->NewFixedArray(length);
int capacity = table->UsedCapacity();
int max_length =
(capacity - offset) * ((collect_keys && collect_values) ? 2 : 1);
i::Handle<i::FixedArray> result = factory->NewFixedArray(max_length);
int result_index = 0; int result_index = 0;
{ {
i::DisallowHeapAllocation no_gc; i::DisallowHeapAllocation no_gc;
int capacity = table->UsedCapacity();
i::Oddball* the_hole = i::ReadOnlyRoots(isolate).the_hole_value(); i::Oddball* the_hole = i::ReadOnlyRoots(isolate).the_hole_value();
for (int i = offset; i < capacity; ++i) { for (int i = 0; i < capacity; ++i) {
i::Object* key = table->KeyAt(i); i::Object* key = table->KeyAt(i);
if (key == the_hole) continue; if (key == the_hole) continue;
if (collect_keys) result->set(result_index++, key); if (offset-- > 0) continue;
if (collect_values) result->set(result_index++, table->ValueAt(i)); if (kind == MapAsArrayKind::kEntries || kind == MapAsArrayKind::kKeys) {
result->set(result_index++, key);
}
if (kind == MapAsArrayKind::kEntries || kind == MapAsArrayKind::kValues) {
result->set(result_index++, table->ValueAt(i));
}
} }
} }
DCHECK_GE(max_length, result_index); DCHECK_EQ(result_index, result->length());
if (result_index == 0) return factory->NewJSArray(0); DCHECK_EQ(result_index, length);
result->Shrink(isolate, result_index); return factory->NewJSArrayWithElements(result, i::PACKED_ELEMENTS, length);
return factory->NewJSArrayWithElements(result, i::PACKED_ELEMENTS,
result_index);
} }
} // namespace } // namespace
@ -7211,26 +7124,24 @@ i::Handle<i::JSArray> SetAsArray(i::Isolate* isolate, i::Object* table_obj,
i::Factory* factory = isolate->factory(); i::Factory* factory = isolate->factory();
i::Handle<i::OrderedHashSet> table(i::OrderedHashSet::cast(table_obj), i::Handle<i::OrderedHashSet> table(i::OrderedHashSet::cast(table_obj),
isolate); isolate);
// Elements skipped by |offset| may already be deleted. int length = table->NumberOfElements() - offset;
int capacity = table->UsedCapacity(); if (length <= 0) return factory->NewJSArray(0);
int max_length = capacity - offset; i::Handle<i::FixedArray> result = factory->NewFixedArray(length);
if (max_length == 0) return factory->NewJSArray(0);
i::Handle<i::FixedArray> result = factory->NewFixedArray(max_length);
int result_index = 0; int result_index = 0;
{ {
i::DisallowHeapAllocation no_gc; i::DisallowHeapAllocation no_gc;
int capacity = table->UsedCapacity();
i::Oddball* the_hole = i::ReadOnlyRoots(isolate).the_hole_value(); i::Oddball* the_hole = i::ReadOnlyRoots(isolate).the_hole_value();
for (int i = offset; i < capacity; ++i) { for (int i = 0; i < capacity; ++i) {
i::Object* key = table->KeyAt(i); i::Object* key = table->KeyAt(i);
if (key == the_hole) continue; if (key == the_hole) continue;
if (offset-- > 0) continue;
result->set(result_index++, key); result->set(result_index++, key);
} }
} }
DCHECK_GE(max_length, result_index); DCHECK_EQ(result_index, result->length());
if (result_index == 0) return factory->NewJSArray(0); DCHECK_EQ(result_index, length);
result->Shrink(isolate, result_index); return factory->NewJSArrayWithElements(result, i::PACKED_ELEMENTS, length);
return factory->NewJSArrayWithElements(result, i::PACKED_ELEMENTS,
result_index);
} }
} // namespace } // namespace
@ -7501,7 +7412,7 @@ class AsyncCompilationResolver : public i::wasm::CompilationResultResolver {
reinterpret_cast<i::Isolate*>(isolate)->global_handles()->Create( reinterpret_cast<i::Isolate*>(isolate)->global_handles()->Create(
*Utils::OpenHandle(*promise))) {} *Utils::OpenHandle(*promise))) {}
~AsyncCompilationResolver() { ~AsyncCompilationResolver() override {
i::GlobalHandles::Destroy(i::Handle<i::Object>::cast(promise_).location()); i::GlobalHandles::Destroy(i::Handle<i::Object>::cast(promise_).location());
} }
@ -7540,9 +7451,6 @@ void WasmModuleObjectBuilderStreaming::Finish() {
void WasmModuleObjectBuilderStreaming::Abort(MaybeLocal<Value> exception) { void WasmModuleObjectBuilderStreaming::Abort(MaybeLocal<Value> exception) {
} }
WasmModuleObjectBuilderStreaming::~WasmModuleObjectBuilderStreaming() {
}
// static // static
v8::ArrayBuffer::Allocator* v8::ArrayBuffer::Allocator::NewDefaultAllocator() { v8::ArrayBuffer::Allocator* v8::ArrayBuffer::Allocator::NewDefaultAllocator() {
return new ArrayBufferAllocator(); return new ArrayBufferAllocator();
@ -7602,9 +7510,8 @@ void ArrayBufferDeleter(void* buffer, size_t length, void* info) {
v8::ArrayBuffer::Contents v8::ArrayBuffer::GetContents() { v8::ArrayBuffer::Contents v8::ArrayBuffer::GetContents() {
i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this); i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
size_t byte_length = static_cast<size_t>(self->byte_length()->Number());
Contents contents( Contents contents(
self->backing_store(), byte_length, self->allocation_base(), self->backing_store(), self->byte_length(), self->allocation_base(),
self->allocation_length(), self->allocation_length(),
self->is_wasm_memory() ? Allocator::AllocationMode::kReservation self->is_wasm_memory() ? Allocator::AllocationMode::kReservation
: Allocator::AllocationMode::kNormal, : Allocator::AllocationMode::kNormal,
@ -7632,7 +7539,7 @@ void v8::ArrayBuffer::Neuter() {
size_t v8::ArrayBuffer::ByteLength() const { size_t v8::ArrayBuffer::ByteLength() const {
i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this); i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
return static_cast<size_t>(obj->byte_length()->Number()); return obj->byte_length();
} }
@ -7656,6 +7563,7 @@ Local<ArrayBuffer> v8::ArrayBuffer::New(Isolate* isolate, void* data,
ArrayBufferCreationMode mode) { ArrayBufferCreationMode mode) {
// Embedders must guarantee that the external backing store is valid. // Embedders must guarantee that the external backing store is valid.
CHECK(byte_length == 0 || data != nullptr); CHECK(byte_length == 0 || data != nullptr);
CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength);
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
LOG_API(i_isolate, ArrayBuffer, New); LOG_API(i_isolate, ArrayBuffer, New);
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
@ -7687,9 +7595,8 @@ Local<ArrayBuffer> v8::ArrayBufferView::Buffer() {
size_t v8::ArrayBufferView::CopyContents(void* dest, size_t byte_length) { size_t v8::ArrayBufferView::CopyContents(void* dest, size_t byte_length) {
i::Handle<i::JSArrayBufferView> self = Utils::OpenHandle(this); i::Handle<i::JSArrayBufferView> self = Utils::OpenHandle(this);
size_t byte_offset = i::NumberToSize(self->byte_offset()); size_t byte_offset = self->byte_offset();
size_t bytes_to_copy = size_t bytes_to_copy = i::Min(byte_length, self->byte_length());
i::Min(byte_length, i::NumberToSize(self->byte_length()));
if (bytes_to_copy) { if (bytes_to_copy) {
i::DisallowHeapAllocation no_gc; i::DisallowHeapAllocation no_gc;
i::Isolate* isolate = self->GetIsolate(); i::Isolate* isolate = self->GetIsolate();
@ -7720,19 +7627,19 @@ bool v8::ArrayBufferView::HasBuffer() const {
size_t v8::ArrayBufferView::ByteOffset() { size_t v8::ArrayBufferView::ByteOffset() {
i::Handle<i::JSArrayBufferView> obj = Utils::OpenHandle(this); i::Handle<i::JSArrayBufferView> obj = Utils::OpenHandle(this);
return static_cast<size_t>(obj->byte_offset()->Number()); return obj->WasNeutered() ? 0 : obj->byte_offset();
} }
size_t v8::ArrayBufferView::ByteLength() { size_t v8::ArrayBufferView::ByteLength() {
i::Handle<i::JSArrayBufferView> obj = Utils::OpenHandle(this); i::Handle<i::JSArrayBufferView> obj = Utils::OpenHandle(this);
return static_cast<size_t>(obj->byte_length()->Number()); return obj->WasNeutered() ? 0 : obj->byte_length();
} }
size_t v8::TypedArray::Length() { size_t v8::TypedArray::Length() {
i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this); i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this);
return obj->length_value(); return obj->WasNeutered() ? 0 : obj->length_value();
} }
static_assert(v8::TypedArray::kMaxLength == i::Smi::kMaxValue, static_assert(v8::TypedArray::kMaxLength == i::Smi::kMaxValue,
@ -7840,9 +7747,8 @@ v8::SharedArrayBuffer::Contents::Contents(
v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::GetContents() { v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::GetContents() {
i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this); i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
size_t byte_length = static_cast<size_t>(self->byte_length()->Number());
Contents contents( Contents contents(
self->backing_store(), byte_length, self->allocation_base(), self->backing_store(), self->byte_length(), self->allocation_base(),
self->allocation_length(), self->allocation_length(),
self->is_wasm_memory() self->is_wasm_memory()
? ArrayBuffer::Allocator::AllocationMode::kReservation ? ArrayBuffer::Allocator::AllocationMode::kReservation
@ -7858,7 +7764,7 @@ v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::GetContents() {
size_t v8::SharedArrayBuffer::ByteLength() const { size_t v8::SharedArrayBuffer::ByteLength() const {
i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this); i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
return static_cast<size_t>(obj->byte_length()->Number()); return obj->byte_length();
} }
Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(Isolate* isolate, Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(Isolate* isolate,
@ -7912,8 +7818,8 @@ Local<Symbol> v8::Symbol::New(Isolate* isolate, Local<String> name) {
Local<Symbol> v8::Symbol::For(Isolate* isolate, Local<String> name) { Local<Symbol> v8::Symbol::For(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::Handle<i::String> i_name = Utils::OpenHandle(*name); i::Handle<i::String> i_name = Utils::OpenHandle(*name);
return Utils::ToLocal(i_isolate->SymbolFor( return Utils::ToLocal(
i::Heap::kPublicSymbolTableRootIndex, i_name, false)); i_isolate->SymbolFor(i::RootIndex::kPublicSymbolTable, i_name, false));
} }
@ -7921,10 +7827,11 @@ Local<Symbol> v8::Symbol::ForApi(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::Handle<i::String> i_name = Utils::OpenHandle(*name); i::Handle<i::String> i_name = Utils::OpenHandle(*name);
return Utils::ToLocal( return Utils::ToLocal(
i_isolate->SymbolFor(i::Heap::kApiSymbolTableRootIndex, i_name, false)); i_isolate->SymbolFor(i::RootIndex::kApiSymbolTable, i_name, false));
} }
#define WELL_KNOWN_SYMBOLS(V) \ #define WELL_KNOWN_SYMBOLS(V) \
V(AsyncIterator, async_iterator) \
V(HasInstance, has_instance) \ V(HasInstance, has_instance) \
V(IsConcatSpreadable, is_concat_spreadable) \ V(IsConcatSpreadable, is_concat_spreadable) \
V(Iterator, iterator) \ V(Iterator, iterator) \
@ -7961,8 +7868,8 @@ Local<Private> v8::Private::New(Isolate* isolate, Local<String> name) {
Local<Private> v8::Private::ForApi(Isolate* isolate, Local<String> name) { Local<Private> v8::Private::ForApi(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::Handle<i::String> i_name = Utils::OpenHandle(*name); i::Handle<i::String> i_name = Utils::OpenHandle(*name);
Local<Symbol> result = Utils::ToLocal(i_isolate->SymbolFor( Local<Symbol> result = Utils::ToLocal(
i::Heap::kApiPrivateSymbolTableRootIndex, i_name, true)); i_isolate->SymbolFor(i::RootIndex::kApiPrivateSymbolTable, i_name, true));
return v8::Local<Private>(reinterpret_cast<Private*>(*result)); return v8::Local<Private>(reinterpret_cast<Private*>(*result));
} }
@ -8003,7 +7910,6 @@ Local<Integer> v8::Integer::NewFromUnsigned(Isolate* isolate, uint32_t value) {
} }
Local<BigInt> v8::BigInt::New(Isolate* isolate, int64_t value) { Local<BigInt> v8::BigInt::New(Isolate* isolate, int64_t value) {
CHECK(i::FLAG_harmony_bigint);
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(internal_isolate); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(internal_isolate);
i::Handle<i::BigInt> result = i::BigInt::FromInt64(internal_isolate, value); i::Handle<i::BigInt> result = i::BigInt::FromInt64(internal_isolate, value);
@ -8011,7 +7917,6 @@ Local<BigInt> v8::BigInt::New(Isolate* isolate, int64_t value) {
} }
Local<BigInt> v8::BigInt::NewFromUnsigned(Isolate* isolate, uint64_t value) { Local<BigInt> v8::BigInt::NewFromUnsigned(Isolate* isolate, uint64_t value) {
CHECK(i::FLAG_harmony_bigint);
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(internal_isolate); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(internal_isolate);
i::Handle<i::BigInt> result = i::BigInt::FromUint64(internal_isolate, value); i::Handle<i::BigInt> result = i::BigInt::FromUint64(internal_isolate, value);
@ -8021,7 +7926,6 @@ Local<BigInt> v8::BigInt::NewFromUnsigned(Isolate* isolate, uint64_t value) {
MaybeLocal<BigInt> v8::BigInt::NewFromWords(Local<Context> context, MaybeLocal<BigInt> v8::BigInt::NewFromWords(Local<Context> context,
int sign_bit, int word_count, int sign_bit, int word_count,
const uint64_t* words) { const uint64_t* words) {
CHECK(i::FLAG_harmony_bigint);
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate()); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate());
ENTER_V8_NO_SCRIPT(isolate, context, BigInt, NewFromWords, ENTER_V8_NO_SCRIPT(isolate, context, BigInt, NewFromWords,
MaybeLocal<BigInt>(), InternalEscapableScope); MaybeLocal<BigInt>(), InternalEscapableScope);
@ -8186,6 +8090,11 @@ void Isolate::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
isolate->heap()->SetEmbedderHeapTracer(tracer); isolate->heap()->SetEmbedderHeapTracer(tracer);
} }
EmbedderHeapTracer* Isolate::GetEmbedderHeapTracer() {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
return isolate->heap()->GetEmbedderHeapTracer();
}
void Isolate::SetGetExternallyAllocatedMemoryInBytesCallback( void Isolate::SetGetExternallyAllocatedMemoryInBytesCallback(
GetExternallyAllocatedMemoryInBytesCallback callback) { GetExternallyAllocatedMemoryInBytesCallback callback) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
@ -8225,9 +8134,9 @@ void Isolate::RequestGarbageCollectionForTesting(GarbageCollectionType type) {
kGCCallbackFlagForced); kGCCallbackFlagForced);
} else { } else {
DCHECK_EQ(kFullGarbageCollection, type); DCHECK_EQ(kFullGarbageCollection, type);
reinterpret_cast<i::Isolate*>(this)->heap()->CollectAllGarbage( reinterpret_cast<i::Isolate*>(this)->heap()->PreciseCollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask, i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting,
i::GarbageCollectionReason::kTesting, kGCCallbackFlagForced); kGCCallbackFlagForced);
} }
} }
@ -8296,7 +8205,11 @@ void Isolate::Initialize(Isolate* isolate,
if (params.entry_hook || !i::Snapshot::Initialize(i_isolate)) { if (params.entry_hook || !i::Snapshot::Initialize(i_isolate)) {
// If snapshot data was provided and we failed to deserialize it must // If snapshot data was provided and we failed to deserialize it must
// have been corrupted. // have been corrupted.
CHECK_NULL(i_isolate->snapshot_blob()); if (i_isolate->snapshot_blob() != nullptr) {
FATAL(
"Failed to deserialize the V8 snapshot blob. This can mean that the "
"snapshot blob file is corrupted or missing.");
}
base::ElapsedTimer timer; base::ElapsedTimer timer;
if (i::FLAG_profile_deserialization) timer.Start(); if (i::FLAG_profile_deserialization) timer.Start();
i_isolate->Init(nullptr); i_isolate->Init(nullptr);
@ -8366,6 +8279,11 @@ void Isolate::SetHostInitializeImportMetaObjectCallback(
isolate->SetHostInitializeImportMetaObjectCallback(callback); isolate->SetHostInitializeImportMetaObjectCallback(callback);
} }
void Isolate::SetPrepareStackTraceCallback(PrepareStackTraceCallback callback) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
isolate->SetPrepareStackTraceCallback(callback);
}
Isolate::DisallowJavascriptExecutionScope::DisallowJavascriptExecutionScope( Isolate::DisallowJavascriptExecutionScope::DisallowJavascriptExecutionScope(
Isolate* isolate, Isolate* isolate,
Isolate::DisallowJavascriptExecutionScope::OnFailure on_failure) Isolate::DisallowJavascriptExecutionScope::OnFailure on_failure)
@ -8791,17 +8709,17 @@ void Isolate::SetStackLimit(uintptr_t stack_limit) {
void Isolate::GetCodeRange(void** start, size_t* length_in_bytes) { void Isolate::GetCodeRange(void** start, size_t* length_in_bytes) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
if (isolate->heap()->memory_allocator()->code_range()->valid()) { const base::AddressRegion& code_range =
*start = reinterpret_cast<void*>( isolate->heap()->memory_allocator()->code_range();
isolate->heap()->memory_allocator()->code_range()->start()); *start = reinterpret_cast<void*>(code_range.begin());
*length_in_bytes = *length_in_bytes = code_range.size();
isolate->heap()->memory_allocator()->code_range()->size();
} else {
*start = nullptr;
*length_in_bytes = 0;
}
} }
MemoryRange Isolate::GetEmbeddedCodeRange() {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
return {reinterpret_cast<const void*>(isolate->embedded_blob()),
isolate->embedded_blob_size()};
}
#define CALLBACK_SETTER(ExternalName, Type, InternalName) \ #define CALLBACK_SETTER(ExternalName, Type, InternalName) \
void Isolate::Set##ExternalName(Type callback) { \ void Isolate::Set##ExternalName(Type callback) { \
@ -8986,9 +8904,6 @@ bool MicrotasksScope::IsRunningMicrotasks(Isolate* v8Isolate) {
return isolate->IsRunningMicrotasks(); return isolate->IsRunningMicrotasks();
} }
String::Utf8Value::Utf8Value(v8::Local<v8::Value> obj)
: Utf8Value(Isolate::GetCurrent(), obj) {}
String::Utf8Value::Utf8Value(v8::Isolate* isolate, v8::Local<v8::Value> obj) String::Utf8Value::Utf8Value(v8::Isolate* isolate, v8::Local<v8::Value> obj)
: str_(nullptr), length_(0) { : str_(nullptr), length_(0) {
if (obj.IsEmpty()) return; if (obj.IsEmpty()) return;
@ -9008,9 +8923,6 @@ String::Utf8Value::~Utf8Value() {
i::DeleteArray(str_); i::DeleteArray(str_);
} }
String::Value::Value(v8::Local<v8::Value> obj)
: Value(Isolate::GetCurrent(), obj) {}
String::Value::Value(v8::Isolate* isolate, v8::Local<v8::Value> obj) String::Value::Value(v8::Isolate* isolate, v8::Local<v8::Value> obj)
: str_(nullptr), length_(0) { : str_(nullptr), length_(0) {
if (obj.IsEmpty()) return; if (obj.IsEmpty()) return;
@ -9192,7 +9104,10 @@ int debug::Script::ColumnOffset() const {
std::vector<int> debug::Script::LineEnds() const { std::vector<int> debug::Script::LineEnds() const {
i::Handle<i::Script> script = Utils::OpenHandle(this); i::Handle<i::Script> script = Utils::OpenHandle(this);
if (script->type() == i::Script::TYPE_WASM) return std::vector<int>(); if (script->type() == i::Script::TYPE_WASM &&
this->SourceMappingURL().IsEmpty()) {
return std::vector<int>();
}
i::Isolate* isolate = script->GetIsolate(); i::Isolate* isolate = script->GetIsolate();
i::HandleScope scope(isolate); i::HandleScope scope(isolate);
i::Script::InitLineEnds(script); i::Script::InitLineEnds(script);
@ -9281,7 +9196,8 @@ bool debug::Script::GetPossibleBreakpoints(
std::vector<debug::BreakLocation>* locations) const { std::vector<debug::BreakLocation>* locations) const {
CHECK(!start.IsEmpty()); CHECK(!start.IsEmpty());
i::Handle<i::Script> script = Utils::OpenHandle(this); i::Handle<i::Script> script = Utils::OpenHandle(this);
if (script->type() == i::Script::TYPE_WASM) { if (script->type() == i::Script::TYPE_WASM &&
this->SourceMappingURL().IsEmpty()) {
i::WasmModuleObject* module_object = i::WasmModuleObject* module_object =
i::WasmModuleObject::cast(script->wasm_module_object()); i::WasmModuleObject::cast(script->wasm_module_object());
return module_object->GetPossibleBreakpoints(start, end, locations); return module_object->GetPossibleBreakpoints(start, end, locations);
@ -9332,9 +9248,13 @@ bool debug::Script::GetPossibleBreakpoints(
int debug::Script::GetSourceOffset(const debug::Location& location) const { int debug::Script::GetSourceOffset(const debug::Location& location) const {
i::Handle<i::Script> script = Utils::OpenHandle(this); i::Handle<i::Script> script = Utils::OpenHandle(this);
if (script->type() == i::Script::TYPE_WASM) { if (script->type() == i::Script::TYPE_WASM) {
return i::WasmModuleObject::cast(script->wasm_module_object()) if (this->SourceMappingURL().IsEmpty()) {
->GetFunctionOffset(location.GetLineNumber()) + return i::WasmModuleObject::cast(script->wasm_module_object())
location.GetColumnNumber(); ->GetFunctionOffset(location.GetLineNumber()) +
location.GetColumnNumber();
}
DCHECK_EQ(0, location.GetLineNumber());
return location.GetColumnNumber();
} }
int line = std::max(location.GetLineNumber() - script->line_offset(), 0); int line = std::max(location.GetLineNumber() - script->line_offset(), 0);
@ -9777,10 +9697,10 @@ int debug::GetNativeAccessorDescriptor(v8::Local<v8::Context> context,
} }
auto isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate()); auto isolate = reinterpret_cast<i::Isolate*>(context->GetIsolate());
int result = 0; int result = 0;
#define IS_BUILTIN_ACESSOR(name, _) \ #define IS_BUILTIN_ACESSOR(_, name, ...) \
if (*structure == *isolate->factory()->name##_accessor()) \ if (*structure == *isolate->factory()->name##_accessor()) \
result |= static_cast<int>(debug::NativeAccessorType::IsBuiltin); result |= static_cast<int>(debug::NativeAccessorType::IsBuiltin);
ACCESSOR_INFO_LIST(IS_BUILTIN_ACESSOR) ACCESSOR_INFO_LIST_GENERATOR(IS_BUILTIN_ACESSOR, /* not used */)
#undef IS_BUILTIN_ACESSOR #undef IS_BUILTIN_ACESSOR
i::Handle<i::AccessorInfo> accessor_info = i::Handle<i::AccessorInfo> accessor_info =
i::Handle<i::AccessorInfo>::cast(structure); i::Handle<i::AccessorInfo>::cast(structure);
@ -9826,7 +9746,7 @@ debug::PostponeInterruptsScope::PostponeInterruptsScope(v8::Isolate* isolate)
new i::PostponeInterruptsScope(reinterpret_cast<i::Isolate*>(isolate), new i::PostponeInterruptsScope(reinterpret_cast<i::Isolate*>(isolate),
i::StackGuard::API_INTERRUPT)) {} i::StackGuard::API_INTERRUPT)) {}
debug::PostponeInterruptsScope::~PostponeInterruptsScope() {} debug::PostponeInterruptsScope::~PostponeInterruptsScope() = default;
Local<String> CpuProfileNode::GetFunctionName() const { Local<String> CpuProfileNode::GetFunctionName() const {
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this); const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
@ -9950,6 +9870,47 @@ debug::TypeProfile::ScriptData debug::TypeProfile::GetScriptData(
return ScriptData(i, type_profile_); return ScriptData(i, type_profile_);
} }
v8::MaybeLocal<v8::Value> debug::WeakMap::Get(v8::Local<v8::Context> context,
v8::Local<v8::Value> key) {
PREPARE_FOR_EXECUTION(context, WeakMap, Get, Value);
auto self = Utils::OpenHandle(this);
Local<Value> result;
i::Handle<i::Object> argv[] = {Utils::OpenHandle(*key)};
has_pending_exception =
!ToLocal<Value>(i::Execution::Call(isolate, isolate->weakmap_get(), self,
arraysize(argv), argv),
&result);
RETURN_ON_FAILED_EXECUTION(Value);
RETURN_ESCAPED(result);
}
v8::MaybeLocal<debug::WeakMap> debug::WeakMap::Set(
v8::Local<v8::Context> context, v8::Local<v8::Value> key,
v8::Local<v8::Value> value) {
PREPARE_FOR_EXECUTION(context, WeakMap, Set, WeakMap);
auto self = Utils::OpenHandle(this);
i::Handle<i::Object> result;
i::Handle<i::Object> argv[] = {Utils::OpenHandle(*key),
Utils::OpenHandle(*value)};
has_pending_exception = !i::Execution::Call(isolate, isolate->weakmap_set(),
self, arraysize(argv), argv)
.ToHandle(&result);
RETURN_ON_FAILED_EXECUTION(WeakMap);
RETURN_ESCAPED(Local<WeakMap>::Cast(Utils::ToLocal(result)));
}
Local<debug::WeakMap> debug::WeakMap::New(v8::Isolate* isolate) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
LOG_API(i_isolate, WeakMap, New);
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
i::Handle<i::JSWeakMap> obj = i_isolate->factory()->NewJSWeakMap();
return ToApiHandle<debug::WeakMap>(obj);
}
debug::WeakMap* debug::WeakMap::Cast(v8::Value* value) {
return static_cast<debug::WeakMap*>(value);
}
const char* CpuProfileNode::GetFunctionNameStr() const { const char* CpuProfileNode::GetFunctionNameStr() const {
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this); const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
return node->entry()->name(); return node->entry()->name();
@ -10134,11 +10095,6 @@ void CpuProfiler::SetIdle(bool is_idle) {
isolate->SetIdle(is_idle); isolate->SetIdle(is_idle);
} }
void CpuProfiler::UseDetailedSourcePositionsForProfiling(Isolate* isolate) {
reinterpret_cast<i::Isolate*>(isolate)
->set_detailed_source_positions_for_profiling(true);
}
uintptr_t CodeEvent::GetCodeStartAddress() { uintptr_t CodeEvent::GetCodeStartAddress() {
return reinterpret_cast<i::CodeEvent*>(this)->code_start_address; return reinterpret_cast<i::CodeEvent*>(this)->code_start_address;
} }
@ -10546,9 +10502,9 @@ void EmbedderHeapTracer::GarbageCollectionForTesting(
CHECK(i::FLAG_expose_gc); CHECK(i::FLAG_expose_gc);
i::Heap* const heap = reinterpret_cast<i::Isolate*>(isolate_)->heap(); i::Heap* const heap = reinterpret_cast<i::Isolate*>(isolate_)->heap();
heap->SetEmbedderStackStateForNextFinalizaton(stack_state); heap->SetEmbedderStackStateForNextFinalizaton(stack_state);
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask, heap->PreciseCollectAllGarbage(i::Heap::kNoGCFlags,
i::GarbageCollectionReason::kTesting, i::GarbageCollectionReason::kTesting,
kGCCallbackFlagForced); kGCCallbackFlagForced);
} }
bool EmbedderHeapTracer::AdvanceTracing(double deadline_in_ms) { bool EmbedderHeapTracer::AdvanceTracing(double deadline_in_ms) {

1
deps/v8/src/api.h vendored
View File

@ -116,6 +116,7 @@ class RegisteredExtension {
V(Proxy, JSProxy) \ V(Proxy, JSProxy) \
V(debug::GeneratorObject, JSGeneratorObject) \ V(debug::GeneratorObject, JSGeneratorObject) \
V(debug::Script, Script) \ V(debug::Script, Script) \
V(debug::WeakMap, JSWeakMap) \
V(Promise, JSPromise) \ V(Promise, JSPromise) \
V(Primitive, Object) \ V(Primitive, Object) \
V(PrimitiveArray, FixedArray) \ V(PrimitiveArray, FixedArray) \

View File

@ -27,7 +27,7 @@ namespace internal {
// Note that length_ (whose value is in the integer range) is defined // Note that length_ (whose value is in the integer range) is defined
// as intptr_t to provide endian-neutrality on 64-bit archs. // as intptr_t to provide endian-neutrality on 64-bit archs.
class Arguments BASE_EMBEDDED { class Arguments {
public: public:
Arguments(int length, Object** arguments) Arguments(int length, Object** arguments)
: length_(length), arguments_(arguments) { : length_(length), arguments_(arguments) {

View File

@ -46,6 +46,7 @@
#include "src/deoptimizer.h" #include "src/deoptimizer.h"
#include "src/macro-assembler.h" #include "src/macro-assembler.h"
#include "src/objects-inl.h" #include "src/objects-inl.h"
#include "src/string-constants.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -417,6 +418,13 @@ Operand Operand::EmbeddedCode(CodeStub* stub) {
return result; return result;
} }
Operand Operand::EmbeddedStringConstant(const StringConstantBase* str) {
Operand result(0, RelocInfo::EMBEDDED_OBJECT);
result.is_heap_object_request_ = true;
result.value_.heap_object_request = HeapObjectRequest(str);
return result;
}
MemOperand::MemOperand(Register rn, int32_t offset, AddrMode am) MemOperand::MemOperand(Register rn, int32_t offset, AddrMode am)
: rn_(rn), rm_(no_reg), offset_(offset), am_(am) { : rn_(rn), rm_(no_reg), offset_(offset), am_(am) {
// Accesses below the stack pointer are not safe, and are prohibited by the // Accesses below the stack pointer are not safe, and are prohibited by the
@ -472,6 +480,7 @@ void NeonMemOperand::SetAlignment(int align) {
} }
void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) { void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
DCHECK_IMPLIES(isolate == nullptr, heap_object_requests_.empty());
for (auto& request : heap_object_requests_) { for (auto& request : heap_object_requests_) {
Handle<HeapObject> object; Handle<HeapObject> object;
switch (request.kind()) { switch (request.kind()) {
@ -483,6 +492,12 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
request.code_stub()->set_isolate(isolate); request.code_stub()->set_isolate(isolate);
object = request.code_stub()->GetCode(); object = request.code_stub()->GetCode();
break; break;
case HeapObjectRequest::kStringConstant: {
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);
object = str->AllocateStringConstant(isolate);
break;
}
} }
Address pc = reinterpret_cast<Address>(buffer_) + request.offset(); Address pc = reinterpret_cast<Address>(buffer_) + request.offset();
Memory<Address>(constant_pool_entry_address(pc, 0 /* unused */)) = Memory<Address>(constant_pool_entry_address(pc, 0 /* unused */)) =
@ -1418,7 +1433,7 @@ int Assembler::branch_offset(Label* L) {
// Branch instructions. // Branch instructions.
void Assembler::b(int branch_offset, Condition cond, RelocInfo::Mode rmode) { void Assembler::b(int branch_offset, Condition cond, RelocInfo::Mode rmode) {
RecordRelocInfo(rmode); if (!RelocInfo::IsNone(rmode)) RecordRelocInfo(rmode);
DCHECK_EQ(branch_offset & 3, 0); DCHECK_EQ(branch_offset & 3, 0);
int imm24 = branch_offset >> 2; int imm24 = branch_offset >> 2;
const bool b_imm_check = is_int24(imm24); const bool b_imm_check = is_int24(imm24);
@ -1432,7 +1447,7 @@ void Assembler::b(int branch_offset, Condition cond, RelocInfo::Mode rmode) {
} }
void Assembler::bl(int branch_offset, Condition cond, RelocInfo::Mode rmode) { void Assembler::bl(int branch_offset, Condition cond, RelocInfo::Mode rmode) {
RecordRelocInfo(rmode); if (!RelocInfo::IsNone(rmode)) RecordRelocInfo(rmode);
DCHECK_EQ(branch_offset & 3, 0); DCHECK_EQ(branch_offset & 3, 0);
int imm24 = branch_offset >> 2; int imm24 = branch_offset >> 2;
const bool bl_imm_check = is_int24(imm24); const bool bl_imm_check = is_int24(imm24);
@ -5103,13 +5118,7 @@ void Assembler::dq(uint64_t value) {
} }
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
if (options().disable_reloc_info_for_patching) return; if (!ShouldRecordRelocInfo(rmode)) return;
if (RelocInfo::IsNone(rmode) ||
// Don't record external references unless the heap will be serialized.
(RelocInfo::IsOnlyForSerializer(rmode) &&
!options().record_reloc_info_for_serialization && !emit_debug_code())) {
return;
}
DCHECK_GE(buffer_space(), kMaxRelocSize); // too late to grow buffer here DCHECK_GE(buffer_space(), kMaxRelocSize); // too late to grow buffer here
RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr); RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
reloc_info_writer.Write(&rinfo); reloc_info_writer.Write(&rinfo);

View File

@ -393,7 +393,7 @@ enum Coprocessor {
// Machine instruction Operands // Machine instruction Operands
// Class Operand represents a shifter operand in data processing instructions // Class Operand represents a shifter operand in data processing instructions
class Operand BASE_EMBEDDED { class Operand {
public: public:
// immediate // immediate
V8_INLINE explicit Operand(int32_t immediate, V8_INLINE explicit Operand(int32_t immediate,
@ -425,6 +425,7 @@ class Operand BASE_EMBEDDED {
static Operand EmbeddedNumber(double number); // Smi or HeapNumber. static Operand EmbeddedNumber(double number); // Smi or HeapNumber.
static Operand EmbeddedCode(CodeStub* stub); static Operand EmbeddedCode(CodeStub* stub);
static Operand EmbeddedStringConstant(const StringConstantBase* str);
// Return true if this is a register operand. // Return true if this is a register operand.
bool IsRegister() const { bool IsRegister() const {
@ -498,7 +499,7 @@ class Operand BASE_EMBEDDED {
// Class MemOperand represents a memory operand in load and store instructions // Class MemOperand represents a memory operand in load and store instructions
class MemOperand BASE_EMBEDDED { class MemOperand {
public: public:
// [rn +/- offset] Offset/NegOffset // [rn +/- offset] Offset/NegOffset
// [rn +/- offset]! PreIndex/NegPreIndex // [rn +/- offset]! PreIndex/NegPreIndex
@ -557,7 +558,7 @@ class MemOperand BASE_EMBEDDED {
// Class NeonMemOperand represents a memory operand in load and // Class NeonMemOperand represents a memory operand in load and
// store NEON instructions // store NEON instructions
class NeonMemOperand BASE_EMBEDDED { class NeonMemOperand {
public: public:
// [rn {:align}] Offset // [rn {:align}] Offset
// [rn {:align}]! PostIndex // [rn {:align}]! PostIndex
@ -580,7 +581,7 @@ class NeonMemOperand BASE_EMBEDDED {
// Class NeonListOperand represents a list of NEON registers // Class NeonListOperand represents a list of NEON registers
class NeonListOperand BASE_EMBEDDED { class NeonListOperand {
public: public:
explicit NeonListOperand(DoubleRegister base, int register_count = 1) explicit NeonListOperand(DoubleRegister base, int register_count = 1)
: base_(base), register_count_(register_count) {} : base_(base), register_count_(register_count) {}
@ -1693,7 +1694,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
friend class UseScratchRegisterScope; friend class UseScratchRegisterScope;
}; };
class EnsureSpace BASE_EMBEDDED { class EnsureSpace {
public: public:
V8_INLINE explicit EnsureSpace(Assembler* assembler); V8_INLINE explicit EnsureSpace(Assembler* assembler);
}; };

View File

@ -131,7 +131,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
IsolateAddressId::kPendingExceptionAddress, isolate()))); IsolateAddressId::kPendingExceptionAddress, isolate())));
} }
__ str(r0, MemOperand(scratch)); __ str(r0, MemOperand(scratch));
__ LoadRoot(r0, Heap::kExceptionRootIndex); __ LoadRoot(r0, RootIndex::kException);
__ b(&exit); __ b(&exit);
// Invoke: Link this frame into the handler chain. // Invoke: Link this frame into the handler chain.
@ -418,7 +418,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ LeaveExitFrame(false, r4, stack_space_operand != nullptr); __ LeaveExitFrame(false, r4, stack_space_operand != nullptr);
// Check if the function scheduled an exception. // Check if the function scheduled an exception.
__ LoadRoot(r4, Heap::kTheHoleValueRootIndex); __ LoadRoot(r4, RootIndex::kTheHoleValue);
__ Move(r6, ExternalReference::scheduled_exception_address(isolate)); __ Move(r6, ExternalReference::scheduled_exception_address(isolate));
__ ldr(r5, MemOperand(r6)); __ ldr(r5, MemOperand(r6));
__ cmp(r4, r5); __ cmp(r4, r5);
@ -469,14 +469,14 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(FCA::kHolderIndex == 0); STATIC_ASSERT(FCA::kHolderIndex == 0);
// new target // new target
__ PushRoot(Heap::kUndefinedValueRootIndex); __ PushRoot(RootIndex::kUndefinedValue);
// call data // call data
__ push(call_data); __ push(call_data);
Register scratch0 = call_data; Register scratch0 = call_data;
Register scratch1 = r5; Register scratch1 = r5;
__ LoadRoot(scratch0, Heap::kUndefinedValueRootIndex); __ LoadRoot(scratch0, RootIndex::kUndefinedValue);
// return value // return value
__ push(scratch0); __ push(scratch0);
// return value default // return value default
@ -549,7 +549,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
// Push data from AccessorInfo. // Push data from AccessorInfo.
__ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
__ push(scratch); __ push(scratch);
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); __ LoadRoot(scratch, RootIndex::kUndefinedValue);
__ Push(scratch, scratch); __ Push(scratch, scratch);
__ Move(scratch, ExternalReference::isolate_address(isolate())); __ Move(scratch, ExternalReference::isolate_address(isolate()));
__ Push(scratch, holder); __ Push(scratch, holder);

View File

@ -9,7 +9,6 @@
#include "src/arm/assembler-arm-inl.h" #include "src/arm/assembler-arm-inl.h"
#include "src/arm/simulator-arm.h" #include "src/arm/simulator-arm.h"
#include "src/codegen.h" #include "src/codegen.h"
#include "src/isolate.h"
#include "src/macro-assembler.h" #include "src/macro-assembler.h"
namespace v8 { namespace v8 {
@ -19,17 +18,17 @@ namespace internal {
#if defined(V8_HOST_ARCH_ARM) #if defined(V8_HOST_ARCH_ARM)
MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate, MemCopyUint8Function CreateMemCopyUint8Function(MemCopyUint8Function stub) {
MemCopyUint8Function stub) {
#if defined(USE_SIMULATOR) #if defined(USE_SIMULATOR)
return stub; return stub;
#else #else
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
size_t allocated = 0; size_t allocated = 0;
byte* buffer = AllocatePage(isolate->heap()->GetRandomMmapAddr(), &allocated); byte* buffer = AllocatePage(page_allocator,
page_allocator->GetRandomMmapAddr(), &allocated);
if (buffer == nullptr) return stub; if (buffer == nullptr) return stub;
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated), MacroAssembler masm(AssemblerOptions{}, buffer, static_cast<int>(allocated));
CodeObjectRequired::kNo);
Register dest = r0; Register dest = r0;
Register src = r1; Register src = r1;
@ -166,11 +165,12 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
__ Ret(); __ Ret();
CodeDesc desc; CodeDesc desc;
masm.GetCode(isolate, &desc); masm.GetCode(nullptr, &desc);
DCHECK(!RelocInfo::RequiresRelocationAfterCodegen(desc)); DCHECK(!RelocInfo::RequiresRelocationAfterCodegen(desc));
Assembler::FlushICache(buffer, allocated); Assembler::FlushICache(buffer, allocated);
CHECK(SetPermissions(buffer, allocated, PageAllocator::kReadExecute)); CHECK(SetPermissions(page_allocator, buffer, allocated,
PageAllocator::kReadExecute));
return FUNCTION_CAST<MemCopyUint8Function>(buffer); return FUNCTION_CAST<MemCopyUint8Function>(buffer);
#endif #endif
} }
@ -178,16 +178,17 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
// Convert 8 to 16. The number of character to copy must be at least 8. // Convert 8 to 16. The number of character to copy must be at least 8.
MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function( MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function(
Isolate* isolate, MemCopyUint16Uint8Function stub) { MemCopyUint16Uint8Function stub) {
#if defined(USE_SIMULATOR) #if defined(USE_SIMULATOR)
return stub; return stub;
#else #else
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
size_t allocated = 0; size_t allocated = 0;
byte* buffer = AllocatePage(isolate->heap()->GetRandomMmapAddr(), &allocated); byte* buffer = AllocatePage(page_allocator,
page_allocator->GetRandomMmapAddr(), &allocated);
if (buffer == nullptr) return stub; if (buffer == nullptr) return stub;
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated), MacroAssembler masm(AssemblerOptions{}, buffer, static_cast<int>(allocated));
CodeObjectRequired::kNo);
Register dest = r0; Register dest = r0;
Register src = r1; Register src = r1;
@ -256,25 +257,27 @@ MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function(
} }
CodeDesc desc; CodeDesc desc;
masm.GetCode(isolate, &desc); masm.GetCode(nullptr, &desc);
Assembler::FlushICache(buffer, allocated); Assembler::FlushICache(buffer, allocated);
CHECK(SetPermissions(buffer, allocated, PageAllocator::kReadExecute)); CHECK(SetPermissions(page_allocator, buffer, allocated,
PageAllocator::kReadExecute));
return FUNCTION_CAST<MemCopyUint16Uint8Function>(buffer); return FUNCTION_CAST<MemCopyUint16Uint8Function>(buffer);
#endif #endif
} }
#endif #endif
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) { UnaryMathFunction CreateSqrtFunction() {
#if defined(USE_SIMULATOR) #if defined(USE_SIMULATOR)
return nullptr; return nullptr;
#else #else
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
size_t allocated = 0; size_t allocated = 0;
byte* buffer = AllocatePage(isolate->heap()->GetRandomMmapAddr(), &allocated); byte* buffer = AllocatePage(page_allocator,
page_allocator->GetRandomMmapAddr(), &allocated);
if (buffer == nullptr) return nullptr; if (buffer == nullptr) return nullptr;
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated), MacroAssembler masm(AssemblerOptions{}, buffer, static_cast<int>(allocated));
CodeObjectRequired::kNo);
__ MovFromFloatParameter(d0); __ MovFromFloatParameter(d0);
__ vsqrt(d0, d0); __ vsqrt(d0, d0);
@ -282,12 +285,13 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
__ Ret(); __ Ret();
CodeDesc desc; CodeDesc desc;
masm.GetCode(isolate, &desc); masm.GetCode(nullptr, &desc);
DCHECK(!RelocInfo::RequiresRelocationAfterCodegen(desc)); DCHECK(!RelocInfo::RequiresRelocationAfterCodegen(desc));
Assembler::FlushICache(buffer, allocated); Assembler::FlushICache(buffer, allocated);
CHECK(SetPermissions(buffer, allocated, PageAllocator::kReadExecute)); CHECK(SetPermissions(page_allocator, buffer, allocated,
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer); PageAllocator::kReadExecute));
return FUNCTION_CAST<UnaryMathFunction>(buffer);
#endif #endif
} }

View File

@ -88,9 +88,9 @@ void CallVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
// r0 : number of arguments (on the stack, not including receiver) // r0 : number of arguments (on the stack, not including receiver)
// r1 : the target to call // r1 : the target to call
// r2 : arguments list (FixedArray)
// r4 : arguments list length (untagged) // r4 : arguments list length (untagged)
Register registers[] = {r1, r0, r2, r4}; // r2 : arguments list (FixedArray)
Register registers[] = {r1, r0, r4, r2};
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
@ -125,9 +125,9 @@ void ConstructVarargsDescriptor::InitializePlatformSpecific(
// r0 : number of arguments (on the stack, not including receiver) // r0 : number of arguments (on the stack, not including receiver)
// r1 : the target to call // r1 : the target to call
// r3 : the new target // r3 : the new target
// r2 : arguments list (FixedArray)
// r4 : arguments list length (untagged) // r4 : arguments list length (untagged)
Register registers[] = {r1, r3, r0, r2, r4}; // r2 : arguments list (FixedArray)
Register registers[] = {r1, r3, r0, r4, r2};
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
@ -193,7 +193,7 @@ void BinaryOpDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
void ArgumentAdaptorDescriptor::InitializePlatformSpecific( void ArgumentsAdaptorDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = { Register registers[] = {
r1, // JSFunction r1, // JSFunction
@ -237,10 +237,10 @@ void InterpreterPushArgsThenConstructDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = { Register registers[] = {
r0, // argument count (not including receiver) r0, // argument count (not including receiver)
r3, // new target r4, // address of the first argument
r1, // constructor to call r1, // constructor to call
r3, // new target
r2, // allocation site feedback if available, undefined otherwise r2, // allocation site feedback if available, undefined otherwise
r4 // address of the first argument
}; };
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }

View File

@ -130,7 +130,7 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
void TurboAssembler::LoadFromConstantsTable(Register destination, void TurboAssembler::LoadFromConstantsTable(Register destination,
int constant_index) { int constant_index) {
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant( DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex)); RootIndex::kBuiltinsConstantsTable));
// The ldr call below could end up clobbering ip when the offset does not fit // The ldr call below could end up clobbering ip when the offset does not fit
// into 12 bits (and thus needs to be loaded from the constant pool). In that // into 12 bits (and thus needs to be loaded from the constant pool). In that
@ -147,7 +147,7 @@ void TurboAssembler::LoadFromConstantsTable(Register destination,
reg = r7; reg = r7;
} }
LoadRoot(reg, Heap::kBuiltinsConstantsTableRootIndex); LoadRoot(reg, RootIndex::kBuiltinsConstantsTable);
ldr(destination, MemOperand(reg, offset)); ldr(destination, MemOperand(reg, offset));
if (could_clobber_ip) { if (could_clobber_ip) {
@ -527,7 +527,7 @@ void MacroAssembler::Store(Register src,
} }
} }
void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index, void TurboAssembler::LoadRoot(Register destination, RootIndex index,
Condition cond) { Condition cond) {
ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), cond); ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), cond);
} }
@ -615,8 +615,6 @@ void TurboAssembler::CallRecordWriteStub(
RecordWriteDescriptor::kObject)); RecordWriteDescriptor::kObject));
Register slot_parameter( Register slot_parameter(
callable.descriptor().GetRegisterParameter(RecordWriteDescriptor::kSlot)); callable.descriptor().GetRegisterParameter(RecordWriteDescriptor::kSlot));
Register isolate_parameter(callable.descriptor().GetRegisterParameter(
RecordWriteDescriptor::kIsolate));
Register remembered_set_parameter(callable.descriptor().GetRegisterParameter( Register remembered_set_parameter(callable.descriptor().GetRegisterParameter(
RecordWriteDescriptor::kRememberedSet)); RecordWriteDescriptor::kRememberedSet));
Register fp_mode_parameter(callable.descriptor().GetRegisterParameter( Register fp_mode_parameter(callable.descriptor().GetRegisterParameter(
@ -628,7 +626,6 @@ void TurboAssembler::CallRecordWriteStub(
Pop(slot_parameter); Pop(slot_parameter);
Pop(object_parameter); Pop(object_parameter);
Move(isolate_parameter, ExternalReference::isolate_address(isolate()));
Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action)); Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
Move(fp_mode_parameter, Smi::FromEnum(fp_mode)); Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
Call(callable.code(), RelocInfo::CODE_TARGET); Call(callable.code(), RelocInfo::CODE_TARGET);
@ -1520,7 +1517,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
// Clear the new.target register if not given. // Clear the new.target register if not given.
if (!new_target.is_valid()) { if (!new_target.is_valid()) {
LoadRoot(r3, Heap::kUndefinedValueRootIndex); LoadRoot(r3, RootIndex::kUndefinedValue);
} }
Label done; Label done;
@ -1642,9 +1639,7 @@ void MacroAssembler::CompareInstanceType(Register map,
cmp(type_reg, Operand(type)); cmp(type_reg, Operand(type));
} }
void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
void MacroAssembler::CompareRoot(Register obj,
Heap::RootListIndex index) {
UseScratchRegisterScope temps(this); UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire(); Register scratch = temps.Acquire();
DCHECK(obj != scratch); DCHECK(obj != scratch);
@ -2053,7 +2048,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
if (emit_debug_code()) { if (emit_debug_code()) {
Label done_checking; Label done_checking;
AssertNotSmi(object); AssertNotSmi(object);
CompareRoot(object, Heap::kUndefinedValueRootIndex); CompareRoot(object, RootIndex::kUndefinedValue);
b(eq, &done_checking); b(eq, &done_checking);
ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE); CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);

View File

@ -71,6 +71,9 @@ enum TargetAddressStorageMode {
class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
public: public:
TurboAssembler(const AssemblerOptions& options, void* buffer, int buffer_size)
: TurboAssemblerBase(options, buffer, buffer_size) {}
TurboAssembler(Isolate* isolate, const AssemblerOptions& options, TurboAssembler(Isolate* isolate, const AssemblerOptions& options,
void* buffer, int buffer_size, void* buffer, int buffer_size,
CodeObjectRequired create_code_object) CodeObjectRequired create_code_object)
@ -481,11 +484,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
} }
// Load an object from the root table. // Load an object from the root table.
void LoadRoot(Register destination, Heap::RootListIndex index) override { void LoadRoot(Register destination, RootIndex index) override {
LoadRoot(destination, index, al); LoadRoot(destination, index, al);
} }
void LoadRoot(Register destination, Heap::RootListIndex index, void LoadRoot(Register destination, RootIndex index, Condition cond);
Condition cond);
// Jump if the register contains a smi. // Jump if the register contains a smi.
void JumpIfSmi(Register value, Label* smi_label); void JumpIfSmi(Register value, Label* smi_label);
@ -566,10 +568,14 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// MacroAssembler implements a collection of frequently used macros. // MacroAssembler implements a collection of frequently used macros.
class MacroAssembler : public TurboAssembler { class MacroAssembler : public TurboAssembler {
public: public:
MacroAssembler(const AssemblerOptions& options, void* buffer, int size)
: TurboAssembler(options, buffer, size) {}
MacroAssembler(Isolate* isolate, void* buffer, int size, MacroAssembler(Isolate* isolate, void* buffer, int size,
CodeObjectRequired create_code_object) CodeObjectRequired create_code_object)
: MacroAssembler(isolate, AssemblerOptions::Default(isolate), buffer, : MacroAssembler(isolate, AssemblerOptions::Default(isolate), buffer,
size, create_code_object) {} size, create_code_object) {}
MacroAssembler(Isolate* isolate, const AssemblerOptions& options, MacroAssembler(Isolate* isolate, const AssemblerOptions& options,
void* buffer, int size, CodeObjectRequired create_code_object); void* buffer, int size, CodeObjectRequired create_code_object);
@ -713,8 +719,8 @@ class MacroAssembler : public TurboAssembler {
// Compare the object in a register to a value from the root list. // Compare the object in a register to a value from the root list.
// Acquires a scratch register. // Acquires a scratch register.
void CompareRoot(Register obj, Heap::RootListIndex index); void CompareRoot(Register obj, RootIndex index);
void PushRoot(Heap::RootListIndex index) { void PushRoot(RootIndex index) {
UseScratchRegisterScope temps(this); UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire(); Register scratch = temps.Acquire();
LoadRoot(scratch, index); LoadRoot(scratch, index);
@ -722,14 +728,13 @@ class MacroAssembler : public TurboAssembler {
} }
// Compare the object in a register to a value and jump if they are equal. // Compare the object in a register to a value and jump if they are equal.
void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) { void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
CompareRoot(with, index); CompareRoot(with, index);
b(eq, if_equal); b(eq, if_equal);
} }
// Compare the object in a register to a value and jump if they are not equal. // Compare the object in a register to a value and jump if they are not equal.
void JumpIfNotRoot(Register with, Heap::RootListIndex index, void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
Label* if_not_equal) {
CompareRoot(with, index); CompareRoot(with, index);
b(ne, if_not_equal); b(ne, if_not_equal);
} }

View File

@ -3212,15 +3212,15 @@ void Simulator::DecodeTypeVFP(Instruction* instr) {
DecodeVCMP(instr); DecodeVCMP(instr);
} else if (((instr->Opc2Value() == 0x1)) && (instr->Opc3Value() == 0x3)) { } else if (((instr->Opc2Value() == 0x1)) && (instr->Opc3Value() == 0x3)) {
// vsqrt // vsqrt
lazily_initialize_fast_sqrt(isolate_); lazily_initialize_fast_sqrt();
if (instr->SzValue() == 0x1) { if (instr->SzValue() == 0x1) {
double dm_value = get_double_from_d_register(vm).get_scalar(); double dm_value = get_double_from_d_register(vm).get_scalar();
double dd_value = fast_sqrt(dm_value, isolate_); double dd_value = fast_sqrt(dm_value);
dd_value = canonicalizeNaN(dd_value); dd_value = canonicalizeNaN(dd_value);
set_d_register_from_double(vd, dd_value); set_d_register_from_double(vd, dd_value);
} else { } else {
float sm_value = get_float_from_s_register(m).get_scalar(); float sm_value = get_float_from_s_register(m).get_scalar();
float sd_value = fast_sqrt(sm_value, isolate_); float sd_value = fast_sqrt(sm_value);
sd_value = canonicalizeNaN(sd_value); sd_value = canonicalizeNaN(sd_value);
set_s_register_from_float(d, sd_value); set_s_register_from_float(d, sd_value);
} }
@ -5282,10 +5282,10 @@ void Simulator::DecodeSpecialCondition(Instruction* instr) {
src[i] = bit_cast<uint32_t>(result); src[i] = bit_cast<uint32_t>(result);
} }
} else { } else {
lazily_initialize_fast_sqrt(isolate_); lazily_initialize_fast_sqrt();
for (int i = 0; i < 4; i++) { for (int i = 0; i < 4; i++) {
float radicand = bit_cast<float>(src[i]); float radicand = bit_cast<float>(src[i]);
float result = 1.0f / fast_sqrt(radicand, isolate_); float result = 1.0f / fast_sqrt(radicand);
result = canonicalizeNaN(result); result = canonicalizeNaN(result);
src[i] = bit_cast<uint32_t>(result); src[i] = bit_cast<uint32_t>(result);
} }

View File

@ -341,7 +341,9 @@ Immediate Operand::immediate_for_heap_object_request() const {
DCHECK((heap_object_request().kind() == HeapObjectRequest::kHeapNumber && DCHECK((heap_object_request().kind() == HeapObjectRequest::kHeapNumber &&
immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT) || immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT) ||
(heap_object_request().kind() == HeapObjectRequest::kCodeStub && (heap_object_request().kind() == HeapObjectRequest::kCodeStub &&
immediate_.rmode() == RelocInfo::CODE_TARGET)); immediate_.rmode() == RelocInfo::CODE_TARGET) ||
(heap_object_request().kind() == HeapObjectRequest::kStringConstant &&
immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT));
return immediate_; return immediate_;
} }

View File

@ -36,6 +36,7 @@
#include "src/code-stubs.h" #include "src/code-stubs.h"
#include "src/frame-constants.h" #include "src/frame-constants.h"
#include "src/register-configuration.h" #include "src/register-configuration.h"
#include "src/string-constants.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -583,6 +584,7 @@ void Assembler::Reset() {
} }
void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) { void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
DCHECK_IMPLIES(isolate == nullptr, heap_object_requests_.empty());
for (auto& request : heap_object_requests_) { for (auto& request : heap_object_requests_) {
Address pc = reinterpret_cast<Address>(buffer_) + request.offset(); Address pc = reinterpret_cast<Address>(buffer_) + request.offset();
switch (request.kind()) { switch (request.kind()) {
@ -601,6 +603,13 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
request.code_stub()->GetCode()); request.code_stub()->GetCode());
break; break;
} }
case HeapObjectRequest::kStringConstant: {
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);
set_target_address_at(pc, 0 /* unused */,
str->AllocateStringConstant(isolate).address());
break;
}
} }
} }
} }
@ -1717,6 +1726,13 @@ Operand Operand::EmbeddedCode(CodeStub* stub) {
return result; return result;
} }
Operand Operand::EmbeddedStringConstant(const StringConstantBase* str) {
Operand result(0, RelocInfo::EMBEDDED_OBJECT);
result.heap_object_request_.emplace(str);
DCHECK(result.IsHeapObjectRequest());
return result;
}
void Assembler::ldr(const CPURegister& rt, const Operand& operand) { void Assembler::ldr(const CPURegister& rt, const Operand& operand) {
if (operand.IsHeapObjectRequest()) { if (operand.IsHeapObjectRequest()) {
RequestHeapObject(operand.heap_object_request()); RequestHeapObject(operand.heap_object_request());
@ -4751,14 +4767,6 @@ void Assembler::GrowBuffer() {
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data, void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data,
ConstantPoolMode constant_pool_mode) { ConstantPoolMode constant_pool_mode) {
// Non-relocatable constants should not end up in the literal pool.
DCHECK(!RelocInfo::IsNone(rmode));
if (options().disable_reloc_info_for_patching) return;
// We do not try to reuse pool constants.
RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
bool write_reloc_info = true;
if ((rmode == RelocInfo::COMMENT) || if ((rmode == RelocInfo::COMMENT) ||
(rmode == RelocInfo::INTERNAL_REFERENCE) || (rmode == RelocInfo::INTERNAL_REFERENCE) ||
(rmode == RelocInfo::CONST_POOL) || (rmode == RelocInfo::VENEER_POOL) || (rmode == RelocInfo::CONST_POOL) || (rmode == RelocInfo::VENEER_POOL) ||
@ -4772,23 +4780,22 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data,
RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)); RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode));
// These modes do not need an entry in the constant pool. // These modes do not need an entry in the constant pool.
} else if (constant_pool_mode == NEEDS_POOL_ENTRY) { } else if (constant_pool_mode == NEEDS_POOL_ENTRY) {
write_reloc_info = constpool_.RecordEntry(data, rmode); bool new_constpool_entry = constpool_.RecordEntry(data, rmode);
// Make sure the constant pool is not emitted in place of the next // Make sure the constant pool is not emitted in place of the next
// instruction for which we just recorded relocation info. // instruction for which we just recorded relocation info.
BlockConstPoolFor(1); BlockConstPoolFor(1);
if (!new_constpool_entry) return;
} }
// For modes that cannot use the constant pool, a different sequence of // For modes that cannot use the constant pool, a different sequence of
// instructions will be emitted by this function's caller. // instructions will be emitted by this function's caller.
if (write_reloc_info) { if (!ShouldRecordRelocInfo(rmode)) return;
// Don't record external references unless the heap will be serialized.
if (RelocInfo::IsOnlyForSerializer(rmode) && // We do not try to reuse pool constants.
!options().record_reloc_info_for_serialization && !emit_debug_code()) { RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
return;
} DCHECK_GE(buffer_space(), kMaxRelocSize); // too late to grow buffer here
DCHECK_GE(buffer_space(), kMaxRelocSize); // too late to grow buffer here reloc_info_writer.Write(&rinfo);
reloc_info_writer.Write(&rinfo);
}
} }
void Assembler::near_jump(int offset, RelocInfo::Mode rmode) { void Assembler::near_jump(int offset, RelocInfo::Mode rmode) {

View File

@ -718,6 +718,7 @@ class Operand {
static Operand EmbeddedNumber(double number); // Smi or HeapNumber. static Operand EmbeddedNumber(double number); // Smi or HeapNumber.
static Operand EmbeddedCode(CodeStub* stub); static Operand EmbeddedCode(CodeStub* stub);
static Operand EmbeddedStringConstant(const StringConstantBase* str);
inline bool IsHeapObjectRequest() const; inline bool IsHeapObjectRequest() const;
inline HeapObjectRequest heap_object_request() const; inline HeapObjectRequest heap_object_request() const;
@ -3624,8 +3625,7 @@ class PatchingAssembler : public Assembler {
void PatchSubSp(uint32_t immediate); void PatchSubSp(uint32_t immediate);
}; };
class EnsureSpace {
class EnsureSpace BASE_EMBEDDED {
public: public:
explicit EnsureSpace(Assembler* assembler) { explicit EnsureSpace(Assembler* assembler) {
assembler->CheckBufferSpace(); assembler->CheckBufferSpace();

View File

@ -124,7 +124,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
IsolateAddressId::kPendingExceptionAddress, isolate()))); IsolateAddressId::kPendingExceptionAddress, isolate())));
} }
__ Str(code_entry, MemOperand(x10)); __ Str(code_entry, MemOperand(x10));
__ LoadRoot(x0, Heap::kExceptionRootIndex); __ LoadRoot(x0, RootIndex::kException);
__ B(&exit); __ B(&exit);
// Invoke: Link this frame into the handler chain. // Invoke: Link this frame into the handler chain.
@ -434,8 +434,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
// Check if the function scheduled an exception. // Check if the function scheduled an exception.
__ Mov(x5, ExternalReference::scheduled_exception_address(isolate)); __ Mov(x5, ExternalReference::scheduled_exception_address(isolate));
__ Ldr(x5, MemOperand(x5)); __ Ldr(x5, MemOperand(x5));
__ JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex, __ JumpIfNotRoot(x5, RootIndex::kTheHoleValue, &promote_scheduled_exception);
&promote_scheduled_exception);
__ DropSlots(stack_space); __ DropSlots(stack_space);
__ Ret(); __ Ret();
@ -484,7 +483,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(FCA::kHolderIndex == 0); STATIC_ASSERT(FCA::kHolderIndex == 0);
Register undef = x7; Register undef = x7;
__ LoadRoot(undef, Heap::kUndefinedValueRootIndex); __ LoadRoot(undef, RootIndex::kUndefinedValue);
// Push new target, call data. // Push new target, call data.
__ Push(undef, call_data); __ Push(undef, call_data);
@ -562,7 +561,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
name)); name));
__ Ldr(data, FieldMemOperand(callback, AccessorInfo::kDataOffset)); __ Ldr(data, FieldMemOperand(callback, AccessorInfo::kDataOffset));
__ LoadRoot(undef, Heap::kUndefinedValueRootIndex); __ LoadRoot(undef, RootIndex::kUndefinedValue);
__ Mov(isolate_address, ExternalReference::isolate_address(isolate())); __ Mov(isolate_address, ExternalReference::isolate_address(isolate()));
__ Ldr(name, FieldMemOperand(callback, AccessorInfo::kNameOffset)); __ Ldr(name, FieldMemOperand(callback, AccessorInfo::kNameOffset));

View File

@ -8,7 +8,6 @@
#include "src/arm64/macro-assembler-arm64-inl.h" #include "src/arm64/macro-assembler-arm64-inl.h"
#include "src/arm64/simulator-arm64.h" #include "src/arm64/simulator-arm64.h"
#include "src/codegen.h" #include "src/codegen.h"
#include "src/isolate.h"
#include "src/macro-assembler.h" #include "src/macro-assembler.h"
namespace v8 { namespace v8 {
@ -16,9 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm) #define __ ACCESS_MASM(masm)
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) { UnaryMathFunction CreateSqrtFunction() { return nullptr; }
return nullptr;
}
#undef __ #undef __

View File

@ -291,10 +291,8 @@ M_(FPCR, AHP_mask | DN_mask | FZ_mask | RMode_mask)
const uint32_t Name##_mask = ((1 << Name##_width) - 1) << LowBit; const uint32_t Name##_mask = ((1 << Name##_width) - 1) << LowBit;
#define DECLARE_INSTRUCTION_FIELDS_OFFSETS(Name, HighBit, LowBit, unused_1) \ #define DECLARE_INSTRUCTION_FIELDS_OFFSETS(Name, HighBit, LowBit, unused_1) \
DECLARE_FIELDS_OFFSETS(Name, HighBit, LowBit, unused_1, unused_2) DECLARE_FIELDS_OFFSETS(Name, HighBit, LowBit, unused_1, unused_2)
#define NOTHING(A, B)
INSTRUCTION_FIELDS_LIST(DECLARE_INSTRUCTION_FIELDS_OFFSETS) INSTRUCTION_FIELDS_LIST(DECLARE_INSTRUCTION_FIELDS_OFFSETS)
SYSTEM_REGISTER_FIELDS_LIST(DECLARE_FIELDS_OFFSETS, NOTHING) SYSTEM_REGISTER_FIELDS_LIST(DECLARE_FIELDS_OFFSETS, NOTHING)
#undef NOTHING
#undef DECLARE_FIELDS_OFFSETS #undef DECLARE_FIELDS_OFFSETS
#undef DECLARE_INSTRUCTION_FIELDS_OFFSETS #undef DECLARE_INSTRUCTION_FIELDS_OFFSETS

View File

@ -89,9 +89,9 @@ void CallVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
// x0 : number of arguments (on the stack, not including receiver) // x0 : number of arguments (on the stack, not including receiver)
// x1 : the target to call // x1 : the target to call
// x2 : arguments list (FixedArray)
// x4 : arguments list length (untagged) // x4 : arguments list length (untagged)
Register registers[] = {x1, x0, x2, x4}; // x2 : arguments list (FixedArray)
Register registers[] = {x1, x0, x4, x2};
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
@ -126,9 +126,9 @@ void ConstructVarargsDescriptor::InitializePlatformSpecific(
// x0 : number of arguments (on the stack, not including receiver) // x0 : number of arguments (on the stack, not including receiver)
// x1 : the target to call // x1 : the target to call
// x3 : the new target // x3 : the new target
// x2 : arguments list (FixedArray)
// x4 : arguments list length (untagged) // x4 : arguments list length (untagged)
Register registers[] = {x1, x3, x0, x2, x4}; // x2 : arguments list (FixedArray)
Register registers[] = {x1, x3, x0, x4, x2};
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
@ -198,7 +198,7 @@ void BinaryOpDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
void ArgumentAdaptorDescriptor::InitializePlatformSpecific( void ArgumentsAdaptorDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = { Register registers[] = {
x1, // JSFunction x1, // JSFunction
@ -242,10 +242,10 @@ void InterpreterPushArgsThenConstructDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = { Register registers[] = {
x0, // argument count (not including receiver) x0, // argument count (not including receiver)
x3, // new target x4, // address of the first argument
x1, // constructor to call x1, // constructor to call
x3, // new target
x2, // allocation site feedback if available, undefined otherwise x2, // allocation site feedback if available, undefined otherwise
x4 // address of the first argument
}; };
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }

View File

@ -1516,7 +1516,7 @@ void TurboAssembler::CanonicalizeNaN(const VRegister& dst,
Fsub(dst, src, fp_zero); Fsub(dst, src, fp_zero);
} }
void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
// TODO(jbramley): Most root values are constants, and can be synthesized // TODO(jbramley): Most root values are constants, and can be synthesized
// without a load. Refer to the ARM back end for details. // without a load. Refer to the ARM back end for details.
Ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index))); Ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index)));
@ -1646,7 +1646,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
Register scratch = temps.AcquireX(); Register scratch = temps.AcquireX();
Label done_checking; Label done_checking;
AssertNotSmi(object); AssertNotSmi(object);
JumpIfRoot(object, Heap::kUndefinedValueRootIndex, &done_checking); JumpIfRoot(object, RootIndex::kUndefinedValue, &done_checking);
Ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); Ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE); CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);
Assert(eq, AbortReason::kExpectedUndefinedOrCell); Assert(eq, AbortReason::kExpectedUndefinedOrCell);
@ -1727,7 +1727,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
} }
void MacroAssembler::JumpToInstructionStream(Address entry) { void MacroAssembler::JumpToInstructionStream(Address entry) {
Mov(kOffHeapTrampolineRegister, Operand(entry, RelocInfo::OFF_HEAP_TARGET)); Ldr(kOffHeapTrampolineRegister, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
Br(kOffHeapTrampolineRegister); Br(kOffHeapTrampolineRegister);
} }
@ -1806,8 +1806,8 @@ void TurboAssembler::CallCFunction(Register function, int num_of_reg_args,
void TurboAssembler::LoadFromConstantsTable(Register destination, void TurboAssembler::LoadFromConstantsTable(Register destination,
int constant_index) { int constant_index) {
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant( DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex)); RootIndex::kBuiltinsConstantsTable));
LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex); LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
Ldr(destination, Ldr(destination,
FieldMemOperand(destination, FieldMemOperand(destination,
FixedArray::kHeaderSize + constant_index * kPointerSize)); FixedArray::kHeaderSize + constant_index * kPointerSize));
@ -1905,7 +1905,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Register scratch = temps.AcquireX(); Register scratch = temps.AcquireX();
EmbeddedData d = EmbeddedData::FromBlob(); EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index); Address entry = d.InstructionStartOfBuiltin(builtin_index);
Mov(scratch, Operand(entry, RelocInfo::OFF_HEAP_TARGET)); Ldr(scratch, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
Jump(scratch, cond); Jump(scratch, cond);
return; return;
} }
@ -1963,7 +1963,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode) {
Register scratch = temps.AcquireX(); Register scratch = temps.AcquireX();
EmbeddedData d = EmbeddedData::FromBlob(); EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index); Address entry = d.InstructionStartOfBuiltin(builtin_index);
Mov(scratch, Operand(entry, RelocInfo::OFF_HEAP_TARGET)); Ldr(scratch, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
Call(scratch); Call(scratch);
return; return;
} }
@ -2225,7 +2225,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
// Clear the new.target register if not given. // Clear the new.target register if not given.
if (!new_target.is_valid()) { if (!new_target.is_valid()) {
LoadRoot(x3, Heap::kUndefinedValueRootIndex); LoadRoot(x3, RootIndex::kUndefinedValue);
} }
Label done; Label done;
@ -2597,8 +2597,7 @@ void MacroAssembler::LoadElementsKindFromMap(Register result, Register map) {
DecodeField<Map::ElementsKindBits>(result); DecodeField<Map::ElementsKindBits>(result);
} }
void MacroAssembler::CompareRoot(const Register& obj, void MacroAssembler::CompareRoot(const Register& obj, RootIndex index) {
Heap::RootListIndex index) {
UseScratchRegisterScope temps(this); UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX(); Register temp = temps.AcquireX();
DCHECK(!AreAliased(obj, temp)); DCHECK(!AreAliased(obj, temp));
@ -2606,17 +2605,13 @@ void MacroAssembler::CompareRoot(const Register& obj,
Cmp(obj, temp); Cmp(obj, temp);
} }
void MacroAssembler::JumpIfRoot(const Register& obj, RootIndex index,
void MacroAssembler::JumpIfRoot(const Register& obj,
Heap::RootListIndex index,
Label* if_equal) { Label* if_equal) {
CompareRoot(obj, index); CompareRoot(obj, index);
B(eq, if_equal); B(eq, if_equal);
} }
void MacroAssembler::JumpIfNotRoot(const Register& obj, RootIndex index,
void MacroAssembler::JumpIfNotRoot(const Register& obj,
Heap::RootListIndex index,
Label* if_not_equal) { Label* if_not_equal) {
CompareRoot(obj, index); CompareRoot(obj, index);
B(ne, if_not_equal); B(ne, if_not_equal);
@ -2823,8 +2818,6 @@ void TurboAssembler::CallRecordWriteStub(
RecordWriteDescriptor::kObject)); RecordWriteDescriptor::kObject));
Register slot_parameter( Register slot_parameter(
callable.descriptor().GetRegisterParameter(RecordWriteDescriptor::kSlot)); callable.descriptor().GetRegisterParameter(RecordWriteDescriptor::kSlot));
Register isolate_parameter(callable.descriptor().GetRegisterParameter(
RecordWriteDescriptor::kIsolate));
Register remembered_set_parameter(callable.descriptor().GetRegisterParameter( Register remembered_set_parameter(callable.descriptor().GetRegisterParameter(
RecordWriteDescriptor::kRememberedSet)); RecordWriteDescriptor::kRememberedSet));
Register fp_mode_parameter(callable.descriptor().GetRegisterParameter( Register fp_mode_parameter(callable.descriptor().GetRegisterParameter(
@ -2834,7 +2827,6 @@ void TurboAssembler::CallRecordWriteStub(
Pop(slot_parameter, object_parameter); Pop(slot_parameter, object_parameter);
Mov(isolate_parameter, ExternalReference::isolate_address(isolate()));
Mov(remembered_set_parameter, Smi::FromEnum(remembered_set_action)); Mov(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
Mov(fp_mode_parameter, Smi::FromEnum(fp_mode)); Mov(fp_mode_parameter, Smi::FromEnum(fp_mode));
Call(callable.code(), RelocInfo::CODE_TARGET); Call(callable.code(), RelocInfo::CODE_TARGET);
@ -2915,8 +2907,7 @@ void TurboAssembler::AssertUnreachable(AbortReason reason) {
if (emit_debug_code()) Abort(reason); if (emit_debug_code()) Abort(reason);
} }
void MacroAssembler::AssertRegisterIsRoot(Register reg, void MacroAssembler::AssertRegisterIsRoot(Register reg, RootIndex index,
Heap::RootListIndex index,
AbortReason reason) { AbortReason reason) {
if (emit_debug_code()) { if (emit_debug_code()) {
CompareRoot(reg, index); CompareRoot(reg, index);

View File

@ -180,6 +180,9 @@ enum PreShiftImmMode {
class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
public: public:
TurboAssembler(const AssemblerOptions& options, void* buffer, int buffer_size)
: TurboAssemblerBase(options, buffer, buffer_size) {}
TurboAssembler(Isolate* isolate, const AssemblerOptions& options, TurboAssembler(Isolate* isolate, const AssemblerOptions& options,
void* buffer, int buffer_size, void* buffer, int buffer_size,
CodeObjectRequired create_code_object) CodeObjectRequired create_code_object)
@ -1126,7 +1129,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
#undef DECLARE_FUNCTION #undef DECLARE_FUNCTION
// Load an object from the root table. // Load an object from the root table.
void LoadRoot(Register destination, Heap::RootListIndex index) override; void LoadRoot(Register destination, RootIndex index) override;
inline void Ret(const Register& xn = lr); inline void Ret(const Register& xn = lr);
@ -1262,10 +1265,14 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
class MacroAssembler : public TurboAssembler { class MacroAssembler : public TurboAssembler {
public: public:
MacroAssembler(const AssemblerOptions& options, void* buffer, int size)
: TurboAssembler(options, buffer, size) {}
MacroAssembler(Isolate* isolate, void* buffer, int size, MacroAssembler(Isolate* isolate, void* buffer, int size,
CodeObjectRequired create_code_object) CodeObjectRequired create_code_object)
: MacroAssembler(isolate, AssemblerOptions::Default(isolate), buffer, : MacroAssembler(isolate, AssemblerOptions::Default(isolate), buffer,
size, create_code_object) {} size, create_code_object) {}
MacroAssembler(Isolate* isolate, const AssemblerOptions& options, MacroAssembler(Isolate* isolate, const AssemblerOptions& options,
void* buffer, int size, CodeObjectRequired create_code_object); void* buffer, int size, CodeObjectRequired create_code_object);
@ -1821,17 +1828,13 @@ class MacroAssembler : public TurboAssembler {
void LoadElementsKindFromMap(Register result, Register map); void LoadElementsKindFromMap(Register result, Register map);
// Compare the object in a register to a value from the root list. // Compare the object in a register to a value from the root list.
void CompareRoot(const Register& obj, Heap::RootListIndex index); void CompareRoot(const Register& obj, RootIndex index);
// Compare the object in a register to a value and jump if they are equal. // Compare the object in a register to a value and jump if they are equal.
void JumpIfRoot(const Register& obj, void JumpIfRoot(const Register& obj, RootIndex index, Label* if_equal);
Heap::RootListIndex index,
Label* if_equal);
// Compare the object in a register to a value and jump if they are not equal. // Compare the object in a register to a value and jump if they are not equal.
void JumpIfNotRoot(const Register& obj, void JumpIfNotRoot(const Register& obj, RootIndex index, Label* if_not_equal);
Heap::RootListIndex index,
Label* if_not_equal);
// Compare the contents of a register with an operand, and branch to true, // Compare the contents of a register with an operand, and branch to true,
// false or fall through, depending on condition. // false or fall through, depending on condition.
@ -1944,7 +1947,7 @@ class MacroAssembler : public TurboAssembler {
// Debugging. // Debugging.
void AssertRegisterIsRoot( void AssertRegisterIsRoot(
Register reg, Heap::RootListIndex index, Register reg, RootIndex index,
AbortReason reason = AbortReason::kRegisterDidNotMatchExpectedRoot); AbortReason reason = AbortReason::kRegisterDidNotMatchExpectedRoot);
// Abort if the specified register contains the invalid color bit pattern. // Abort if the specified register contains the invalid color bit pattern.
@ -2025,7 +2028,7 @@ class MacroAssembler : public TurboAssembler {
// instructions. This scope prevents the MacroAssembler from being called and // instructions. This scope prevents the MacroAssembler from being called and
// literal pools from being emitted. It also asserts the number of instructions // literal pools from being emitted. It also asserts the number of instructions
// emitted is what you specified when creating the scope. // emitted is what you specified when creating the scope.
class InstructionAccurateScope BASE_EMBEDDED { class InstructionAccurateScope {
public: public:
explicit InstructionAccurateScope(TurboAssembler* tasm, size_t count = 0) explicit InstructionAccurateScope(TurboAssembler* tasm, size_t count = 0)
: tasm_(tasm) : tasm_(tasm)

View File

@ -391,7 +391,7 @@ MaybeHandle<Object> AsmJs::InstantiateAsmWasm(Isolate* isolate,
return MaybeHandle<Object>(); return MaybeHandle<Object>();
} }
memory->set_is_growable(false); memory->set_is_growable(false);
size_t size = NumberToSize(memory->byte_length()); size_t size = memory->byte_length();
// Check the asm.js heap size against the valid limits. // Check the asm.js heap size against the valid limits.
if (!IsValidAsmjsMemorySize(size)) { if (!IsValidAsmjsMemorySize(size)) {
ReportInstantiationFailure(script, position, "Invalid heap size"); ReportInstantiationFailure(script, position, "Invalid heap size");

View File

@ -44,10 +44,24 @@
#include "src/simulator.h" // For flushing instruction cache. #include "src/simulator.h" // For flushing instruction cache.
#include "src/snapshot/serializer-common.h" #include "src/snapshot/serializer-common.h"
#include "src/snapshot/snapshot.h" #include "src/snapshot/snapshot.h"
#include "src/string-constants.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
AssemblerOptions AssemblerOptions::EnableV8AgnosticCode() const {
AssemblerOptions options = *this;
options.v8_agnostic_code = true;
options.record_reloc_info_for_serialization = false;
options.enable_root_array_delta_access = false;
// Inherit |enable_simulator_code| value.
options.isolate_independent_code = false;
options.inline_offheap_trampolines = false;
// Inherit |code_range_start| value.
// Inherit |use_pc_relative_calls_and_jumps| value.
return options;
}
AssemblerOptions AssemblerOptions::Default( AssemblerOptions AssemblerOptions::Default(
Isolate* isolate, bool explicitly_support_serialization) { Isolate* isolate, bool explicitly_support_serialization) {
AssemblerOptions options; AssemblerOptions options;
@ -61,9 +75,12 @@ AssemblerOptions AssemblerOptions::Default(
options.enable_simulator_code = !serializer; options.enable_simulator_code = !serializer;
#endif #endif
options.inline_offheap_trampolines = !serializer; options.inline_offheap_trampolines = !serializer;
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64 #if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64
options.code_range_start = const base::AddressRegion& code_range =
isolate->heap()->memory_allocator()->code_range()->start(); isolate->heap()->memory_allocator()->code_range();
DCHECK_IMPLIES(code_range.begin() != kNullAddress, !code_range.is_empty());
options.code_range_start = code_range.begin();
#endif #endif
return options; return options;
} }
@ -355,6 +372,13 @@ HeapObjectRequest::HeapObjectRequest(CodeStub* code_stub, int offset)
DCHECK_NOT_NULL(value_.code_stub); DCHECK_NOT_NULL(value_.code_stub);
} }
HeapObjectRequest::HeapObjectRequest(const StringConstantBase* string,
int offset)
: kind_(kStringConstant), offset_(offset) {
value_.string = string;
DCHECK_NOT_NULL(value_.string);
}
// Platform specific but identical code for all the platforms. // Platform specific but identical code for all the platforms.
void Assembler::RecordDeoptReason(DeoptimizeReason reason, void Assembler::RecordDeoptReason(DeoptimizeReason reason,
@ -381,11 +405,13 @@ void Assembler::DataAlign(int m) {
} }
void AssemblerBase::RequestHeapObject(HeapObjectRequest request) { void AssemblerBase::RequestHeapObject(HeapObjectRequest request) {
DCHECK(!options().v8_agnostic_code);
request.set_offset(pc_offset()); request.set_offset(pc_offset());
heap_object_requests_.push_front(request); heap_object_requests_.push_front(request);
} }
int AssemblerBase::AddCodeTarget(Handle<Code> target) { int AssemblerBase::AddCodeTarget(Handle<Code> target) {
DCHECK(!options().v8_agnostic_code);
int current = static_cast<int>(code_targets_.size()); int current = static_cast<int>(code_targets_.size());
if (current > 0 && !target.is_null() && if (current > 0 && !target.is_null() &&
code_targets_.back().address() == target.address()) { code_targets_.back().address() == target.address()) {
@ -398,6 +424,7 @@ int AssemblerBase::AddCodeTarget(Handle<Code> target) {
} }
Handle<Code> AssemblerBase::GetCodeTarget(intptr_t code_target_index) const { Handle<Code> AssemblerBase::GetCodeTarget(intptr_t code_target_index) const {
DCHECK(!options().v8_agnostic_code);
DCHECK_LE(0, code_target_index); DCHECK_LE(0, code_target_index);
DCHECK_LT(code_target_index, code_targets_.size()); DCHECK_LT(code_target_index, code_targets_.size());
return code_targets_[code_target_index]; return code_targets_[code_target_index];
@ -405,6 +432,7 @@ Handle<Code> AssemblerBase::GetCodeTarget(intptr_t code_target_index) const {
void AssemblerBase::UpdateCodeTarget(intptr_t code_target_index, void AssemblerBase::UpdateCodeTarget(intptr_t code_target_index,
Handle<Code> code) { Handle<Code> code) {
DCHECK(!options().v8_agnostic_code);
DCHECK_LE(0, code_target_index); DCHECK_LE(0, code_target_index);
DCHECK_LT(code_target_index, code_targets_.size()); DCHECK_LT(code_target_index, code_targets_.size());
code_targets_[code_target_index] = code; code_targets_[code_target_index] = code;

View File

@ -67,6 +67,7 @@ class Isolate;
class SCTableReference; class SCTableReference;
class SourcePosition; class SourcePosition;
class StatsCounter; class StatsCounter;
class StringConstantBase;
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Optimization for far-jmp like instructions that can be replaced by shorter. // Optimization for far-jmp like instructions that can be replaced by shorter.
@ -97,8 +98,9 @@ class HeapObjectRequest {
public: public:
explicit HeapObjectRequest(double heap_number, int offset = -1); explicit HeapObjectRequest(double heap_number, int offset = -1);
explicit HeapObjectRequest(CodeStub* code_stub, int offset = -1); explicit HeapObjectRequest(CodeStub* code_stub, int offset = -1);
explicit HeapObjectRequest(const StringConstantBase* string, int offset = -1);
enum Kind { kHeapNumber, kCodeStub }; enum Kind { kHeapNumber, kCodeStub, kStringConstant };
Kind kind() const { return kind_; } Kind kind() const { return kind_; }
double heap_number() const { double heap_number() const {
@ -111,6 +113,11 @@ class HeapObjectRequest {
return value_.code_stub; return value_.code_stub;
} }
const StringConstantBase* string() const {
DCHECK_EQ(kind(), kStringConstant);
return value_.string;
}
// The code buffer offset at the time of the request. // The code buffer offset at the time of the request.
int offset() const { int offset() const {
DCHECK_GE(offset_, 0); DCHECK_GE(offset_, 0);
@ -128,6 +135,7 @@ class HeapObjectRequest {
union { union {
double heap_number; double heap_number;
CodeStub* code_stub; CodeStub* code_stub;
const StringConstantBase* string;
} value_; } value_;
int offset_; int offset_;
@ -139,6 +147,9 @@ class HeapObjectRequest {
enum class CodeObjectRequired { kNo, kYes }; enum class CodeObjectRequired { kNo, kYes };
struct V8_EXPORT_PRIVATE AssemblerOptions { struct V8_EXPORT_PRIVATE AssemblerOptions {
// Prohibits using any V8-specific features of assembler like (isolates,
// heap objects, external references, etc.).
bool v8_agnostic_code = false;
// Recording reloc info for external references and off-heap targets is // Recording reloc info for external references and off-heap targets is
// needed whenever code is serialized, e.g. into the snapshot or as a WASM // needed whenever code is serialized, e.g. into the snapshot or as a WASM
// module. This flag allows this reloc info to be disabled for code that // module. This flag allows this reloc info to be disabled for code that
@ -168,6 +179,9 @@ struct V8_EXPORT_PRIVATE AssemblerOptions {
// the instruction immediates. // the instruction immediates.
bool use_pc_relative_calls_and_jumps = false; bool use_pc_relative_calls_and_jumps = false;
// Constructs V8-agnostic set of options from current state.
AssemblerOptions EnableV8AgnosticCode() const;
static AssemblerOptions Default( static AssemblerOptions Default(
Isolate* isolate, bool explicitly_support_serialization = false); Isolate* isolate, bool explicitly_support_serialization = false);
}; };
@ -268,13 +282,23 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced {
} }
} }
// {RequestHeapObject} records the need for a future heap number allocation or // {RequestHeapObject} records the need for a future heap number allocation,
// code stub generation. After code assembly, each platform's // code stub generation or string allocation. After code assembly, each
// {Assembler::AllocateAndInstallRequestedHeapObjects} will allocate these // platform's {Assembler::AllocateAndInstallRequestedHeapObjects} will
// objects and place them where they are expected (determined by the pc offset // allocate these objects and place them where they are expected (determined
// associated with each request). // by the pc offset associated with each request).
void RequestHeapObject(HeapObjectRequest request); void RequestHeapObject(HeapObjectRequest request);
bool ShouldRecordRelocInfo(RelocInfo::Mode rmode) const {
DCHECK(!RelocInfo::IsNone(rmode));
if (options().disable_reloc_info_for_patching) return false;
if (RelocInfo::IsOnlyForSerializer(rmode) &&
!options().record_reloc_info_for_serialization && !emit_debug_code()) {
return false;
}
return true;
}
private: private:
// Before we copy code into the code space, we sometimes cannot encode // Before we copy code into the code space, we sometimes cannot encode
// call/jump code targets as we normally would, as the difference between the // call/jump code targets as we normally would, as the difference between the
@ -301,7 +325,7 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced {
}; };
// Avoids emitting debug code during the lifetime of this scope object. // Avoids emitting debug code during the lifetime of this scope object.
class DontEmitDebugCodeScope BASE_EMBEDDED { class DontEmitDebugCodeScope {
public: public:
explicit DontEmitDebugCodeScope(AssemblerBase* assembler) explicit DontEmitDebugCodeScope(AssemblerBase* assembler)
: assembler_(assembler), old_value_(assembler->emit_debug_code()) { : assembler_(assembler), old_value_(assembler->emit_debug_code()) {
@ -332,7 +356,7 @@ class PredictableCodeSizeScope {
// Enable a specified feature within a scope. // Enable a specified feature within a scope.
class CpuFeatureScope BASE_EMBEDDED { class CpuFeatureScope {
public: public:
enum CheckPolicy { enum CheckPolicy {
kCheckSupported, kCheckSupported,
@ -350,12 +374,12 @@ class CpuFeatureScope BASE_EMBEDDED {
#else #else
CpuFeatureScope(AssemblerBase* assembler, CpuFeature f, CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
CheckPolicy check = kCheckSupported) {} CheckPolicy check = kCheckSupported) {}
// Define a destructor to avoid unused variable warnings. ~CpuFeatureScope() { // NOLINT (modernize-use-equals-default)
~CpuFeatureScope() {} // Define a destructor to avoid unused variable warnings.
}
#endif #endif
}; };
// CpuFeatures keeps track of which features are supported by the target CPU. // CpuFeatures keeps track of which features are supported by the target CPU.
// Supported features must be enabled by a CpuFeatureScope before use. // Supported features must be enabled by a CpuFeatureScope before use.
// Example: // Example:
@ -420,7 +444,7 @@ class CpuFeatures : public AllStatic {
// Utility functions // Utility functions
// Computes pow(x, y) with the special cases in the spec for Math.pow. // Computes pow(x, y) with the special cases in the spec for Math.pow.
double power_helper(Isolate* isolate, double x, double y); double power_helper(double x, double y);
double power_double_int(double x, int y); double power_double_int(double x, int y);
double power_double_double(double x, double y); double power_double_double(double x, double y);
@ -430,7 +454,7 @@ double power_double_double(double x, double y);
class ConstantPoolEntry { class ConstantPoolEntry {
public: public:
ConstantPoolEntry() {} ConstantPoolEntry() = default;
ConstantPoolEntry(int position, intptr_t value, bool sharing_ok, ConstantPoolEntry(int position, intptr_t value, bool sharing_ok,
RelocInfo::Mode rmode = RelocInfo::NONE) RelocInfo::Mode rmode = RelocInfo::NONE)
: position_(position), : position_(position),
@ -447,7 +471,7 @@ class ConstantPoolEntry {
int position() const { return position_; } int position() const { return position_; }
bool sharing_ok() const { return merged_index_ != SHARING_PROHIBITED; } bool sharing_ok() const { return merged_index_ != SHARING_PROHIBITED; }
bool is_merged() const { return merged_index_ >= 0; } bool is_merged() const { return merged_index_ >= 0; }
int merged_index(void) const { int merged_index() const {
DCHECK(is_merged()); DCHECK(is_merged());
return merged_index_; return merged_index_;
} }
@ -456,7 +480,7 @@ class ConstantPoolEntry {
merged_index_ = index; merged_index_ = index;
DCHECK(is_merged()); DCHECK(is_merged());
} }
int offset(void) const { int offset() const {
DCHECK_GE(merged_index_, 0); DCHECK_GE(merged_index_, 0);
return merged_index_; return merged_index_;
} }
@ -493,7 +517,7 @@ class ConstantPoolEntry {
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Embedded constant pool support // Embedded constant pool support
class ConstantPoolBuilder BASE_EMBEDDED { class ConstantPoolBuilder {
public: public:
ConstantPoolBuilder(int ptr_reach_bits, int double_reach_bits); ConstantPoolBuilder(int ptr_reach_bits, int double_reach_bits);

View File

@ -77,7 +77,9 @@ class PerThreadAssertScopeDebugOnly : public
#else #else
class PerThreadAssertScopeDebugOnly { class PerThreadAssertScopeDebugOnly {
public: public:
PerThreadAssertScopeDebugOnly() { } PerThreadAssertScopeDebugOnly() { // NOLINT (modernize-use-equals-default)
// Define a constructor to avoid unused variable warnings.
}
void Release() {} void Release() {}
#endif #endif
}; };

View File

@ -14,7 +14,7 @@ AstFunctionLiteralIdReindexer::AstFunctionLiteralIdReindexer(size_t stack_limit,
int delta) int delta)
: AstTraversalVisitor(stack_limit), delta_(delta) {} : AstTraversalVisitor(stack_limit), delta_(delta) {}
AstFunctionLiteralIdReindexer::~AstFunctionLiteralIdReindexer() {} AstFunctionLiteralIdReindexer::~AstFunctionLiteralIdReindexer() = default;
void AstFunctionLiteralIdReindexer::Reindex(Expression* pattern) { void AstFunctionLiteralIdReindexer::Reindex(Expression* pattern) {
Visit(pattern); Visit(pattern);

View File

@ -21,8 +21,9 @@ struct SourceRange {
static SourceRange OpenEnded(int32_t start) { static SourceRange OpenEnded(int32_t start) {
return SourceRange(start, kNoSourcePosition); return SourceRange(start, kNoSourcePosition);
} }
static SourceRange ContinuationOf(const SourceRange& that) { static SourceRange ContinuationOf(const SourceRange& that,
return that.IsEmpty() ? Empty() : OpenEnded(that.end); int end = kNoSourcePosition) {
return that.IsEmpty() ? Empty() : SourceRange(that.end, end);
} }
int32_t start, end; int32_t start, end;
}; };
@ -56,7 +57,7 @@ enum class SourceRangeKind {
class AstNodeSourceRanges : public ZoneObject { class AstNodeSourceRanges : public ZoneObject {
public: public:
virtual ~AstNodeSourceRanges() {} virtual ~AstNodeSourceRanges() = default;
virtual SourceRange GetRange(SourceRangeKind kind) = 0; virtual SourceRange GetRange(SourceRangeKind kind) = 0;
}; };
@ -65,7 +66,7 @@ class BinaryOperationSourceRanges final : public AstNodeSourceRanges {
explicit BinaryOperationSourceRanges(const SourceRange& right_range) explicit BinaryOperationSourceRanges(const SourceRange& right_range)
: right_range_(right_range) {} : right_range_(right_range) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
DCHECK_EQ(kind, SourceRangeKind::kRight); DCHECK_EQ(kind, SourceRangeKind::kRight);
return right_range_; return right_range_;
} }
@ -79,7 +80,7 @@ class ContinuationSourceRanges : public AstNodeSourceRanges {
explicit ContinuationSourceRanges(int32_t continuation_position) explicit ContinuationSourceRanges(int32_t continuation_position)
: continuation_position_(continuation_position) {} : continuation_position_(continuation_position) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
DCHECK_EQ(kind, SourceRangeKind::kContinuation); DCHECK_EQ(kind, SourceRangeKind::kContinuation);
return SourceRange::OpenEnded(continuation_position_); return SourceRange::OpenEnded(continuation_position_);
} }
@ -99,7 +100,7 @@ class CaseClauseSourceRanges final : public AstNodeSourceRanges {
explicit CaseClauseSourceRanges(const SourceRange& body_range) explicit CaseClauseSourceRanges(const SourceRange& body_range)
: body_range_(body_range) {} : body_range_(body_range) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
DCHECK_EQ(kind, SourceRangeKind::kBody); DCHECK_EQ(kind, SourceRangeKind::kBody);
return body_range_; return body_range_;
} }
@ -114,7 +115,7 @@ class ConditionalSourceRanges final : public AstNodeSourceRanges {
const SourceRange& else_range) const SourceRange& else_range)
: then_range_(then_range), else_range_(else_range) {} : then_range_(then_range), else_range_(else_range) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
switch (kind) { switch (kind) {
case SourceRangeKind::kThen: case SourceRangeKind::kThen:
return then_range_; return then_range_;
@ -136,7 +137,7 @@ class IfStatementSourceRanges final : public AstNodeSourceRanges {
const SourceRange& else_range) const SourceRange& else_range)
: then_range_(then_range), else_range_(else_range) {} : then_range_(then_range), else_range_(else_range) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
switch (kind) { switch (kind) {
case SourceRangeKind::kElse: case SourceRangeKind::kElse:
return else_range_; return else_range_;
@ -162,7 +163,7 @@ class IterationStatementSourceRanges final : public AstNodeSourceRanges {
explicit IterationStatementSourceRanges(const SourceRange& body_range) explicit IterationStatementSourceRanges(const SourceRange& body_range)
: body_range_(body_range) {} : body_range_(body_range) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
switch (kind) { switch (kind) {
case SourceRangeKind::kBody: case SourceRangeKind::kBody:
return body_range_; return body_range_;
@ -198,7 +199,7 @@ class NaryOperationSourceRanges final : public AstNodeSourceRanges {
void AddRange(const SourceRange& range) { ranges_.push_back(range); } void AddRange(const SourceRange& range) { ranges_.push_back(range); }
size_t RangeCount() const { return ranges_.size(); } size_t RangeCount() const { return ranges_.size(); }
SourceRange GetRange(SourceRangeKind kind) { UNREACHABLE(); } SourceRange GetRange(SourceRangeKind kind) override { UNREACHABLE(); }
private: private:
ZoneVector<SourceRange> ranges_; ZoneVector<SourceRange> ranges_;
@ -227,7 +228,7 @@ class TryCatchStatementSourceRanges final : public AstNodeSourceRanges {
explicit TryCatchStatementSourceRanges(const SourceRange& catch_range) explicit TryCatchStatementSourceRanges(const SourceRange& catch_range)
: catch_range_(catch_range) {} : catch_range_(catch_range) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
switch (kind) { switch (kind) {
case SourceRangeKind::kCatch: case SourceRangeKind::kCatch:
return catch_range_; return catch_range_;
@ -247,7 +248,7 @@ class TryFinallyStatementSourceRanges final : public AstNodeSourceRanges {
explicit TryFinallyStatementSourceRanges(const SourceRange& finally_range) explicit TryFinallyStatementSourceRanges(const SourceRange& finally_range)
: finally_range_(finally_range) {} : finally_range_(finally_range) {}
SourceRange GetRange(SourceRangeKind kind) { SourceRange GetRange(SourceRangeKind kind) override {
switch (kind) { switch (kind) {
case SourceRangeKind::kFinally: case SourceRangeKind::kFinally:
return finally_range_; return finally_range_;

View File

@ -242,6 +242,17 @@ const AstRawString* AstValueFactory::GetString(Handle<String> literal) {
return result; return result;
} }
const AstRawString* AstValueFactory::CloneFromOtherFactory(
const AstRawString* raw_string) {
const AstRawString* result = GetString(
raw_string->hash_field(), raw_string->is_one_byte(),
Vector<const byte>(raw_string->raw_data(), raw_string->byte_length()));
// Check we weren't trying to clone a string that was already in this
// ast-value-factory.
DCHECK_NE(result, raw_string);
return result;
}
AstConsString* AstValueFactory::NewConsString() { AstConsString* AstValueFactory::NewConsString() {
AstConsString* new_string = new (zone_) AstConsString; AstConsString* new_string = new (zone_) AstConsString;
DCHECK_NOT_NULL(new_string); DCHECK_NOT_NULL(new_string);

View File

@ -194,48 +194,49 @@ class AstBigInt {
}; };
// For generating constants. // For generating constants.
#define AST_STRING_CONSTANTS(F) \ #define AST_STRING_CONSTANTS(F) \
F(anonymous_function, "(anonymous function)") \ F(anonymous_function, "(anonymous function)") \
F(arguments, "arguments") \ F(arguments, "arguments") \
F(async, "async") \ F(async, "async") \
F(await, "await") \ F(await, "await") \
F(bigint, "bigint") \ F(bigint, "bigint") \
F(boolean, "boolean") \ F(boolean, "boolean") \
F(constructor, "constructor") \ F(constructor, "constructor") \
F(default, "default") \ F(default, "default") \
F(done, "done") \ F(done, "done") \
F(dot, ".") \ F(dot, ".") \
F(dot_for, ".for") \ F(dot_for, ".for") \
F(dot_generator_object, ".generator_object") \ F(dot_generator_object, ".generator_object") \
F(dot_iterator, ".iterator") \ F(dot_iterator, ".iterator") \
F(dot_result, ".result") \ F(dot_promise, ".promise") \
F(dot_switch_tag, ".switch_tag") \ F(dot_result, ".result") \
F(dot_catch, ".catch") \ F(dot_switch_tag, ".switch_tag") \
F(empty, "") \ F(dot_catch, ".catch") \
F(eval, "eval") \ F(empty, "") \
F(function, "function") \ F(eval, "eval") \
F(get_space, "get ") \ F(function, "function") \
F(length, "length") \ F(get_space, "get ") \
F(let, "let") \ F(length, "length") \
F(name, "name") \ F(let, "let") \
F(native, "native") \ F(name, "name") \
F(new_target, ".new.target") \ F(native, "native") \
F(next, "next") \ F(new_target, ".new.target") \
F(number, "number") \ F(next, "next") \
F(object, "object") \ F(number, "number") \
F(proto, "__proto__") \ F(object, "object") \
F(prototype, "prototype") \ F(proto, "__proto__") \
F(return, "return") \ F(prototype, "prototype") \
F(set_space, "set ") \ F(return, "return") \
F(star_default_star, "*default*") \ F(set_space, "set ") \
F(string, "string") \ F(star_default_star, "*default*") \
F(symbol, "symbol") \ F(string, "string") \
F(this, "this") \ F(symbol, "symbol") \
F(this_function, ".this_function") \ F(this, "this") \
F(throw, "throw") \ F(this_function, ".this_function") \
F(undefined, "undefined") \ F(throw, "throw") \
F(use_asm, "use asm") \ F(undefined, "undefined") \
F(use_strict, "use strict") \ F(use_asm, "use asm") \
F(use_strict, "use strict") \
F(value, "value") F(value, "value")
class AstStringConstants final { class AstStringConstants final {
@ -297,10 +298,15 @@ class AstValueFactory {
return GetTwoByteStringInternal(literal); return GetTwoByteStringInternal(literal);
} }
const AstRawString* GetString(Handle<String> literal); const AstRawString* GetString(Handle<String> literal);
// Clones an AstRawString from another ast value factory, adding it to this
// factory and returning the clone.
const AstRawString* CloneFromOtherFactory(const AstRawString* raw_string);
V8_EXPORT_PRIVATE AstConsString* NewConsString(); V8_EXPORT_PRIVATE AstConsString* NewConsString();
AstConsString* NewConsString(const AstRawString* str); V8_EXPORT_PRIVATE AstConsString* NewConsString(const AstRawString* str);
AstConsString* NewConsString(const AstRawString* str1, V8_EXPORT_PRIVATE AstConsString* NewConsString(const AstRawString* str1,
const AstRawString* str2); const AstRawString* str2);
V8_EXPORT_PRIVATE void Internalize(Isolate* isolate); V8_EXPORT_PRIVATE void Internalize(Isolate* isolate);

View File

@ -551,12 +551,6 @@ bool ObjectLiteral::IsFastCloningSupported() const {
ConstructorBuiltins::kMaximumClonedShallowObjectProperties; ConstructorBuiltins::kMaximumClonedShallowObjectProperties;
} }
bool ArrayLiteral::is_empty() const {
DCHECK(is_initialized());
return values()->is_empty() && (boilerplate_description().is_null() ||
boilerplate_description()->is_empty());
}
int ArrayLiteral::InitDepthAndFlags() { int ArrayLiteral::InitDepthAndFlags() {
if (is_initialized()) return depth(); if (is_initialized()) return depth();

38
deps/v8/src/ast/ast.h vendored
View File

@ -383,7 +383,7 @@ class DoExpression final : public Expression {
class Declaration : public AstNode { class Declaration : public AstNode {
public: public:
typedef ThreadedList<Declaration> List; typedef base::ThreadedList<Declaration> List;
VariableProxy* proxy() const { return proxy_; } VariableProxy* proxy() const { return proxy_; }
@ -397,6 +397,7 @@ class Declaration : public AstNode {
Declaration** next() { return &next_; } Declaration** next() { return &next_; }
Declaration* next_; Declaration* next_;
friend List; friend List;
friend base::ThreadedListTraits<Declaration>;
}; };
class VariableDeclaration : public Declaration { class VariableDeclaration : public Declaration {
@ -1477,8 +1478,6 @@ class ArrayLiteral final : public AggregateLiteral {
int first_spread_index() const { return first_spread_index_; } int first_spread_index() const { return first_spread_index_; }
bool is_empty() const;
// Populate the depth field and flags, returns the depth. // Populate the depth field and flags, returns the depth.
int InitDepthAndFlags(); int InitDepthAndFlags();
@ -1578,8 +1577,15 @@ class VariableProxy final : public Expression {
// Bind this proxy to the variable var. // Bind this proxy to the variable var.
void BindTo(Variable* var); void BindTo(Variable* var);
void set_next_unresolved(VariableProxy* next) { next_unresolved_ = next; } V8_INLINE VariableProxy* next_unresolved() { return next_unresolved_; }
VariableProxy* next_unresolved() { return next_unresolved_; }
// Provides an access type for the ThreadedList used by the PreParsers
// expressions, lists, and formal parameters.
struct PreParserNext {
static VariableProxy** next(VariableProxy* t) {
return t->pre_parser_expr_next();
}
};
private: private:
friend class AstNodeFactory; friend class AstNodeFactory;
@ -1590,7 +1596,8 @@ class VariableProxy final : public Expression {
int start_position) int start_position)
: Expression(start_position, kVariableProxy), : Expression(start_position, kVariableProxy),
raw_name_(name), raw_name_(name),
next_unresolved_(nullptr) { next_unresolved_(nullptr),
pre_parser_expr_next_(nullptr) {
bit_field_ |= IsThisField::encode(variable_kind == THIS_VARIABLE) | bit_field_ |= IsThisField::encode(variable_kind == THIS_VARIABLE) |
IsAssignedField::encode(false) | IsAssignedField::encode(false) |
IsResolvedField::encode(false) | IsResolvedField::encode(false) |
@ -1613,9 +1620,15 @@ class VariableProxy final : public Expression {
const AstRawString* raw_name_; // if !is_resolved_ const AstRawString* raw_name_; // if !is_resolved_
Variable* var_; // if is_resolved_ Variable* var_; // if is_resolved_
}; };
VariableProxy* next_unresolved_;
};
V8_INLINE VariableProxy** next() { return &next_unresolved_; }
VariableProxy* next_unresolved_;
VariableProxy** pre_parser_expr_next() { return &pre_parser_expr_next_; }
VariableProxy* pre_parser_expr_next_;
friend base::ThreadedListTraits<VariableProxy>;
};
// Left-hand side can only be a property, a global or a (parameter or local) // Left-hand side can only be a property, a global or a (parameter or local)
// slot. // slot.
@ -2248,7 +2261,7 @@ class FunctionLiteral final : public Expression {
void mark_as_iife() { bit_field_ = IIFEBit::update(bit_field_, true); } void mark_as_iife() { bit_field_ = IIFEBit::update(bit_field_, true); }
bool is_iife() const { return IIFEBit::decode(bit_field_); } bool is_iife() const { return IIFEBit::decode(bit_field_); }
bool is_top_level() const { bool is_toplevel() const {
return function_literal_id() == FunctionLiteral::kIdTypeTopLevel; return function_literal_id() == FunctionLiteral::kIdTypeTopLevel;
} }
bool is_wrapped() const { return function_type() == kWrapped; } bool is_wrapped() const { return function_type() == kWrapped; }
@ -2308,7 +2321,7 @@ class FunctionLiteral final : public Expression {
// - (function() { ... })(); // - (function() { ... })();
// - var x = function() { ... }(); // - var x = function() { ... }();
bool ShouldEagerCompile() const; bool ShouldEagerCompile() const;
void SetShouldEagerCompile(); V8_EXPORT_PRIVATE void SetShouldEagerCompile();
FunctionType function_type() const { FunctionType function_type() const {
return FunctionTypeBits::decode(bit_field_); return FunctionTypeBits::decode(bit_field_);
@ -2736,7 +2749,7 @@ class TemplateLiteral final : public Expression {
// class SpecificVisitor : public AstVisitor<SpecificVisitor> { ... } // class SpecificVisitor : public AstVisitor<SpecificVisitor> { ... }
template <class Subclass> template <class Subclass>
class AstVisitor BASE_EMBEDDED { class AstVisitor {
public: public:
void Visit(AstNode* node) { impl()->Visit(node); } void Visit(AstNode* node) { impl()->Visit(node); }
@ -2823,7 +2836,7 @@ class AstVisitor BASE_EMBEDDED {
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// AstNode factory // AstNode factory
class AstNodeFactory final BASE_EMBEDDED { class AstNodeFactory final {
public: public:
AstNodeFactory(AstValueFactory* ast_value_factory, Zone* zone) AstNodeFactory(AstValueFactory* ast_value_factory, Zone* zone)
: zone_(zone), ast_value_factory_(ast_value_factory) {} : zone_(zone), ast_value_factory_(ast_value_factory) {}
@ -3330,7 +3343,6 @@ class AstNodeFactory final BASE_EMBEDDED {
} }
Zone* zone() const { return zone_; } Zone* zone() const { return zone_; }
void set_zone(Zone* zone) { zone_ = zone; }
private: private:
// This zone may be deallocated upon returning from parsing a function body // This zone may be deallocated upon returning from parsing a function body

View File

@ -31,7 +31,7 @@ CallPrinter::CallPrinter(Isolate* isolate, bool is_user_js)
InitializeAstVisitor(isolate); InitializeAstVisitor(isolate);
} }
CallPrinter::~CallPrinter() {} CallPrinter::~CallPrinter() = default;
CallPrinter::ErrorHint CallPrinter::GetErrorHint() const { CallPrinter::ErrorHint CallPrinter::GetErrorHint() const {
if (is_call_error_) { if (is_call_error_) {
@ -666,7 +666,7 @@ void AstPrinter::PrintLiteral(const AstConsString* value, bool quote) {
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
class IndentedScope BASE_EMBEDDED { class IndentedScope {
public: public:
IndentedScope(AstPrinter* printer, const char* txt) IndentedScope(AstPrinter* printer, const char* txt)
: ast_printer_(printer) { : ast_printer_(printer) {

66
deps/v8/src/ast/scopes-inl.h vendored Normal file
View File

@ -0,0 +1,66 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_AST_SCOPES_INL_H_
#define V8_AST_SCOPES_INL_H_
#include "src/ast/scopes.h"
namespace v8 {
namespace internal {
template <typename T>
void Scope::ResolveScopesThenForEachVariable(DeclarationScope* max_outer_scope,
T variable_proxy_stackvisitor,
ParseInfo* info) {
// Module variables must be allocated before variable resolution
// to ensure that UpdateNeedsHoleCheck() can detect import variables.
if (info != nullptr && is_module_scope()) {
AsModuleScope()->AllocateModuleVariables();
}
// Lazy parsed declaration scopes are already partially analyzed. If there are
// unresolved references remaining, they just need to be resolved in outer
// scopes.
Scope* lookup =
is_declaration_scope() && AsDeclarationScope()->was_lazily_parsed()
? outer_scope()
: this;
for (VariableProxy *proxy = unresolved_list_.first(), *next = nullptr;
proxy != nullptr; proxy = next) {
next = proxy->next_unresolved();
DCHECK(!proxy->is_resolved());
Variable* var =
lookup->LookupRecursive(info, proxy, max_outer_scope->outer_scope());
if (var == nullptr) {
variable_proxy_stackvisitor(proxy);
} else if (var != Scope::kDummyPreParserVariable &&
var != Scope::kDummyPreParserLexicalVariable) {
if (info != nullptr) {
// In this case we need to leave scopes in a way that they can be
// allocated. If we resolved variables from lazy parsed scopes, we need
// to context allocate the var.
ResolveTo(info, proxy, var);
if (!var->is_dynamic() && lookup != this) var->ForceContextAllocation();
} else {
var->set_is_used();
if (proxy->is_assigned()) var->set_maybe_assigned();
}
}
}
// Clear unresolved_list_ as it's in an inconsistent state.
unresolved_list_.Clear();
for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
scope->ResolveScopesThenForEachVariable(max_outer_scope,
variable_proxy_stackvisitor, info);
}
}
} // namespace internal
} // namespace v8
#endif // V8_AST_SCOPES_INL_H_

View File

@ -8,6 +8,7 @@
#include "src/accessors.h" #include "src/accessors.h"
#include "src/ast/ast.h" #include "src/ast/ast.h"
#include "src/ast/scopes-inl.h"
#include "src/base/optional.h" #include "src/base/optional.h"
#include "src/bootstrapper.h" #include "src/bootstrapper.h"
#include "src/counters.h" #include "src/counters.h"
@ -23,15 +24,11 @@ namespace v8 {
namespace internal { namespace internal {
namespace { namespace {
void* kDummyPreParserVariable = reinterpret_cast<void*>(0x1);
void* kDummyPreParserLexicalVariable = reinterpret_cast<void*>(0x2);
bool IsLexical(Variable* variable) { bool IsLexical(Variable* variable) {
if (variable == kDummyPreParserLexicalVariable) return true; if (variable == Scope::kDummyPreParserLexicalVariable) return true;
if (variable == kDummyPreParserVariable) return false; if (variable == Scope::kDummyPreParserVariable) return false;
return IsLexicalVariableMode(variable->mode()); return IsLexicalVariableMode(variable->mode());
} }
} // namespace } // namespace
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@ -76,8 +73,9 @@ Variable* VariableMap::DeclareName(Zone* zone, const AstRawString* name,
if (p->value == nullptr) { if (p->value == nullptr) {
// The variable has not been declared yet -> insert it. // The variable has not been declared yet -> insert it.
DCHECK_EQ(name, p->key); DCHECK_EQ(name, p->key);
p->value = mode == VariableMode::kVar ? kDummyPreParserVariable p->value = mode == VariableMode::kVar
: kDummyPreParserLexicalVariable; ? Scope::kDummyPreParserVariable
: Scope::kDummyPreParserLexicalVariable;
} }
return reinterpret_cast<Variable*>(p->value); return reinterpret_cast<Variable*>(p->value);
} }
@ -154,7 +152,7 @@ Scope::Scope(Zone* zone, Scope* outer_scope, ScopeType scope_type)
Scope::Snapshot::Snapshot(Scope* scope) Scope::Snapshot::Snapshot(Scope* scope)
: outer_scope_(scope), : outer_scope_(scope),
top_inner_scope_(scope->inner_scope_), top_inner_scope_(scope->inner_scope_),
top_unresolved_(scope->unresolved_), top_unresolved_(scope->unresolved_list_.first()),
top_local_(scope->GetClosureScope()->locals_.end()), top_local_(scope->GetClosureScope()->locals_.end()),
top_decl_(scope->GetClosureScope()->decls_.end()), top_decl_(scope->GetClosureScope()->decls_.end()),
outer_scope_calls_eval_(scope->scope_calls_eval_) { outer_scope_calls_eval_(scope->scope_calls_eval_) {
@ -310,6 +308,8 @@ void DeclarationScope::SetDefaults() {
has_arguments_parameter_ = false; has_arguments_parameter_ = false;
scope_uses_super_property_ = false; scope_uses_super_property_ = false;
has_rest_ = false; has_rest_ = false;
has_promise_ = false;
has_generator_object_ = false;
sloppy_block_function_map_ = nullptr; sloppy_block_function_map_ = nullptr;
receiver_ = nullptr; receiver_ = nullptr;
new_target_ = nullptr; new_target_ = nullptr;
@ -319,7 +319,7 @@ void DeclarationScope::SetDefaults() {
should_eager_compile_ = false; should_eager_compile_ = false;
was_lazily_parsed_ = false; was_lazily_parsed_ = false;
is_skipped_function_ = false; is_skipped_function_ = false;
produced_preparsed_scope_data_ = nullptr; preparsed_scope_data_builder_ = nullptr;
#ifdef DEBUG #ifdef DEBUG
DeclarationScope* outer_declaration_scope = DeclarationScope* outer_declaration_scope =
outer_scope_ ? outer_scope_->GetDeclarationScope() : nullptr; outer_scope_ ? outer_scope_->GetDeclarationScope() : nullptr;
@ -337,7 +337,7 @@ void Scope::SetDefaults() {
#endif #endif
inner_scope_ = nullptr; inner_scope_ = nullptr;
sibling_ = nullptr; sibling_ = nullptr;
unresolved_ = nullptr; unresolved_list_.Clear();
start_position_ = kNoSourcePosition; start_position_ = kNoSourcePosition;
end_position_ = kNoSourcePosition; end_position_ = kNoSourcePosition;
@ -779,6 +779,7 @@ Variable* DeclarationScope::DeclareGeneratorObjectVar(
Variable* result = EnsureRareData()->generator_object = Variable* result = EnsureRareData()->generator_object =
NewTemporary(name, kNotAssigned); NewTemporary(name, kNotAssigned);
result->set_is_used(); result->set_is_used();
has_generator_object_ = true;
return result; return result;
} }
@ -787,6 +788,7 @@ Variable* DeclarationScope::DeclarePromiseVar(const AstRawString* name) {
DCHECK_NULL(promise_var()); DCHECK_NULL(promise_var());
Variable* result = EnsureRareData()->promise = NewTemporary(name); Variable* result = EnsureRareData()->promise = NewTemporary(name);
result->set_is_used(); result->set_is_used();
has_promise_ = true;
return result; return result;
} }
@ -834,16 +836,9 @@ Scope* Scope::FinalizeBlockScope() {
} }
// Move unresolved variables // Move unresolved variables
if (unresolved_ != nullptr) { if (!unresolved_list_.is_empty()) {
if (outer_scope()->unresolved_ != nullptr) { outer_scope()->unresolved_list_.Prepend(std::move(unresolved_list_));
VariableProxy* unresolved = unresolved_; unresolved_list_.Clear();
while (unresolved->next_unresolved() != nullptr) {
unresolved = unresolved->next_unresolved();
}
unresolved->set_next_unresolved(outer_scope()->unresolved_);
}
outer_scope()->unresolved_ = unresolved_;
unresolved_ = nullptr;
} }
if (inner_scope_calls_eval_) outer_scope()->inner_scope_calls_eval_ = true; if (inner_scope_calls_eval_) outer_scope()->inner_scope_calls_eval_ = true;
@ -887,7 +882,7 @@ void Scope::Snapshot::Reparent(DeclarationScope* new_parent) const {
DCHECK_EQ(new_parent->outer_scope_, outer_scope_); DCHECK_EQ(new_parent->outer_scope_, outer_scope_);
DCHECK_EQ(new_parent, new_parent->GetClosureScope()); DCHECK_EQ(new_parent, new_parent->GetClosureScope());
DCHECK_NULL(new_parent->inner_scope_); DCHECK_NULL(new_parent->inner_scope_);
DCHECK_NULL(new_parent->unresolved_); DCHECK(new_parent->unresolved_list_.is_empty());
DCHECK(new_parent->locals_.is_empty()); DCHECK(new_parent->locals_.is_empty());
Scope* inner_scope = new_parent->sibling_; Scope* inner_scope = new_parent->sibling_;
if (inner_scope != top_inner_scope_) { if (inner_scope != top_inner_scope_) {
@ -910,14 +905,21 @@ void Scope::Snapshot::Reparent(DeclarationScope* new_parent) const {
new_parent->sibling_ = top_inner_scope_; new_parent->sibling_ = top_inner_scope_;
} }
if (outer_scope_->unresolved_ != top_unresolved_) { if (outer_scope_->unresolved_list_.first() != top_unresolved_) {
VariableProxy* last = outer_scope_->unresolved_; // If the marked VariableProxy (snapshoted) is not the first, we need to
while (last->next_unresolved() != top_unresolved_) { // find it and move all VariableProxys up to that point into the new_parent,
last = last->next_unresolved(); // then we restore the snapshoted state by reinitializing the outer_scope
// list.
{
auto iter = outer_scope_->unresolved_list_.begin();
while (*iter != top_unresolved_) {
++iter;
}
outer_scope_->unresolved_list_.Rewind(iter);
} }
last->set_next_unresolved(nullptr);
new_parent->unresolved_ = outer_scope_->unresolved_; new_parent->unresolved_list_ = std::move(outer_scope_->unresolved_list_);
outer_scope_->unresolved_ = top_unresolved_; outer_scope_->unresolved_list_.ReinitializeHead(top_unresolved_);
} }
// TODO(verwaest): This currently only moves do-expression declared variables // TODO(verwaest): This currently only moves do-expression declared variables
@ -1261,8 +1263,7 @@ void Scope::DeclareCatchVariableName(const AstRawString* name) {
void Scope::AddUnresolved(VariableProxy* proxy) { void Scope::AddUnresolved(VariableProxy* proxy) {
DCHECK(!already_resolved_); DCHECK(!already_resolved_);
DCHECK(!proxy->is_resolved()); DCHECK(!proxy->is_resolved());
proxy->set_next_unresolved(unresolved_); unresolved_list_.AddFront(proxy);
unresolved_ = proxy;
} }
Variable* DeclarationScope::DeclareDynamicGlobal(const AstRawString* name, Variable* DeclarationScope::DeclareDynamicGlobal(const AstRawString* name,
@ -1274,22 +1275,7 @@ Variable* DeclarationScope::DeclareDynamicGlobal(const AstRawString* name,
} }
bool Scope::RemoveUnresolved(VariableProxy* var) { bool Scope::RemoveUnresolved(VariableProxy* var) {
if (unresolved_ == var) { return unresolved_list_.Remove(var);
unresolved_ = var->next_unresolved();
var->set_next_unresolved(nullptr);
return true;
}
VariableProxy* current = unresolved_;
while (current != nullptr) {
VariableProxy* next = current->next_unresolved();
if (var == next) {
current->set_next_unresolved(next->next_unresolved());
var->set_next_unresolved(nullptr);
return true;
}
current = next;
}
return false;
} }
Variable* Scope::NewTemporary(const AstRawString* name) { Variable* Scope::NewTemporary(const AstRawString* name) {
@ -1483,11 +1469,12 @@ Scope* Scope::GetOuterScopeWithContext() {
Handle<StringSet> DeclarationScope::CollectNonLocals( Handle<StringSet> DeclarationScope::CollectNonLocals(
Isolate* isolate, ParseInfo* info, Handle<StringSet> non_locals) { Isolate* isolate, ParseInfo* info, Handle<StringSet> non_locals) {
VariableProxy* free_variables = FetchFreeVariables(this, info); ResolveScopesThenForEachVariable(this,
for (VariableProxy* proxy = free_variables; proxy != nullptr; [=, &non_locals](VariableProxy* proxy) {
proxy = proxy->next_unresolved()) { non_locals = StringSet::Add(
non_locals = StringSet::Add(isolate, non_locals, proxy->name()); isolate, non_locals, proxy->name());
} },
info);
return non_locals; return non_locals;
} }
@ -1504,10 +1491,15 @@ void DeclarationScope::ResetAfterPreparsing(AstValueFactory* ast_value_factory,
decls_.Clear(); decls_.Clear();
locals_.Clear(); locals_.Clear();
inner_scope_ = nullptr; inner_scope_ = nullptr;
unresolved_ = nullptr; unresolved_list_.Clear();
sloppy_block_function_map_ = nullptr; sloppy_block_function_map_ = nullptr;
rare_data_ = nullptr; rare_data_ = nullptr;
has_rest_ = false; has_rest_ = false;
has_promise_ = false;
has_generator_object_ = false;
DCHECK_NE(zone_, ast_value_factory->zone());
zone_->ReleaseMemory();
if (aborted) { if (aborted) {
// Prepare scope for use in the outer zone. // Prepare scope for use in the outer zone.
@ -1532,7 +1524,7 @@ void DeclarationScope::ResetAfterPreparsing(AstValueFactory* ast_value_factory,
void Scope::SavePreParsedScopeData() { void Scope::SavePreParsedScopeData() {
DCHECK(FLAG_preparser_scope_analysis); DCHECK(FLAG_preparser_scope_analysis);
if (ProducedPreParsedScopeData::ScopeIsSkippableFunctionScope(this)) { if (PreParsedScopeDataBuilder::ScopeIsSkippableFunctionScope(this)) {
AsDeclarationScope()->SavePreParsedScopeDataForDeclarationScope(); AsDeclarationScope()->SavePreParsedScopeDataForDeclarationScope();
} }
@ -1542,30 +1534,33 @@ void Scope::SavePreParsedScopeData() {
} }
void DeclarationScope::SavePreParsedScopeDataForDeclarationScope() { void DeclarationScope::SavePreParsedScopeDataForDeclarationScope() {
if (produced_preparsed_scope_data_ != nullptr) { if (preparsed_scope_data_builder_ != nullptr) {
DCHECK(FLAG_preparser_scope_analysis); DCHECK(FLAG_preparser_scope_analysis);
produced_preparsed_scope_data_->SaveScopeAllocationData(this); preparsed_scope_data_builder_->SaveScopeAllocationData(this);
} }
} }
void DeclarationScope::AnalyzePartially(AstNodeFactory* ast_node_factory) { void DeclarationScope::AnalyzePartially(AstNodeFactory* ast_node_factory) {
DCHECK(!force_eager_compilation_); DCHECK(!force_eager_compilation_);
VariableProxy* unresolved = nullptr; base::ThreadedList<VariableProxy> new_unresolved_list;
if (!IsArrowFunction(function_kind_) &&
if (!outer_scope_->is_script_scope() || (!outer_scope_->is_script_scope() ||
(FLAG_preparser_scope_analysis && (FLAG_preparser_scope_analysis &&
produced_preparsed_scope_data_ != nullptr && preparsed_scope_data_builder_ != nullptr &&
produced_preparsed_scope_data_->ContainsInnerFunctions())) { preparsed_scope_data_builder_->ContainsInnerFunctions()))) {
// Try to resolve unresolved variables for this Scope and migrate those // Try to resolve unresolved variables for this Scope and migrate those
// which cannot be resolved inside. It doesn't make sense to try to resolve // which cannot be resolved inside. It doesn't make sense to try to resolve
// them in the outer Scopes here, because they are incomplete. // them in the outer Scopes here, because they are incomplete.
for (VariableProxy* proxy = FetchFreeVariables(this); proxy != nullptr; ResolveScopesThenForEachVariable(
proxy = proxy->next_unresolved()) { this, [=, &new_unresolved_list](VariableProxy* proxy) {
DCHECK(!proxy->is_resolved()); // Don't copy unresolved references to the script scope, unless it's a
VariableProxy* copy = ast_node_factory->CopyVariableProxy(proxy); // reference to a private field. In that case keep it so we can fail
copy->set_next_unresolved(unresolved); // later.
unresolved = copy; if (!outer_scope_->is_script_scope() || proxy->is_private_field()) {
} VariableProxy* copy = ast_node_factory->CopyVariableProxy(proxy);
new_unresolved_list.AddFront(copy);
}
});
// Migrate function_ to the right Zone. // Migrate function_ to the right Zone.
if (function_ != nullptr) { if (function_ != nullptr) {
@ -1586,7 +1581,7 @@ void DeclarationScope::AnalyzePartially(AstNodeFactory* ast_node_factory) {
ResetAfterPreparsing(ast_node_factory->ast_value_factory(), false); ResetAfterPreparsing(ast_node_factory->ast_value_factory(), false);
unresolved_ = unresolved; unresolved_list_ = std::move(new_unresolved_list);
} }
#ifdef DEBUG #ifdef DEBUG
@ -1673,8 +1668,8 @@ void PrintMap(int indent, const char* label, VariableMap* map, bool locals,
for (VariableMap::Entry* p = map->Start(); p != nullptr; p = map->Next(p)) { for (VariableMap::Entry* p = map->Start(); p != nullptr; p = map->Next(p)) {
Variable* var = reinterpret_cast<Variable*>(p->value); Variable* var = reinterpret_cast<Variable*>(p->value);
if (var == function_var) continue; if (var == function_var) continue;
if (var == kDummyPreParserVariable || if (var == Scope::kDummyPreParserVariable ||
var == kDummyPreParserLexicalVariable) { var == Scope::kDummyPreParserLexicalVariable) {
continue; continue;
} }
bool local = !IsDynamicVariableMode(var->mode()); bool local = !IsDynamicVariableMode(var->mode());
@ -2045,8 +2040,7 @@ bool Scope::ResolveVariablesRecursively(ParseInfo* info) {
// scopes. // scopes.
if (is_declaration_scope() && AsDeclarationScope()->was_lazily_parsed()) { if (is_declaration_scope() && AsDeclarationScope()->was_lazily_parsed()) {
DCHECK_EQ(variables_.occupancy(), 0); DCHECK_EQ(variables_.occupancy(), 0);
for (VariableProxy* proxy = unresolved_; proxy != nullptr; for (VariableProxy* proxy : unresolved_list_) {
proxy = proxy->next_unresolved()) {
Variable* var = outer_scope()->LookupRecursive(info, proxy, nullptr); Variable* var = outer_scope()->LookupRecursive(info, proxy, nullptr);
if (var == nullptr) { if (var == nullptr) {
DCHECK(proxy->is_private_field()); DCHECK(proxy->is_private_field());
@ -2060,8 +2054,7 @@ bool Scope::ResolveVariablesRecursively(ParseInfo* info) {
} }
} else { } else {
// Resolve unresolved variables for this scope. // Resolve unresolved variables for this scope.
for (VariableProxy* proxy = unresolved_; proxy != nullptr; for (VariableProxy* proxy : unresolved_list_) {
proxy = proxy->next_unresolved()) {
if (!ResolveVariable(info, proxy)) return false; if (!ResolveVariable(info, proxy)) return false;
} }
@ -2074,57 +2067,6 @@ bool Scope::ResolveVariablesRecursively(ParseInfo* info) {
return true; return true;
} }
VariableProxy* Scope::FetchFreeVariables(DeclarationScope* max_outer_scope,
ParseInfo* info,
VariableProxy* stack) {
// Module variables must be allocated before variable resolution
// to ensure that UpdateNeedsHoleCheck() can detect import variables.
if (info != nullptr && is_module_scope()) {
AsModuleScope()->AllocateModuleVariables();
}
// Lazy parsed declaration scopes are already partially analyzed. If there are
// unresolved references remaining, they just need to be resolved in outer
// scopes.
Scope* lookup =
is_declaration_scope() && AsDeclarationScope()->was_lazily_parsed()
? outer_scope()
: this;
for (VariableProxy *proxy = unresolved_, *next = nullptr; proxy != nullptr;
proxy = next) {
next = proxy->next_unresolved();
DCHECK(!proxy->is_resolved());
Variable* var =
lookup->LookupRecursive(info, proxy, max_outer_scope->outer_scope());
if (var == nullptr) {
proxy->set_next_unresolved(stack);
stack = proxy;
} else if (var != kDummyPreParserVariable &&
var != kDummyPreParserLexicalVariable) {
if (info != nullptr) {
// In this case we need to leave scopes in a way that they can be
// allocated. If we resolved variables from lazy parsed scopes, we need
// to context allocate the var.
ResolveTo(info, proxy, var);
if (!var->is_dynamic() && lookup != this) var->ForceContextAllocation();
} else {
var->set_is_used();
if (proxy->is_assigned()) {
var->set_maybe_assigned();
}
}
}
}
// Clear unresolved_ as it's in an inconsistent state.
unresolved_ = nullptr;
for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
stack = scope->FetchFreeVariables(max_outer_scope, info, stack);
}
return stack;
}
bool Scope::MustAllocate(Variable* var) { bool Scope::MustAllocate(Variable* var) {
if (var == kDummyPreParserLexicalVariable || var == kDummyPreParserVariable) { if (var == kDummyPreParserLexicalVariable || var == kDummyPreParserVariable) {
return true; return true;
@ -2236,6 +2178,24 @@ void DeclarationScope::AllocateReceiver() {
AllocateParameter(receiver(), -1); AllocateParameter(receiver(), -1);
} }
void DeclarationScope::AllocatePromise() {
if (!has_promise_) return;
DCHECK_NOT_NULL(promise_var());
DCHECK_EQ(this, promise_var()->scope());
AllocateStackSlot(promise_var());
DCHECK_EQ(VariableLocation::LOCAL, promise_var()->location());
DCHECK_EQ(kPromiseVarIndex, promise_var()->index());
}
void DeclarationScope::AllocateGeneratorObject() {
if (!has_generator_object_) return;
DCHECK_NOT_NULL(generator_object_var());
DCHECK_EQ(this, generator_object_var()->scope());
AllocateStackSlot(generator_object_var());
DCHECK_EQ(VariableLocation::LOCAL, generator_object_var()->location());
DCHECK_EQ(kGeneratorObjectVarIndex, generator_object_var()->index());
}
void Scope::AllocateNonParameterLocal(Variable* var) { void Scope::AllocateNonParameterLocal(Variable* var) {
DCHECK(var->scope() == this); DCHECK(var->scope() == this);
if (var->IsUnallocated() && MustAllocate(var)) { if (var->IsUnallocated() && MustAllocate(var)) {
@ -2304,6 +2264,19 @@ void Scope::AllocateVariablesRecursively() {
return; return;
} }
// Make sure to allocate the .promise (for async functions) or
// .generator_object (for async generators) first, so that it
// get's the required stack slot 0 in case it's needed. See
// http://bit.ly/v8-zero-cost-async-stack-traces for details.
if (is_function_scope()) {
FunctionKind kind = GetClosureScope()->function_kind();
if (IsAsyncGeneratorFunction(kind)) {
AsDeclarationScope()->AllocateGeneratorObject();
} else if (IsAsyncFunction(kind)) {
AsDeclarationScope()->AllocatePromise();
}
}
// Allocate variables for inner scopes. // Allocate variables for inner scopes.
for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
scope->AllocateVariablesRecursively(); scope->AllocateVariablesRecursively();
@ -2410,5 +2383,9 @@ int Scope::ContextLocalCount() const {
(is_function_var_in_context ? 1 : 0); (is_function_var_in_context ? 1 : 0);
} }
void* const Scope::kDummyPreParserVariable = reinterpret_cast<void*>(0x1);
void* const Scope::kDummyPreParserLexicalVariable =
reinterpret_cast<void*>(0x2);
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

View File

@ -20,8 +20,7 @@ class AstValueFactory;
class AstRawString; class AstRawString;
class Declaration; class Declaration;
class ParseInfo; class ParseInfo;
class PreParsedScopeData; class PreParsedScopeDataBuilder;
class ProducedPreParsedScopeData;
class SloppyBlockFunctionStatement; class SloppyBlockFunctionStatement;
class Statement; class Statement;
class StringSet; class StringSet;
@ -103,7 +102,6 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
void SetScopeName(const AstRawString* scope_name) { void SetScopeName(const AstRawString* scope_name) {
scope_name_ = scope_name; scope_name_ = scope_name;
} }
void set_needs_migration() { needs_migration_ = true; }
#endif #endif
// TODO(verwaest): Is this needed on Scope? // TODO(verwaest): Is this needed on Scope?
@ -114,7 +112,7 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
ModuleScope* AsModuleScope(); ModuleScope* AsModuleScope();
const ModuleScope* AsModuleScope() const; const ModuleScope* AsModuleScope() const;
class Snapshot final BASE_EMBEDDED { class Snapshot final {
public: public:
explicit Snapshot(Scope* scope); explicit Snapshot(Scope* scope);
~Snapshot(); ~Snapshot();
@ -125,8 +123,8 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
Scope* outer_scope_; Scope* outer_scope_;
Scope* top_inner_scope_; Scope* top_inner_scope_;
VariableProxy* top_unresolved_; VariableProxy* top_unresolved_;
ThreadedList<Variable>::Iterator top_local_; base::ThreadedList<Variable>::Iterator top_local_;
ThreadedList<Declaration>::Iterator top_decl_; base::ThreadedList<Declaration>::Iterator top_decl_;
const bool outer_scope_calls_eval_; const bool outer_scope_calls_eval_;
}; };
@ -203,9 +201,9 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
void DeclareCatchVariableName(const AstRawString* name); void DeclareCatchVariableName(const AstRawString* name);
// Declarations list. // Declarations list.
ThreadedList<Declaration>* declarations() { return &decls_; } base::ThreadedList<Declaration>* declarations() { return &decls_; }
ThreadedList<Variable>* locals() { return &locals_; } base::ThreadedList<Variable>* locals() { return &locals_; }
// Create a new unresolved variable. // Create a new unresolved variable.
VariableProxy* NewUnresolved(AstNodeFactory* factory, VariableProxy* NewUnresolved(AstNodeFactory* factory,
@ -218,8 +216,7 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
DCHECK(!already_resolved_); DCHECK(!already_resolved_);
DCHECK_EQ(factory->zone(), zone()); DCHECK_EQ(factory->zone(), zone());
VariableProxy* proxy = factory->NewVariableProxy(name, kind, start_pos); VariableProxy* proxy = factory->NewVariableProxy(name, kind, start_pos);
proxy->set_next_unresolved(unresolved_); AddUnresolved(proxy);
unresolved_ = proxy;
return proxy; return proxy;
} }
@ -480,6 +477,9 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
return false; return false;
} }
static void* const kDummyPreParserVariable;
static void* const kDummyPreParserLexicalVariable;
protected: protected:
explicit Scope(Zone* zone); explicit Scope(Zone* zone);
@ -522,12 +522,12 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
VariableMap variables_; VariableMap variables_;
// In case of non-scopeinfo-backed scopes, this contains the variables of the // In case of non-scopeinfo-backed scopes, this contains the variables of the
// map above in order of addition. // map above in order of addition.
ThreadedList<Variable> locals_; base::ThreadedList<Variable> locals_;
// Unresolved variables referred to from this scope. The proxies themselves // Unresolved variables referred to from this scope. The proxies themselves
// form a linked list of all unresolved proxies. // form a linked list of all unresolved proxies.
VariableProxy* unresolved_; base::ThreadedList<VariableProxy> unresolved_list_;
// Declarations. // Declarations.
ThreadedList<Declaration> decls_; base::ThreadedList<Declaration> decls_;
// Serialized scope info support. // Serialized scope info support.
Handle<ScopeInfo> scope_info_; Handle<ScopeInfo> scope_info_;
@ -597,9 +597,10 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
// Finds free variables of this scope. This mutates the unresolved variables // Finds free variables of this scope. This mutates the unresolved variables
// list along the way, so full resolution cannot be done afterwards. // list along the way, so full resolution cannot be done afterwards.
// If a ParseInfo* is passed, non-free variables will be resolved. // If a ParseInfo* is passed, non-free variables will be resolved.
VariableProxy* FetchFreeVariables(DeclarationScope* max_outer_scope, template <typename T>
ParseInfo* info = nullptr, void ResolveScopesThenForEachVariable(DeclarationScope* max_outer_scope,
VariableProxy* stack = nullptr); T variable_proxy_stackvisitor,
ParseInfo* info = nullptr);
// Predicates. // Predicates.
bool MustAllocate(Variable* var); bool MustAllocate(Variable* var);
@ -682,6 +683,12 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
} }
bool is_being_lazily_parsed() const { return is_being_lazily_parsed_; } bool is_being_lazily_parsed() const { return is_being_lazily_parsed_; }
#endif #endif
void set_zone(Zone* zone) {
#ifdef DEBUG
needs_migration_ = true;
#endif
zone_ = zone;
}
bool ShouldEagerCompile() const; bool ShouldEagerCompile() const;
void set_should_eager_compile(); void set_should_eager_compile();
@ -759,11 +766,22 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
// literals, or nullptr. Only valid for function scopes. // literals, or nullptr. Only valid for function scopes.
Variable* function_var() const { return function_; } Variable* function_var() const { return function_; }
// The variable holding the JSGeneratorObject for generator, async
// and async generator functions, and modules. Only valid for
// function and module scopes.
Variable* generator_object_var() const { Variable* generator_object_var() const {
DCHECK(is_function_scope() || is_module_scope()); DCHECK(is_function_scope() || is_module_scope());
return GetRareVariable(RareVariable::kGeneratorObject); return GetRareVariable(RareVariable::kGeneratorObject);
} }
// For async generators, the .generator_object variable is always
// allocated to a fixed stack slot, such that the stack trace
// construction logic can access it.
static constexpr int kGeneratorObjectVarIndex = 0;
// The variable holding the promise returned from async functions.
// Only valid for function scopes in async functions (i.e. not
// for async generators).
Variable* promise_var() const { Variable* promise_var() const {
DCHECK(is_function_scope()); DCHECK(is_function_scope());
DCHECK(IsAsyncFunction(function_kind_)); DCHECK(IsAsyncFunction(function_kind_));
@ -771,6 +789,11 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
return GetRareVariable(RareVariable::kPromise); return GetRareVariable(RareVariable::kPromise);
} }
// For async functions, the .promise variable is always allocated
// to a fixed stack slot, such that the stack trace construction
// logic can access it.
static constexpr int kPromiseVarIndex = 0;
// Parameters. The left-most parameter has index 0. // Parameters. The left-most parameter has index 0.
// Only valid for function and module scopes. // Only valid for function and module scopes.
Variable* parameter(int index) const { Variable* parameter(int index) const {
@ -898,6 +921,8 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
void AllocateLocals(); void AllocateLocals();
void AllocateParameterLocals(); void AllocateParameterLocals();
void AllocateReceiver(); void AllocateReceiver();
void AllocatePromise();
void AllocateGeneratorObject();
void ResetAfterPreparsing(AstValueFactory* ast_value_factory, bool aborted); void ResetAfterPreparsing(AstValueFactory* ast_value_factory, bool aborted);
@ -919,13 +944,13 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
// saved in produced_preparsed_scope_data_. // saved in produced_preparsed_scope_data_.
void SavePreParsedScopeDataForDeclarationScope(); void SavePreParsedScopeDataForDeclarationScope();
void set_produced_preparsed_scope_data( void set_preparsed_scope_data_builder(
ProducedPreParsedScopeData* produced_preparsed_scope_data) { PreParsedScopeDataBuilder* preparsed_scope_data_builder) {
produced_preparsed_scope_data_ = produced_preparsed_scope_data; preparsed_scope_data_builder_ = preparsed_scope_data_builder;
} }
ProducedPreParsedScopeData* produced_preparsed_scope_data() const { PreParsedScopeDataBuilder* preparsed_scope_data_builder() const {
return produced_preparsed_scope_data_; return preparsed_scope_data_builder_;
} }
private: private:
@ -954,6 +979,10 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
bool force_eager_compilation_ : 1; bool force_eager_compilation_ : 1;
// This function scope has a rest parameter. // This function scope has a rest parameter.
bool has_rest_ : 1; bool has_rest_ : 1;
// This function scope has a .promise variable.
bool has_promise_ : 1;
// This function scope has a .generator_object variable.
bool has_generator_object_ : 1;
// This scope has a parameter called "arguments". // This scope has a parameter called "arguments".
bool has_arguments_parameter_ : 1; bool has_arguments_parameter_ : 1;
// This scope uses "super" property ('super.foo'). // This scope uses "super" property ('super.foo').
@ -981,7 +1010,7 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
Variable* arguments_; Variable* arguments_;
// For producing the scope allocation data during preparsing. // For producing the scope allocation data during preparsing.
ProducedPreParsedScopeData* produced_preparsed_scope_data_; PreParsedScopeDataBuilder* preparsed_scope_data_builder_;
struct RareData : public ZoneObject { struct RareData : public ZoneObject {
// Convenience variable; Subclass constructor only // Convenience variable; Subclass constructor only

View File

@ -181,7 +181,7 @@ class Variable final : public ZoneObject {
: kNeedsInitialization; : kNeedsInitialization;
} }
typedef ThreadedList<Variable> List; typedef base::ThreadedList<Variable> List;
private: private:
Scope* scope_; Scope* scope_;
@ -215,6 +215,7 @@ class Variable final : public ZoneObject {
ForceHoleInitializationField::kNext, 1> {}; ForceHoleInitializationField::kNext, 1> {};
Variable** next() { return &next_; } Variable** next() { return &next_; }
friend List; friend List;
friend base::ThreadedListTraits<Variable>;
}; };
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

70
deps/v8/src/base/address-region.h vendored Normal file
View File

@ -0,0 +1,70 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_BASE_ADDRESS_REGION_H_
#define V8_BASE_ADDRESS_REGION_H_
#include <iostream>
#include "src/base/macros.h"
namespace v8 {
namespace base {
// Helper class representing an address region of certain size.
class AddressRegion {
public:
typedef uintptr_t Address;
AddressRegion() = default;
AddressRegion(Address address, size_t size)
: address_(address), size_(size) {}
Address begin() const { return address_; }
Address end() const { return address_ + size_; }
size_t size() const { return size_; }
void set_size(size_t size) { size_ = size; }
bool is_empty() const { return size_ == 0; }
bool contains(Address address) const {
STATIC_ASSERT(std::is_unsigned<Address>::value);
return (address - begin()) < size();
}
bool contains(Address address, size_t size) const {
STATIC_ASSERT(std::is_unsigned<Address>::value);
Address offset = address - begin();
return (offset < size_) && (offset + size <= size_);
}
bool contains(AddressRegion region) const {
return contains(region.address_, region.size_);
}
bool operator==(AddressRegion other) const {
return address_ == other.address_ && size_ == other.size_;
}
bool operator!=(AddressRegion other) const {
return address_ != other.address_ || size_ != other.size_;
}
private:
Address address_ = 0;
size_t size_ = 0;
};
ASSERT_TRIVIALLY_COPYABLE(AddressRegion);
inline std::ostream& operator<<(std::ostream& out, AddressRegion region) {
return out << "[" << reinterpret_cast<void*>(region.begin()) << "+"
<< region.size() << "]";
}
} // namespace base
} // namespace v8
#endif // V8_BASE_ADDRESS_REGION_H_

View File

@ -377,6 +377,22 @@ class AtomicElement {
T value_; T value_;
}; };
template <typename T,
typename = typename std::enable_if<std::is_unsigned<T>::value>::type>
inline void CheckedIncrement(std::atomic<T>* number, T amount) {
const T old = number->fetch_add(amount);
DCHECK_GE(old + amount, old);
USE(old);
}
template <typename T,
typename = typename std::enable_if<std::is_unsigned<T>::value>::type>
inline void CheckedDecrement(std::atomic<T>* number, T amount) {
const T old = number->fetch_sub(amount);
DCHECK_GE(old, amount);
USE(old);
}
} // namespace base } // namespace base
} // namespace v8 } // namespace v8

View File

@ -146,6 +146,14 @@ constexpr inline bool IsPowerOfTwo(T value) {
V8_BASE_EXPORT uint32_t RoundUpToPowerOfTwo32(uint32_t value); V8_BASE_EXPORT uint32_t RoundUpToPowerOfTwo32(uint32_t value);
// Same for 64 bit integers. |value| must be <= 2^63 // Same for 64 bit integers. |value| must be <= 2^63
V8_BASE_EXPORT uint64_t RoundUpToPowerOfTwo64(uint64_t value); V8_BASE_EXPORT uint64_t RoundUpToPowerOfTwo64(uint64_t value);
// Same for size_t integers.
inline size_t RoundUpToPowerOfTwo(size_t value) {
if (sizeof(size_t) == sizeof(uint64_t)) {
return RoundUpToPowerOfTwo64(value);
} else {
return RoundUpToPowerOfTwo32(value);
}
}
// RoundDownToPowerOfTwo32(value) returns the greatest power of two which is // RoundDownToPowerOfTwo32(value) returns the greatest power of two which is
// less than or equal to |value|. If you pass in a |value| that is already a // less than or equal to |value|. If you pass in a |value| that is already a

View File

@ -0,0 +1,101 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/base/bounded-page-allocator.h"
namespace v8 {
namespace base {
BoundedPageAllocator::BoundedPageAllocator(v8::PageAllocator* page_allocator,
Address start, size_t size,
size_t allocate_page_size)
: allocate_page_size_(allocate_page_size),
commit_page_size_(page_allocator->CommitPageSize()),
page_allocator_(page_allocator),
region_allocator_(start, size, allocate_page_size_) {
CHECK_NOT_NULL(page_allocator);
CHECK(IsAligned(allocate_page_size, page_allocator->AllocatePageSize()));
CHECK(IsAligned(allocate_page_size_, commit_page_size_));
}
BoundedPageAllocator::Address BoundedPageAllocator::begin() const {
return region_allocator_.begin();
}
size_t BoundedPageAllocator::size() const { return region_allocator_.size(); }
void* BoundedPageAllocator::AllocatePages(void* hint, size_t size,
size_t alignment,
PageAllocator::Permission access) {
LockGuard<Mutex> guard(&mutex_);
CHECK(IsAligned(alignment, region_allocator_.page_size()));
// Region allocator does not support alignments bigger than it's own
// allocation alignment.
CHECK_LE(alignment, allocate_page_size_);
// TODO(ishell): Consider using randomized version here.
Address address = region_allocator_.AllocateRegion(size);
if (address == RegionAllocator::kAllocationFailure) {
return nullptr;
}
CHECK(page_allocator_->SetPermissions(reinterpret_cast<void*>(address), size,
access));
return reinterpret_cast<void*>(address);
}
bool BoundedPageAllocator::FreePages(void* raw_address, size_t size) {
LockGuard<Mutex> guard(&mutex_);
Address address = reinterpret_cast<Address>(raw_address);
size_t freed_size = region_allocator_.FreeRegion(address);
if (freed_size != size) return false;
CHECK(page_allocator_->SetPermissions(raw_address, size,
PageAllocator::kNoAccess));
return true;
}
bool BoundedPageAllocator::ReleasePages(void* raw_address, size_t size,
size_t new_size) {
Address address = reinterpret_cast<Address>(raw_address);
CHECK(IsAligned(address, allocate_page_size_));
DCHECK_LT(new_size, size);
DCHECK(IsAligned(size - new_size, commit_page_size_));
// Check if we freed any allocatable pages by this release.
size_t allocated_size = RoundUp(size, allocate_page_size_);
size_t new_allocated_size = RoundUp(new_size, allocate_page_size_);
#ifdef DEBUG
{
// There must be an allocated region at given |address| of a size not
// smaller than |size|.
LockGuard<Mutex> guard(&mutex_);
CHECK_EQ(allocated_size, region_allocator_.CheckRegion(address));
}
#endif
if (new_allocated_size < allocated_size) {
LockGuard<Mutex> guard(&mutex_);
region_allocator_.TrimRegion(address, new_allocated_size);
}
// Keep the region in "used" state just uncommit some pages.
Address free_address = address + new_size;
size_t free_size = size - new_size;
return page_allocator_->SetPermissions(reinterpret_cast<void*>(free_address),
free_size, PageAllocator::kNoAccess);
}
bool BoundedPageAllocator::SetPermissions(void* address, size_t size,
PageAllocator::Permission access) {
DCHECK(IsAligned(reinterpret_cast<Address>(address), commit_page_size_));
DCHECK(IsAligned(size, commit_page_size_));
DCHECK(region_allocator_.contains(reinterpret_cast<Address>(address), size));
return page_allocator_->SetPermissions(address, size, access);
}
} // namespace base
} // namespace v8

View File

@ -0,0 +1,79 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_BASE_BOUNDED_PAGE_ALLOCATOR_H_
#define V8_BASE_BOUNDED_PAGE_ALLOCATOR_H_
#include "include/v8-platform.h"
#include "src/base/platform/mutex.h"
#include "src/base/region-allocator.h"
namespace v8 {
namespace base {
// This is a v8::PageAllocator implementation that allocates pages within the
// pre-reserved region of virtual space. This class requires the virtual space
// to be kept reserved during the lifetime of this object.
// The main application of bounded page allocator are
// - V8 heap pointer compression which requires the whole V8 heap to be
// allocated within a contiguous range of virtual address space,
// - executable page allocation, which allows to use PC-relative 32-bit code
// displacement on certain 64-bit platforms.
// Bounded page allocator uses other page allocator instance for doing actual
// page allocations.
// The implementation is thread-safe.
class V8_BASE_EXPORT BoundedPageAllocator : public v8::PageAllocator {
public:
typedef uintptr_t Address;
BoundedPageAllocator(v8::PageAllocator* page_allocator, Address start,
size_t size, size_t allocate_page_size);
~BoundedPageAllocator() override = default;
// These functions are not inlined to avoid https://crbug.com/v8/8275.
Address begin() const;
size_t size() const;
// Returns true if given address is in the range controlled by the bounded
// page allocator instance.
bool contains(Address address) const {
return region_allocator_.contains(address);
}
size_t AllocatePageSize() override { return allocate_page_size_; }
size_t CommitPageSize() override { return commit_page_size_; }
void SetRandomMmapSeed(int64_t seed) override {
page_allocator_->SetRandomMmapSeed(seed);
}
void* GetRandomMmapAddr() override {
return page_allocator_->GetRandomMmapAddr();
}
void* AllocatePages(void* address, size_t size, size_t alignment,
PageAllocator::Permission access) override;
bool FreePages(void* address, size_t size) override;
bool ReleasePages(void* address, size_t size, size_t new_size) override;
bool SetPermissions(void* address, size_t size,
PageAllocator::Permission access) override;
private:
v8::base::Mutex mutex_;
const size_t allocate_page_size_;
const size_t commit_page_size_;
v8::PageAllocator* const page_allocator_;
v8::base::RegionAllocator region_allocator_;
DISALLOW_COPY_AND_ASSIGN(BoundedPageAllocator);
};
} // namespace base
} // namespace v8
#endif // V8_BASE_BOUNDED_PAGE_ALLOCATOR_H_

View File

@ -196,9 +196,9 @@
#endif #endif
#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64) #if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64)
#define V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK 1 #define V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK true
#else #else
#define V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK 0 #define V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK false
#endif #endif
// Number of bits to represent the page size for paged spaces. The value of 19 // Number of bits to represent the page size for paged spaces. The value of 19

View File

@ -21,7 +21,7 @@ StackTrace::StackTrace(const void* const* trace, size_t count) {
count_ = count; count_ = count;
} }
StackTrace::~StackTrace() {} StackTrace::~StackTrace() = default;
const void* const* StackTrace::Addresses(size_t* count) const { const void* const* StackTrace::Addresses(size_t* count) const {
*count = count_; *count = count_;

View File

@ -61,7 +61,7 @@ char* itoa_r(intptr_t i, char* buf, size_t sz, int base, size_t padding);
namespace { namespace {
volatile sig_atomic_t in_signal_handler = 0; volatile sig_atomic_t in_signal_handler = 0;
bool dump_stack_in_signal_handler = 1; bool dump_stack_in_signal_handler = true;
// The prefix used for mangled symbols, per the Itanium C++ ABI: // The prefix used for mangled symbols, per the Itanium C++ ABI:
// http://www.codesourcery.com/cxx-abi/abi.html#mangling // http://www.codesourcery.com/cxx-abi/abi.html#mangling
@ -104,7 +104,7 @@ void DemangleSymbols(std::string* text) {
// Try to demangle the mangled symbol candidate. // Try to demangle the mangled symbol candidate.
int status = 0; int status = 0;
std::unique_ptr<char, FreeDeleter> demangled_symbol( std::unique_ptr<char, FreeDeleter> demangled_symbol(
abi::__cxa_demangle(mangled_symbol.c_str(), nullptr, 0, &status)); abi::__cxa_demangle(mangled_symbol.c_str(), nullptr, nullptr, &status));
if (status == 0) { // Demangling is successful. if (status == 0) { // Demangling is successful.
// Remove the mangled symbol. // Remove the mangled symbol.
text->erase(mangled_start, mangled_end - mangled_start); text->erase(mangled_start, mangled_end - mangled_start);
@ -125,7 +125,7 @@ class BacktraceOutputHandler {
virtual void HandleOutput(const char* output) = 0; virtual void HandleOutput(const char* output) = 0;
protected: protected:
virtual ~BacktraceOutputHandler() {} virtual ~BacktraceOutputHandler() = default;
}; };
#if HAVE_EXECINFO_H #if HAVE_EXECINFO_H
@ -266,7 +266,7 @@ void StackDumpSignalHandler(int signal, siginfo_t* info, void* void_context) {
class PrintBacktraceOutputHandler : public BacktraceOutputHandler { class PrintBacktraceOutputHandler : public BacktraceOutputHandler {
public: public:
PrintBacktraceOutputHandler() {} PrintBacktraceOutputHandler() = default;
void HandleOutput(const char* output) override { void HandleOutput(const char* output) override {
// NOTE: This code MUST be async-signal safe (it's used by in-process // NOTE: This code MUST be async-signal safe (it's used by in-process

View File

@ -7,13 +7,6 @@
#include "src/base/debug/stack_trace.h" #include "src/base/debug/stack_trace.h"
// This file can't use "src/base/win32-headers.h" because it defines symbols
// that lead to compilation errors. But `NOMINMAX` should be defined to disable
// defining of the `min` and `max` MACROS.
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <windows.h> #include <windows.h>
#include <dbghelp.h> #include <dbghelp.h>
#include <Shlwapi.h> #include <Shlwapi.h>

View File

@ -90,7 +90,7 @@ typedef union {
ew_u.value = (d); \ ew_u.value = (d); \
(ix0) = ew_u.parts.msw; \ (ix0) = ew_u.parts.msw; \
(ix1) = ew_u.parts.lsw; \ (ix1) = ew_u.parts.lsw; \
} while (0) } while (false)
/* Get a 64-bit int from a double. */ /* Get a 64-bit int from a double. */
#define EXTRACT_WORD64(ix, d) \ #define EXTRACT_WORD64(ix, d) \
@ -98,7 +98,7 @@ typedef union {
ieee_double_shape_type ew_u; \ ieee_double_shape_type ew_u; \
ew_u.value = (d); \ ew_u.value = (d); \
(ix) = ew_u.xparts.w; \ (ix) = ew_u.xparts.w; \
} while (0) } while (false)
/* Get the more significant 32 bit int from a double. */ /* Get the more significant 32 bit int from a double. */
@ -107,7 +107,7 @@ typedef union {
ieee_double_shape_type gh_u; \ ieee_double_shape_type gh_u; \
gh_u.value = (d); \ gh_u.value = (d); \
(i) = gh_u.parts.msw; \ (i) = gh_u.parts.msw; \
} while (0) } while (false)
/* Get the less significant 32 bit int from a double. */ /* Get the less significant 32 bit int from a double. */
@ -116,7 +116,7 @@ typedef union {
ieee_double_shape_type gl_u; \ ieee_double_shape_type gl_u; \
gl_u.value = (d); \ gl_u.value = (d); \
(i) = gl_u.parts.lsw; \ (i) = gl_u.parts.lsw; \
} while (0) } while (false)
/* Set a double from two 32 bit ints. */ /* Set a double from two 32 bit ints. */
@ -126,7 +126,7 @@ typedef union {
iw_u.parts.msw = (ix0); \ iw_u.parts.msw = (ix0); \
iw_u.parts.lsw = (ix1); \ iw_u.parts.lsw = (ix1); \
(d) = iw_u.value; \ (d) = iw_u.value; \
} while (0) } while (false)
/* Set a double from a 64-bit int. */ /* Set a double from a 64-bit int. */
#define INSERT_WORD64(d, ix) \ #define INSERT_WORD64(d, ix) \
@ -134,7 +134,7 @@ typedef union {
ieee_double_shape_type iw_u; \ ieee_double_shape_type iw_u; \
iw_u.xparts.w = (ix); \ iw_u.xparts.w = (ix); \
(d) = iw_u.value; \ (d) = iw_u.value; \
} while (0) } while (false)
/* Set the more significant 32 bits of a double from an int. */ /* Set the more significant 32 bits of a double from an int. */
@ -144,7 +144,7 @@ typedef union {
sh_u.value = (d); \ sh_u.value = (d); \
sh_u.parts.msw = (v); \ sh_u.parts.msw = (v); \
(d) = sh_u.value; \ (d) = sh_u.value; \
} while (0) } while (false)
/* Set the less significant 32 bits of a double from an int. */ /* Set the less significant 32 bits of a double from an int. */
@ -154,7 +154,7 @@ typedef union {
sl_u.value = (d); \ sl_u.value = (d); \
sl_u.parts.lsw = (v); \ sl_u.parts.lsw = (v); \
(d) = sl_u.value; \ (d) = sl_u.value; \
} while (0) } while (false)
/* Support macro. */ /* Support macro. */
@ -1210,9 +1210,9 @@ double atan(double x) {
if (ix > 0x7FF00000 || (ix == 0x7FF00000 && (low != 0))) if (ix > 0x7FF00000 || (ix == 0x7FF00000 && (low != 0)))
return x + x; /* NaN */ return x + x; /* NaN */
if (hx > 0) if (hx > 0)
return atanhi[3] + *(volatile double *)&atanlo[3]; return atanhi[3] + *const_cast<volatile double*>(&atanlo[3]);
else else
return -atanhi[3] - *(volatile double *)&atanlo[3]; return -atanhi[3] - *const_cast<volatile double*>(&atanlo[3]);
} }
if (ix < 0x3FDC0000) { /* |x| < 0.4375 */ if (ix < 0x3FDC0000) { /* |x| < 0.4375 */
if (ix < 0x3E400000) { /* |x| < 2^-27 */ if (ix < 0x3E400000) { /* |x| < 2^-27 */

View File

@ -49,7 +49,7 @@ V8_BASE_EXPORT void SetDcheckFunction(void (*dcheck_Function)(const char*, int,
if (V8_UNLIKELY(!(condition))) { \ if (V8_UNLIKELY(!(condition))) { \
FATAL("Check failed: %s.", message); \ FATAL("Check failed: %s.", message); \
} \ } \
} while (0) } while (false)
#define CHECK(condition) CHECK_WITH_MSG(condition, #condition) #define CHECK(condition) CHECK_WITH_MSG(condition, #condition)
#ifdef DEBUG #ifdef DEBUG
@ -59,7 +59,7 @@ V8_BASE_EXPORT void SetDcheckFunction(void (*dcheck_Function)(const char*, int,
if (V8_UNLIKELY(!(condition))) { \ if (V8_UNLIKELY(!(condition))) { \
V8_Dcheck(__FILE__, __LINE__, message); \ V8_Dcheck(__FILE__, __LINE__, message); \
} \ } \
} while (0) } while (false)
#define DCHECK(condition) DCHECK_WITH_MSG(condition, #condition) #define DCHECK(condition) DCHECK_WITH_MSG(condition, #condition)
// Helper macro for binary operators. // Helper macro for binary operators.
@ -73,7 +73,7 @@ V8_BASE_EXPORT void SetDcheckFunction(void (*dcheck_Function)(const char*, int,
FATAL("Check failed: %s.", _msg->c_str()); \ FATAL("Check failed: %s.", _msg->c_str()); \
delete _msg; \ delete _msg; \
} \ } \
} while (0) } while (false)
#define DCHECK_OP(name, op, lhs, rhs) \ #define DCHECK_OP(name, op, lhs, rhs) \
do { \ do { \
@ -84,7 +84,7 @@ V8_BASE_EXPORT void SetDcheckFunction(void (*dcheck_Function)(const char*, int,
V8_Dcheck(__FILE__, __LINE__, _msg->c_str()); \ V8_Dcheck(__FILE__, __LINE__, _msg->c_str()); \
delete _msg; \ delete _msg; \
} \ } \
} while (0) } while (false)
#else #else
@ -98,7 +98,7 @@ V8_BASE_EXPORT void SetDcheckFunction(void (*dcheck_Function)(const char*, int,
typename ::v8::base::pass_value_or_ref<decltype(rhs)>::type>((lhs), \ typename ::v8::base::pass_value_or_ref<decltype(rhs)>::type>((lhs), \
(rhs)); \ (rhs)); \
CHECK_WITH_MSG(_cmp, #lhs " " #op " " #rhs); \ CHECK_WITH_MSG(_cmp, #lhs " " #op " " #rhs); \
} while (0) } while (false)
#define DCHECK_WITH_MSG(condition, msg) void(0); #define DCHECK_WITH_MSG(condition, msg) void(0);

59
deps/v8/src/base/lsan-page-allocator.cc vendored Normal file
View File

@ -0,0 +1,59 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/base/lsan-page-allocator.h"
#include "src/base/logging.h"
#if defined(LEAK_SANITIZER)
#include <sanitizer/lsan_interface.h>
#endif
namespace v8 {
namespace base {
// Caches the wrapped allocator's page sizes once at construction so the
// hot AllocatePageSize()/CommitPageSize() queries avoid a virtual call.
LsanPageAllocator::LsanPageAllocator(v8::PageAllocator* page_allocator)
    : page_allocator_(page_allocator),
      allocate_page_size_(page_allocator_->AllocatePageSize()),
      commit_page_size_(page_allocator_->CommitPageSize()) {
  // NOTE(review): the initializer list above already dereferences
  // |page_allocator|, so a null argument would crash before this check
  // fires — the DCHECK documents the contract rather than guarding the
  // dereference. Confirm whether an earlier check is desired.
  DCHECK_NOT_NULL(page_allocator);
}
// Forwards the allocation to the wrapped allocator and, when built with
// LEAK_SANITIZER, registers the new mapping as an LSan root region so that
// pointers stored inside it keep their referents alive during leak checks.
void* LsanPageAllocator::AllocatePages(void* address, size_t size,
                                       size_t alignment,
                                       PageAllocator::Permission access) {
  void* result =
      page_allocator_->AllocatePages(address, size, alignment, access);
#if defined(LEAK_SANITIZER)
  // Only register on success; nullptr means nothing was mapped.
  if (result != nullptr) {
    __lsan_register_root_region(result, size);
  }
#endif
  return result;
}
// Forwards the free to the wrapped allocator and, on success, drops the
// LSan root-region registration made by AllocatePages for this range.
bool LsanPageAllocator::FreePages(void* address, size_t size) {
  bool result = page_allocator_->FreePages(address, size);
#if defined(LEAK_SANITIZER)
  if (result) {
    __lsan_unregister_root_region(address, size);
  }
#endif
  return result;
}
// Shrinks a previously allocated region to |new_size| bytes. LSan has no
// resize primitive, so on success the old root region is unregistered and
// the surviving prefix is re-registered with the reduced size.
bool LsanPageAllocator::ReleasePages(void* address, size_t size,
                                     size_t new_size) {
  bool result = page_allocator_->ReleasePages(address, size, new_size);
#if defined(LEAK_SANITIZER)
  if (result) {
    __lsan_unregister_root_region(address, size);
    __lsan_register_root_region(address, new_size);
  }
#endif
  return result;
}
} // namespace base
} // namespace v8

56
deps/v8/src/base/lsan-page-allocator.h vendored Normal file
View File

@ -0,0 +1,56 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_BASE_LSAN_PAGE_ALLOCATOR_H_
#define V8_BASE_LSAN_PAGE_ALLOCATOR_H_
#include "include/v8-platform.h"
#include "src/base/base-export.h"
#include "src/base/compiler-specific.h"
namespace v8 {
namespace base {
// This is a v8::PageAllocator implementation that decorates provided page
// allocator object with leak sanitizer notifications when LEAK_SANITIZER
// is defined.
class V8_BASE_EXPORT LsanPageAllocator
    : public NON_EXPORTED_BASE(::v8::PageAllocator) {
 public:
  // |page_allocator| must be non-null and must outlive this object.
  // |explicit| added per the project's clang-tidy google-explicit-constructor
  // check: a decorator must not be implicitly convertible from the wrapped
  // allocator pointer.
  explicit LsanPageAllocator(v8::PageAllocator* page_allocator);
  ~LsanPageAllocator() override = default;

  // Page sizes are cached at construction time to avoid a virtual call
  // through the wrapped allocator on every query.
  size_t AllocatePageSize() override { return allocate_page_size_; }

  size_t CommitPageSize() override { return commit_page_size_; }

  void SetRandomMmapSeed(int64_t seed) override {
    return page_allocator_->SetRandomMmapSeed(seed);
  }

  void* GetRandomMmapAddr() override {
    return page_allocator_->GetRandomMmapAddr();
  }

  // Defined out-of-line: these forward to the wrapped allocator and, when
  // LEAK_SANITIZER is defined, (un)register the affected range as an LSan
  // root region.
  void* AllocatePages(void* address, size_t size, size_t alignment,
                      PageAllocator::Permission access) override;

  bool FreePages(void* address, size_t size) override;

  bool ReleasePages(void* address, size_t size, size_t new_size) override;

  bool SetPermissions(void* address, size_t size,
                      PageAllocator::Permission access) override {
    return page_allocator_->SetPermissions(address, size, access);
  }

 private:
  // Non-owning; the wrapped allocator outlives this object.
  v8::PageAllocator* const page_allocator_;
  const size_t allocate_page_size_;
  const size_t commit_page_size_;
};
} // namespace base
} // namespace v8
#endif // V8_BASE_LSAN_PAGE_ALLOCATOR_H_

View File

@ -14,6 +14,9 @@
// No-op macro which is used to work around MSVC's funky VA_ARGS support. // No-op macro which is used to work around MSVC's funky VA_ARGS support.
#define EXPAND(x) x #define EXPAND(x) x
// This macro does nothing. That's all.
#define NOTHING(...)
// TODO(all) Replace all uses of this macro with C++'s offsetof. To do that, we // TODO(all) Replace all uses of this macro with C++'s offsetof. To do that, we
// have to make sure that only standard-layout types and simple field // have to make sure that only standard-layout types and simple field
// designators are used. // designators are used.
@ -195,8 +198,9 @@ V8_INLINE Dest bit_cast(Source const& source) {
#define V8_IMMEDIATE_CRASH() ((void(*)())0)() #define V8_IMMEDIATE_CRASH() ((void(*)())0)()
#endif #endif
// A convenience wrapper around static_assert without a string message argument.
// TODO(all) Replace all uses of this macro with static_assert, remove macro. // Once C++17 becomes the default, this macro can be removed in favor of the
// new static_assert(condition) overload.
#define STATIC_ASSERT(test) static_assert(test, #test) #define STATIC_ASSERT(test) static_assert(test, #test)
namespace v8 { namespace v8 {
@ -276,6 +280,12 @@ struct Use {
(void)unused_tmp_array_for_use_macro; \ (void)unused_tmp_array_for_use_macro; \
} while (false) } while (false)
// Evaluate the instantiations of an expression with parameter packs.
// Since USE has left-to-right evaluation order of it's arguments,
// the parameter pack is iterated from left to right and side effects
// have defined behavior.
#define ITERATE_PACK(...) USE(0, ((__VA_ARGS__), 0)...)
} // namespace base } // namespace base
} // namespace v8 } // namespace v8
@ -346,47 +356,37 @@ V8_INLINE A implicit_cast(A x) {
// write V8_2PART_UINT64_C(0x12345678,90123456); // write V8_2PART_UINT64_C(0x12345678,90123456);
#define V8_2PART_UINT64_C(a, b) (((static_cast<uint64_t>(a) << 32) + 0x##b##u)) #define V8_2PART_UINT64_C(a, b) (((static_cast<uint64_t>(a) << 32) + 0x##b##u))
// Compute the 0-relative offset of some absolute value x of type T.
// This allows conversion of Addresses and integral types into
// 0-relative int offsets.
template <typename T>
constexpr inline intptr_t OffsetFrom(T x) {
return x - static_cast<T>(0);
}
// Compute the absolute value of type T for some 0-relative offset x.
// This allows conversion of 0-relative int offsets into Addresses and
// integral types.
template <typename T>
constexpr inline T AddressFrom(intptr_t x) {
return static_cast<T>(static_cast<T>(0) + x);
}
// Return the largest multiple of m which is <= x. // Return the largest multiple of m which is <= x.
template <typename T> template <typename T>
inline T RoundDown(T x, intptr_t m) { inline T RoundDown(T x, intptr_t m) {
STATIC_ASSERT(std::is_integral<T>::value);
// m must be a power of two. // m must be a power of two.
DCHECK(m != 0 && ((m & (m - 1)) == 0)); DCHECK(m != 0 && ((m & (m - 1)) == 0));
return AddressFrom<T>(OffsetFrom(x) & -m); return x & -m;
} }
template <intptr_t m, typename T> template <intptr_t m, typename T>
constexpr inline T RoundDown(T x) { constexpr inline T RoundDown(T x) {
STATIC_ASSERT(std::is_integral<T>::value);
// m must be a power of two. // m must be a power of two.
STATIC_ASSERT(m != 0 && ((m & (m - 1)) == 0)); STATIC_ASSERT(m != 0 && ((m & (m - 1)) == 0));
return AddressFrom<T>(OffsetFrom(x) & -m); return x & -m;
} }
// Return the smallest multiple of m which is >= x. // Return the smallest multiple of m which is >= x.
template <typename T> template <typename T>
inline T RoundUp(T x, intptr_t m) { inline T RoundUp(T x, intptr_t m) {
STATIC_ASSERT(std::is_integral<T>::value);
return RoundDown<T>(static_cast<T>(x + m - 1), m); return RoundDown<T>(static_cast<T>(x + m - 1), m);
} }
template <intptr_t m, typename T> template <intptr_t m, typename T>
constexpr inline T RoundUp(T x) { constexpr inline T RoundUp(T x) {
return RoundDown<m, T>(static_cast<T>(x + m - 1)); STATIC_ASSERT(std::is_integral<T>::value);
return RoundDown<m, T>(static_cast<T>(x + (m - 1)));
}
template <typename T, typename U>
inline bool IsAligned(T value, U alignment) {
return (value & (alignment - 1)) == 0;
} }
inline void* AlignedAddress(void* address, size_t alignment) { inline void* AlignedAddress(void* address, size_t alignment) {

View File

@ -123,7 +123,7 @@ class Optional {
public: public:
using value_type = T; using value_type = T;
constexpr Optional() {} constexpr Optional() = default;
constexpr Optional(base::nullopt_t) {} // NOLINT(runtime/explicit) constexpr Optional(base::nullopt_t) {} // NOLINT(runtime/explicit)

View File

@ -24,11 +24,9 @@ STATIC_ASSERT_ENUM(PageAllocator::kReadExecute,
#undef STATIC_ASSERT_ENUM #undef STATIC_ASSERT_ENUM
size_t PageAllocator::AllocatePageSize() { PageAllocator::PageAllocator()
return base::OS::AllocatePageSize(); : allocate_page_size_(base::OS::AllocatePageSize()),
} commit_page_size_(base::OS::CommitPageSize()) {}
size_t PageAllocator::CommitPageSize() { return base::OS::CommitPageSize(); }
void PageAllocator::SetRandomMmapSeed(int64_t seed) { void PageAllocator::SetRandomMmapSeed(int64_t seed) {
base::OS::SetRandomMmapSeed(seed); base::OS::SetRandomMmapSeed(seed);

View File

@ -15,11 +15,12 @@ namespace base {
class V8_BASE_EXPORT PageAllocator class V8_BASE_EXPORT PageAllocator
: public NON_EXPORTED_BASE(::v8::PageAllocator) { : public NON_EXPORTED_BASE(::v8::PageAllocator) {
public: public:
virtual ~PageAllocator() = default; PageAllocator();
~PageAllocator() override = default;
size_t AllocatePageSize() override; size_t AllocatePageSize() override { return allocate_page_size_; }
size_t CommitPageSize() override; size_t CommitPageSize() override { return commit_page_size_; }
void SetRandomMmapSeed(int64_t seed) override; void SetRandomMmapSeed(int64_t seed) override;
@ -34,6 +35,10 @@ class V8_BASE_EXPORT PageAllocator
bool SetPermissions(void* address, size_t size, bool SetPermissions(void* address, size_t size,
PageAllocator::Permission access) override; PageAllocator::Permission access) override;
private:
const size_t allocate_page_size_;
const size_t commit_page_size_;
}; };
} // namespace base } // namespace base

View File

@ -3,4 +3,6 @@ set noparent
hpayer@chromium.org hpayer@chromium.org
mlippautz@chromium.org mlippautz@chromium.org
per-file platform-fuchsia.cc=wez@chromium.org
# COMPONENT: Blink>JavaScript # COMPONENT: Blink>JavaScript

View File

@ -57,8 +57,8 @@ void* OS::Allocate(void* address, size_t size, size_t alignment,
strlen(kVirtualMemoryName)); strlen(kVirtualMemoryName));
uintptr_t reservation; uintptr_t reservation;
uint32_t prot = GetProtectionFromMemoryPermission(access); uint32_t prot = GetProtectionFromMemoryPermission(access);
zx_status_t status = zx_vmar_map_old(zx_vmar_root_self(), 0, vmo, 0, zx_status_t status = zx_vmar_map(zx_vmar_root_self(), prot, 0, vmo, 0,
request_size, prot, &reservation); request_size, &reservation);
// Either the vmo is now referenced by the vmar, or we failed and are bailing, // Either the vmo is now referenced by the vmar, or we failed and are bailing,
// so close the vmo either way. // so close the vmo either way.
zx_handle_close(vmo); zx_handle_close(vmo);
@ -67,7 +67,8 @@ void* OS::Allocate(void* address, size_t size, size_t alignment,
} }
uint8_t* base = reinterpret_cast<uint8_t*>(reservation); uint8_t* base = reinterpret_cast<uint8_t*>(reservation);
uint8_t* aligned_base = RoundUp(base, alignment); uint8_t* aligned_base = reinterpret_cast<uint8_t*>(
RoundUp(reinterpret_cast<uintptr_t>(base), alignment));
// Unmap extra memory reserved before and after the desired block. // Unmap extra memory reserved before and after the desired block.
if (aligned_base != base) { if (aligned_base != base) {
@ -114,9 +115,8 @@ bool OS::SetPermissions(void* address, size_t size, MemoryPermission access) {
DCHECK_EQ(0, reinterpret_cast<uintptr_t>(address) % CommitPageSize()); DCHECK_EQ(0, reinterpret_cast<uintptr_t>(address) % CommitPageSize());
DCHECK_EQ(0, size % CommitPageSize()); DCHECK_EQ(0, size % CommitPageSize());
uint32_t prot = GetProtectionFromMemoryPermission(access); uint32_t prot = GetProtectionFromMemoryPermission(access);
return zx_vmar_protect_old(zx_vmar_root_self(), return zx_vmar_protect(zx_vmar_root_self(), prot,
reinterpret_cast<uintptr_t>(address), size, reinterpret_cast<uintptr_t>(address), size) == ZX_OK;
prot) == ZX_OK;
} }
// static // static

View File

@ -27,14 +27,6 @@
#include <sys/types.h> // mmap & munmap #include <sys/types.h> // mmap & munmap
#include <unistd.h> // sysconf #include <unistd.h> // sysconf
// GLibc on ARM defines mcontext_t has a typedef for 'struct sigcontext'.
// Old versions of the C library <signal.h> didn't define the type.
#if defined(__ANDROID__) && !defined(__BIONIC_HAVE_UCONTEXT_T) && \
(defined(__arm__) || defined(__aarch64__)) && \
!defined(__BIONIC_HAVE_STRUCT_SIGCONTEXT)
#include <asm/sigcontext.h> // NOLINT
#endif
#include <cmath> #include <cmath>
#undef MAP_TYPE #undef MAP_TYPE

View File

@ -15,7 +15,7 @@ class PosixDefaultTimezoneCache : public PosixTimezoneCache {
const char* LocalTimezone(double time_ms) override; const char* LocalTimezone(double time_ms) override;
double LocalTimeOffset(double time_ms, bool is_utc) override; double LocalTimeOffset(double time_ms, bool is_utc) override;
~PosixDefaultTimezoneCache() override {} ~PosixDefaultTimezoneCache() override = default;
}; };
} // namespace base } // namespace base

View File

@ -86,7 +86,7 @@ namespace base {
namespace { namespace {
// 0 is never a valid thread id. // 0 is never a valid thread id.
const pthread_t kNoThread = (pthread_t) 0; const pthread_t kNoThread = static_cast<pthread_t>(0);
bool g_hard_abort = false; bool g_hard_abort = false;
@ -254,10 +254,6 @@ void* OS::GetRandomMmapAddr() {
// Little-endian Linux: 46 bits of virtual addressing. // Little-endian Linux: 46 bits of virtual addressing.
raw_addr &= uint64_t{0x3FFFFFFF0000}; raw_addr &= uint64_t{0x3FFFFFFF0000};
#endif #endif
#elif V8_TARGET_ARCH_MIPS64
// We allocate code in 256 MB aligned segments because of optimizations using
// J instruction that require that all code is within a single 256 MB segment
raw_addr &= uint64_t{0x3FFFE0000000};
#elif V8_TARGET_ARCH_S390X #elif V8_TARGET_ARCH_S390X
// Linux on Z uses bits 22-32 for Region Indexing, which translates to 42 bits // Linux on Z uses bits 22-32 for Region Indexing, which translates to 42 bits
// of virtual addressing. Truncate to 40 bits to allow kernel chance to // of virtual addressing. Truncate to 40 bits to allow kernel chance to
@ -267,6 +263,10 @@ void* OS::GetRandomMmapAddr() {
// 31 bits of virtual addressing. Truncate to 29 bits to allow kernel chance // 31 bits of virtual addressing. Truncate to 29 bits to allow kernel chance
// to fulfill request. // to fulfill request.
raw_addr &= 0x1FFFF000; raw_addr &= 0x1FFFF000;
#elif V8_TARGET_ARCH_MIPS64
// 42 bits of virtual addressing. Truncate to 40 bits to allow kernel chance
// to fulfill request.
raw_addr &= uint64_t{0xFFFFFF0000};
#else #else
raw_addr &= 0x3FFFF000; raw_addr &= 0x3FFFF000;
@ -313,7 +313,8 @@ void* OS::Allocate(void* address, size_t size, size_t alignment,
// Unmap memory allocated before the aligned base address. // Unmap memory allocated before the aligned base address.
uint8_t* base = static_cast<uint8_t*>(result); uint8_t* base = static_cast<uint8_t*>(result);
uint8_t* aligned_base = RoundUp(base, alignment); uint8_t* aligned_base = reinterpret_cast<uint8_t*>(
RoundUp(reinterpret_cast<uintptr_t>(base), alignment));
if (aligned_base != base) { if (aligned_base != base) {
DCHECK_LT(base, aligned_base); DCHECK_LT(base, aligned_base);
size_t prefix_size = static_cast<size_t>(aligned_base - base); size_t prefix_size = static_cast<size_t>(aligned_base - base);

View File

@ -15,7 +15,7 @@ class PosixTimezoneCache : public TimezoneCache {
public: public:
double DaylightSavingsOffset(double time_ms) override; double DaylightSavingsOffset(double time_ms) override;
void Clear() override {} void Clear() override {}
~PosixTimezoneCache() override {} ~PosixTimezoneCache() override = default;
protected: protected:
static const int msPerSecond = 1000; static const int msPerSecond = 1000;

View File

@ -822,7 +822,8 @@ void* OS::Allocate(void* address, size_t size, size_t alignment,
if (base == nullptr) return nullptr; // Can't allocate, we're OOM. if (base == nullptr) return nullptr; // Can't allocate, we're OOM.
// If address is suitably aligned, we're done. // If address is suitably aligned, we're done.
uint8_t* aligned_base = RoundUp(base, alignment); uint8_t* aligned_base = reinterpret_cast<uint8_t*>(
RoundUp(reinterpret_cast<uintptr_t>(base), alignment));
if (base == aligned_base) return reinterpret_cast<void*>(base); if (base == aligned_base) return reinterpret_cast<void*>(base);
// Otherwise, free it and try a larger allocation. // Otherwise, free it and try a larger allocation.
@ -843,7 +844,8 @@ void* OS::Allocate(void* address, size_t size, size_t alignment,
// Try to trim the allocation by freeing the padded allocation and then // Try to trim the allocation by freeing the padded allocation and then
// calling VirtualAlloc at the aligned base. // calling VirtualAlloc at the aligned base.
CHECK(Free(base, padded_size)); CHECK(Free(base, padded_size));
aligned_base = RoundUp(base, alignment); aligned_base = reinterpret_cast<uint8_t*>(
RoundUp(reinterpret_cast<uintptr_t>(base), alignment));
base = reinterpret_cast<uint8_t*>( base = reinterpret_cast<uint8_t*>(
VirtualAlloc(aligned_base, size, flags, protect)); VirtualAlloc(aligned_base, size, flags, protect));
// We might not get the reduced allocation due to a race. In that case, // We might not get the reduced allocation due to a race. In that case,

View File

@ -188,7 +188,7 @@ class V8_BASE_EXPORT OS {
class V8_BASE_EXPORT MemoryMappedFile { class V8_BASE_EXPORT MemoryMappedFile {
public: public:
virtual ~MemoryMappedFile() {} virtual ~MemoryMappedFile() = default;
virtual void* memory() const = 0; virtual void* memory() const = 0;
virtual size_t size() const = 0; virtual size_t size() const = 0;

View File

@ -91,7 +91,9 @@ void Semaphore::Signal() {
// This check may fail with <libc-2.21, which we use on the try bots, if the // This check may fail with <libc-2.21, which we use on the try bots, if the
// semaphore is destroyed while sem_post is still executed. A work around is // semaphore is destroyed while sem_post is still executed. A work around is
// to extend the lifetime of the semaphore. // to extend the lifetime of the semaphore.
CHECK_EQ(0, result); if (result != 0) {
FATAL("Error when signaling semaphore, errno: %d", errno);
}
} }

View File

@ -105,10 +105,7 @@ class V8_BASE_EXPORT TimeDelta final {
static TimeDelta FromTimespec(struct timespec ts); static TimeDelta FromTimespec(struct timespec ts);
struct timespec ToTimespec() const; struct timespec ToTimespec() const;
TimeDelta& operator=(const TimeDelta& other) { TimeDelta& operator=(const TimeDelta& other) = default;
delta_ = other.delta_;
return *this;
}
// Computations with other deltas. // Computations with other deltas.
TimeDelta operator+(const TimeDelta& other) const { TimeDelta operator+(const TimeDelta& other) const {

291
deps/v8/src/base/region-allocator.cc vendored Normal file
View File

@ -0,0 +1,291 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/base/region-allocator.h"
#include "src/base/bits.h"
#include "src/base/macros.h"
namespace v8 {
namespace base {
// If |free_size| < |region_size| * |kMaxLoadFactorForRandomization| stop trying
// to randomize region allocation.
constexpr double kMaxLoadFactorForRandomization = 0.40;
// Max number of attempts to allocate page at random address.
constexpr int kMaxRandomizationAttempts = 3;
// Sets up bookkeeping for [memory_region_begin, +memory_region_size): the
// whole range starts out as a single free region. |page_size| must be a
// power of two, and both the range's start and size must be multiples of
// it (enforced by the CHECKs below).
RegionAllocator::RegionAllocator(Address memory_region_begin,
                                 size_t memory_region_size, size_t page_size)
    : whole_region_(memory_region_begin, memory_region_size, false),
      region_size_in_pages_(size() / page_size),
      max_load_for_randomization_(
          static_cast<size_t>(size() * kMaxLoadFactorForRandomization)),
      free_size_(0),
      page_size_(page_size) {
  CHECK_LT(begin(), end());
  CHECK(base::bits::IsPowerOfTwo(page_size_));
  CHECK(IsAligned(size(), page_size_));
  CHECK(IsAligned(begin(), page_size_));

  // Initial region: the entire managed range, marked free.
  Region* region = new Region(whole_region_);

  all_regions_.insert(region);

  FreeListAddRegion(region);
}
// The allocator owns every Region bookkeeping object it ever created;
// dispose of them all on teardown.
RegionAllocator::~RegionAllocator() {
  for (auto* owned_region : all_regions_) {
    delete owned_region;
  }
}
// Returns an iterator to the region containing |address|, or
// all_regions_.end() when the address lies outside the managed range.
RegionAllocator::AllRegionsSet::iterator RegionAllocator::FindRegion(
    Address address) {
  if (!whole_region_.contains(address)) return all_regions_.end();

  Region key(address, 0, false);
  AllRegionsSet::iterator iter = all_regions_.upper_bound(&key);
  // Regions in |all_regions_| are compared by end() values and key's end()
  // points exactly to the address we are querying, so the upper_bound will
  // find the region whose |end()| is greater than the requested address.
  DCHECK_NE(iter, all_regions_.end());
  DCHECK((*iter)->contains(address));
  return iter;
}
// Puts |region| on the free list and accounts its bytes in free_size_.
void RegionAllocator::FreeListAddRegion(Region* region) {
  free_size_ += region->size();
  free_regions_.insert(region);
}
// Best-fit lookup: the free list is ordered so that lower_bound yields the
// smallest free region able to hold |size| bytes, or nullptr if none can.
RegionAllocator::Region* RegionAllocator::FreeListFindRegion(size_t size) {
  Region key(0, size, false);
  auto candidate = free_regions_.lower_bound(&key);
  if (candidate == free_regions_.end()) {
    return nullptr;
  }
  return *candidate;
}
// Detaches |region| from the free list and subtracts its bytes from the
// running free-size total. The region must currently be on the free list.
void RegionAllocator::FreeListRemoveRegion(Region* region) {
  DCHECK(!region->is_used());
  auto iter = free_regions_.find(region);
  DCHECK_NE(iter, free_regions_.end());
  DCHECK_EQ(region, *iter);
  DCHECK_LE(region->size(), free_size_);
  free_size_ -= region->size();
  free_regions_.erase(iter);
}
// Splits |region| so that its first |new_size| bytes stay in |region| and
// the remainder becomes a freshly created region placed right after it.
// Both pieces inherit the used/free state of the original. Returns the
// newly created tail region.
RegionAllocator::Region* RegionAllocator::Split(Region* region,
                                                size_t new_size) {
  DCHECK(IsAligned(new_size, page_size_));
  DCHECK_NE(new_size, 0);
  DCHECK_GT(region->size(), new_size);

  // Create new region and put it to the lists after the |region|.
  bool used = region->is_used();
  Region* new_region =
      new Region(region->begin() + new_size, region->size() - new_size, used);
  if (!used) {
    // Remove region from the free list before updating its size: the free
    // list is ordered by size, so it cannot be mutated in place.
    FreeListRemoveRegion(region);
  }
  region->set_size(new_size);

  all_regions_.insert(new_region);

  if (!used) {
    // Re-insert both halves at their new sizes.
    FreeListAddRegion(region);
    FreeListAddRegion(new_region);
  }
  return new_region;
}
// Merges two adjacent regions by growing |prev| to also cover |next|, then
// deleting |next|. The caller must have already removed |next| from the
// free list; |prev_iter| remains valid afterwards.
void RegionAllocator::Merge(AllRegionsSet::iterator prev_iter,
                            AllRegionsSet::iterator next_iter) {
  Region* prev = *prev_iter;
  Region* next = *next_iter;
  DCHECK_EQ(prev->end(), next->begin());
  prev->set_size(prev->size() + next->size());

  all_regions_.erase(next_iter);  // prev_iter stays valid.

  // The |next| region must already not be in the free list.
  DCHECK_EQ(free_regions_.find(next), free_regions_.end());
  delete next;
}
// Allocates a |size|-byte region using best-fit over the free list.
// Returns the region's start address, or kAllocationFailure when no free
// region is large enough. |size| must be page-aligned and non-zero.
RegionAllocator::Address RegionAllocator::AllocateRegion(size_t size) {
  DCHECK_NE(size, 0);
  DCHECK(IsAligned(size, page_size_));

  Region* region = FreeListFindRegion(size);
  if (region == nullptr) return kAllocationFailure;

  if (region->size() != size) {
    // Keep only the first |size| bytes; the tail stays on the free list.
    Split(region, size);
  }
  DCHECK(IsAligned(region->begin(), page_size_));
  DCHECK_EQ(region->size(), size);

  // Mark region as used.
  FreeListRemoveRegion(region);
  region->set_is_used(true);
  return region->begin();
}
// Like AllocateRegion(size), but while the allocator is lightly loaded it
// first tries up to kMaxRandomizationAttempts page-aligned addresses drawn
// from |rng| before falling back to deterministic best-fit allocation.
RegionAllocator::Address RegionAllocator::AllocateRegion(
    RandomNumberGenerator* rng, size_t size) {
  if (free_size() >= max_load_for_randomization_) {
    // There is enough free space for trying to randomize the address.
    size_t random = 0;

    for (int i = 0; i < kMaxRandomizationAttempts; i++) {
      rng->NextBytes(&random, sizeof(random));
      size_t random_offset = page_size_ * (random % region_size_in_pages_);
      // NOTE(review): for offsets near the region end, |address| + size can
      // exceed end(); AllocateRegionAt DCHECKs against that in debug builds
      // before rejecting — confirm this is intended.
      Address address = begin() + random_offset;
      if (AllocateRegionAt(address, size)) {
        return address;
      }
    }
    // Fall back to free list allocation.
  }
  return AllocateRegion(size);
}
// Reserves exactly [requested_address, requested_address + size) if that
// span currently lies inside a single free region; returns false when any
// part of it is used or out of range. Intended for setup-phase
// reservations and randomized placement.
bool RegionAllocator::AllocateRegionAt(Address requested_address, size_t size) {
  DCHECK(IsAligned(requested_address, page_size_));
  DCHECK_NE(size, 0);
  DCHECK(IsAligned(size, page_size_));

  Address requested_end = requested_address + size;
  DCHECK_LE(requested_end, end());

  Region* region;
  {
    AllRegionsSet::iterator region_iter = FindRegion(requested_address);
    if (region_iter == all_regions_.end()) {
      return false;
    }
    region = *region_iter;
  }
  if (region->is_used() || region->end() < requested_end) {
    return false;
  }
  // Found free region that includes the requested one.
  if (region->begin() != requested_address) {
    // Split the region at the |requested_address| boundary; |region| becomes
    // the tail starting exactly at the requested address.
    size_t new_size = requested_address - region->begin();
    DCHECK(IsAligned(new_size, page_size_));
    region = Split(region, new_size);
  }
  if (region->end() != requested_end) {
    // Split the region at the |requested_end| boundary.
    Split(region, size);
  }
  DCHECK_EQ(region->begin(), requested_address);
  DCHECK_EQ(region->size(), size);

  // Mark region as used.
  FreeListRemoveRegion(region);
  region->set_is_used(true);
  return true;
}
// Shrinks the used region starting exactly at |address| to |new_size|
// bytes (page-aligned; 0 frees it entirely). The freed tail is returned to
// the free list and coalesced with adjacent free regions. Returns the
// number of bytes freed, or 0 if no used region starts at |address|.
size_t RegionAllocator::TrimRegion(Address address, size_t new_size) {
  DCHECK(IsAligned(new_size, page_size_));

  AllRegionsSet::iterator region_iter = FindRegion(address);
  if (region_iter == all_regions_.end()) {
    return 0;
  }
  Region* region = *region_iter;
  if (region->begin() != address || !region->is_used()) {
    return 0;
  }

  // The region must not be in the free list.
  DCHECK_EQ(free_regions_.find(*region_iter), free_regions_.end());

  if (new_size > 0) {
    // Keep the first |new_size| bytes used; |region| becomes the tail that
    // is about to be freed.
    region = Split(region, new_size);
    ++region_iter;
  }
  size_t size = region->size();
  region->set_is_used(false);

  // Merge current region with the surrounding ones if they are free.
  if (region->end() != whole_region_.end()) {
    // There must be a range after the current one.
    AllRegionsSet::iterator next_iter = std::next(region_iter);
    DCHECK_NE(next_iter, all_regions_.end());
    if (!(*next_iter)->is_used()) {
      // |next| region object will be deleted during merge, remove it from
      // the free list.
      FreeListRemoveRegion(*next_iter);
      Merge(region_iter, next_iter);
    }
  }
  // Only merge with the predecessor when the whole region was freed; after
  // a partial trim the predecessor is the still-used head.
  if (new_size == 0 && region->begin() != whole_region_.begin()) {
    // There must be a range before the current one.
    AllRegionsSet::iterator prev_iter = std::prev(region_iter);
    DCHECK_NE(prev_iter, all_regions_.end());
    if (!(*prev_iter)->is_used()) {
      // |prev| region's size will change, we'll have to re-insert it into
      // the proper place of the free list.
      FreeListRemoveRegion(*prev_iter);
      Merge(prev_iter, region_iter);
      // |prev| region becomes the current region.
      region_iter = prev_iter;
      region = *region_iter;
    }
  }
  FreeListAddRegion(region);
  return size;
}
// If a used region starts exactly at |address|, returns its size;
// otherwise returns 0.
size_t RegionAllocator::CheckRegion(Address address) {
  auto candidate_iter = FindRegion(address);
  if (candidate_iter == all_regions_.end()) {
    return 0;
  }
  Region* candidate = *candidate_iter;
  const bool starts_here = candidate->begin() == address;
  if (!starts_here || !candidate->is_used()) {
    return 0;
  }
  return candidate->size();
}
// Prints this region as "[begin, end), size: N, used|free" with addresses
// in hex, restoring the stream's formatting flags before returning.
void RegionAllocator::Region::Print(std::ostream& os) const {
  std::ios::fmtflags flags = os.flags(std::ios::hex | std::ios::showbase);
  os << "[" << begin() << ", " << end() << "), size: " << size();
  os << ", " << (is_used() ? "used" : "free");
  os.flags(flags);
}
// Dumps the allocator's full state — bounds, sizes, and both region
// lists — to |os| in hex, restoring the stream's formatting flags before
// returning. Debug/diagnostic aid only.
void RegionAllocator::Print(std::ostream& os) const {
  std::ios::fmtflags flags = os.flags(std::ios::hex | std::ios::showbase);
  os << "RegionAllocator: [" << begin() << ", " << end() << ")";
  os << "\nsize: " << size();
  os << "\nfree_size: " << free_size();
  os << "\npage_size: " << page_size_;
  os << "\nall regions: ";
  for (const Region* region : all_regions_) {
    os << "\n ";
    region->Print(os);
  }
  os << "\nfree regions: ";
  for (const Region* region : free_regions_) {
    os << "\n ";
    region->Print(os);
  }
  os << "\n";
  os.flags(flags);
}
} // namespace base
} // namespace v8

164
deps/v8/src/base/region-allocator.h vendored Normal file
View File

@ -0,0 +1,164 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_BASE_REGION_ALLOCATOR_H_
#define V8_BASE_REGION_ALLOCATOR_H_
#include <set>
#include "src/base/address-region.h"
#include "src/base/utils/random-number-generator.h"
#include "testing/gtest/include/gtest/gtest_prod.h" // nogncheck
namespace v8 {
namespace base {
// Helper class for managing used/free regions within [address, address+size)
// region. Minimum allocation unit is |page_size|. Requested allocation size
// is rounded up to |page_size|.
// The region allocation algorithm implements best-fit with coalescing strategy:
// it tries to find a smallest suitable free region upon allocation and tries
// to merge region with its neighbors upon freeing.
//
// This class does not perform any actual region reservation.
// Not thread-safe.
class V8_BASE_EXPORT RegionAllocator final {
 public:
  typedef uintptr_t Address;

  // Sentinel returned by AllocateRegion() when no suitable free region exists.
  static constexpr Address kAllocationFailure = static_cast<Address>(-1);

  RegionAllocator(Address address, size_t size, size_t page_size);
  ~RegionAllocator();

  // Allocates region of |size| (must be |page_size|-aligned). Returns
  // the address of the region on success or kAllocationFailure.
  Address AllocateRegion(size_t size);

  // Same as above but tries to randomize the region displacement.
  Address AllocateRegion(RandomNumberGenerator* rng, size_t size);

  // Allocates region of |size| at |requested_address| if it's free. Both the
  // address and the size must be |page_size|-aligned. On success returns
  // true.
  // This kind of allocation is supposed to be used during setup phase to mark
  // certain regions as used or for randomizing regions displacement.
  bool AllocateRegionAt(Address requested_address, size_t size);

  // Frees region at given |address|, returns the size of the region.
  // There must be a used region starting at given address otherwise nothing
  // will be freed and 0 will be returned.
  // Implemented as trimming the region down to zero size.
  size_t FreeRegion(Address address) { return TrimRegion(address, 0); }

  // Decreases size of the previously allocated region at |address|, returns
  // freed size. |new_size| must be |page_size|-aligned and
  // less than or equal to current region's size. Setting new size to zero
  // frees the region.
  size_t TrimRegion(Address address, size_t new_size);

  // If there is a used region starting at given address returns its size
  // otherwise 0.
  size_t CheckRegion(Address address);

  // Bounds and size of the whole managed region.
  Address begin() const { return whole_region_.begin(); }
  Address end() const { return whole_region_.end(); }
  size_t size() const { return whole_region_.size(); }

  bool contains(Address address) const {
    return whole_region_.contains(address);
  }

  bool contains(Address address, size_t size) const {
    return whole_region_.contains(address, size);
  }

  // Total size of not yet acquired regions.
  size_t free_size() const { return free_size_; }

  // The alignment of the allocated region's addresses and granularity of
  // the allocated region's sizes.
  size_t page_size() const { return page_size_; }

  void Print(std::ostream& os) const;

 private:
  // Bookkeeping node: an address range tagged with a used/free flag.
  class Region : public AddressRegion {
   public:
    Region(Address address, size_t size, bool is_used)
        : AddressRegion(address, size), is_used_(is_used) {}

    bool is_used() const { return is_used_; }
    void set_is_used(bool used) { is_used_ = used; }

    void Print(std::ostream& os) const;

   private:
    bool is_used_;
  };

  // The whole region.
  const Region whole_region_;

  // Number of |page_size_| pages in the whole region.
  const size_t region_size_in_pages_;

  // If the free size is less than this value - stop trying to randomize the
  // allocation addresses.
  const size_t max_load_for_randomization_;

  // Size of all free regions.
  size_t free_size_;

  // Minimum region size. Must be a power of 2.
  const size_t page_size_;

  // Orders regions by end address; used for address-based lookup.
  struct AddressEndOrder {
    bool operator()(const Region* a, const Region* b) const {
      return a->end() < b->end();
    }
  };
  // All regions ordered by addresses.
  typedef std::set<Region*, AddressEndOrder> AllRegionsSet;
  AllRegionsSet all_regions_;

  // Orders free regions by size (address breaks ties); enables best-fit
  // selection via lower-bound search.
  struct SizeAddressOrder {
    bool operator()(const Region* a, const Region* b) const {
      if (a->size() != b->size()) return a->size() < b->size();
      return a->begin() < b->begin();
    }
  };
  // Free regions ordered by sizes and addresses.
  std::set<Region*, SizeAddressOrder> free_regions_;

  // Returns an iterator to the region containing the given address.
  // NOTE(review): original comment said "or nullptr", but the return type is
  // an iterator — presumably all_regions_.end() signals not-found; confirm
  // against the .cc implementation.
  AllRegionsSet::iterator FindRegion(Address address);

  // Adds given region to the set of free regions.
  void FreeListAddRegion(Region* region);

  // Finds best-fit free region for given size.
  Region* FreeListFindRegion(size_t size);

  // Removes given region from the set of free regions.
  void FreeListRemoveRegion(Region* region);

  // Splits given |region| into two: one of |new_size| size and a new one
  // having the rest. The new region is returned.
  Region* Split(Region* region, size_t new_size);

  // For two coalescing regions merges |next| to |prev| and deletes |next|.
  void Merge(AllRegionsSet::iterator prev_iter,
             AllRegionsSet::iterator next_iter);

  FRIEND_TEST(RegionAllocatorTest, AllocateRegionRandom);
  FRIEND_TEST(RegionAllocatorTest, Fragmentation);
  FRIEND_TEST(RegionAllocatorTest, FindRegion);
  FRIEND_TEST(RegionAllocatorTest, Contains);

  DISALLOW_COPY_AND_ASSIGN(RegionAllocator);
};
} // namespace base
} // namespace v8
#endif // V8_BASE_REGION_ALLOCATOR_H_

View File

@ -49,7 +49,7 @@ class CheckedNumeric {
public: public:
typedef T type; typedef T type;
CheckedNumeric() {} CheckedNumeric() = default;
// Copy constructor. // Copy constructor.
template <typename Src> template <typename Src>

267
deps/v8/src/base/threaded-list.h vendored Normal file
View File

@ -0,0 +1,267 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_BASE_THREADED_LIST_H_
#define V8_BASE_THREADED_LIST_H_
#include <iterator>
#include "src/base/compiler-specific.h"
#include "src/base/macros.h"
namespace v8 {
namespace base {
// Default traits for ThreadedListBase: expects nodes to expose the storage
// location of their successor pointer via a T** next() method. Supply a
// custom traits class to thread the list through a different field.
template <typename T>
struct ThreadedListTraits {
  static T** next(T* t) { return t->next(); }
};
// Represents a linked list that threads through the nodes in the linked list.
// Entries in the list are pointers to nodes. By default nodes need to have a
// T** next() method that returns the location where the next value is stored.
// The default can be overridden by providing a custom ThreadedListTraits class.
// Invariant maintained throughout: |head_| points at the first node (or is
// nullptr), and |tail_| points at the |next| slot of the last node — or at
// |head_| itself when the list is empty — so appends are O(1).
template <typename T, typename BaseClass,
          typename TLTraits = ThreadedListTraits<T>>
class ThreadedListBase final : public BaseClass {
 public:
  ThreadedListBase() : head_(nullptr), tail_(&head_) {}

  // Appends |v| in O(1). |v| must not already be linked into a list
  // (its next pointer must be null).
  void Add(T* v) {
    DCHECK_NULL(*tail_);
    DCHECK_NULL(*TLTraits::next(v));
    *tail_ = v;
    tail_ = TLTraits::next(v);
  }

  // Prepends |v| in O(1). |v| must not already be linked into a list.
  // NOTE(review): the first DCHECK dereferences |v| before the null check on
  // the next line; both are debug-only assertions.
  void AddFront(T* v) {
    DCHECK_NULL(*TLTraits::next(v));
    DCHECK_NOT_NULL(v);
    T** const next = TLTraits::next(v);
    *next = head_;
    if (head_ == nullptr) tail_ = next;
    head_ = v;
  }

  // Reinitializing the head to a new node, this costs O(n): the tail pointer
  // must be recomputed by walking the chain starting at |v|.
  void ReinitializeHead(T* v) {
    head_ = v;
    T* current = v;
    if (current != nullptr) {  // Find tail
      T* tmp;
      while ((tmp = *TLTraits::next(current))) {
        current = tmp;
      }
      tail_ = TLTraits::next(current);
    } else {
      tail_ = &head_;
    }
  }

  // Unlinks the first node in O(1), clearing its next pointer.
  // The list must be non-empty.
  void DropHead() {
    DCHECK_NOT_NULL(head_);
    T* old_head = head_;
    head_ = *TLTraits::next(head_);
    if (head_ == nullptr) tail_ = &head_;
    *TLTraits::next(old_head) = nullptr;
  }

  // Splices |list| onto the end of this list in O(1); |list| is left empty.
  void Append(ThreadedListBase&& list) {
    *tail_ = list.head_;
    tail_ = list.tail_;
    list.Clear();
  }

  // Splices |list| in front of this list in O(1); |list| is left empty.
  void Prepend(ThreadedListBase&& list) {
    if (list.head_ == nullptr) return;

    T* new_head = list.head_;
    *list.tail_ = head_;
    if (head_ == nullptr) {
      tail_ = list.tail_;
    }
    head_ = new_head;
    list.Clear();
  }

  // Detaches all nodes from this list. Does not touch the nodes' own next
  // pointers — they keep whatever chain they were linked into.
  void Clear() {
    head_ = nullptr;
    tail_ = &head_;
  }

  // Move-assignment: steals |other|'s nodes. |other| is cleared only in
  // DEBUG builds (it is otherwise left in a moved-from state).
  ThreadedListBase& operator=(ThreadedListBase&& other) V8_NOEXCEPT {
    head_ = other.head_;
    tail_ = other.head_ ? other.tail_ : &head_;
#ifdef DEBUG
    other.Clear();
#endif
    return *this;
  }

  // Move-construction: steals |other|'s nodes; an empty source yields an
  // empty list whose tail_ points at our own head_.
  ThreadedListBase(ThreadedListBase&& other) V8_NOEXCEPT
      : head_(other.head_),
        tail_(other.head_ ? other.tail_ : &head_) {
#ifdef DEBUG
    other.Clear();
#endif
  }

  // Removes |v| in O(n). Returns true iff |v| was found and unlinked;
  // |v|'s next pointer is cleared on success.
  bool Remove(T* v) {
    T* current = first();
    if (current == v) {
      DropHead();
      return true;
    }

    while (current != nullptr) {
      T* next = *TLTraits::next(current);
      if (next == v) {
        *TLTraits::next(current) = *TLTraits::next(next);
        *TLTraits::next(next) = nullptr;

        // If the removed node's next slot was the tail sentinel, |current|
        // is now the last node.
        if (TLTraits::next(next) == tail_) {
          tail_ = TLTraits::next(current);
        }
        return true;
      }
      current = next;
    }
    return false;
  }

  // Mutable forward iterator. |entry_| addresses the slot that stores the
  // current node, which lets operator= rewrite the chain in place.
  class Iterator final {
   public:
    using iterator_category = std::forward_iterator_tag;
    using difference_type = std::ptrdiff_t;
    using value_type = T*;
    using reference = value_type;
    using pointer = value_type*;

   public:
    Iterator& operator++() {
      entry_ = TLTraits::next(*entry_);
      return *this;
    }
    bool operator==(const Iterator& other) const {
      return entry_ == other.entry_;
    }
    bool operator!=(const Iterator& other) const {
      return entry_ != other.entry_;
    }
    T* operator*() { return *entry_; }
    T* operator->() { return *entry_; }
    // Replaces the node at this position with |entry|, linking |entry| to the
    // old node's successor. The displaced node's next pointer is not cleared.
    Iterator& operator=(T* entry) {
      T* next = *TLTraits::next(*entry_);
      *TLTraits::next(entry) = next;
      *entry_ = entry;
      return *this;
    }

   private:
    explicit Iterator(T** entry) : entry_(entry) {}

    T** entry_;

    friend class ThreadedListBase;
  };

  // Read-only forward iterator over the list.
  class ConstIterator final {
   public:
    using iterator_category = std::forward_iterator_tag;
    using difference_type = std::ptrdiff_t;
    using value_type = T*;
    using reference = const value_type;
    using pointer = const value_type*;

   public:
    ConstIterator& operator++() {
      entry_ = TLTraits::next(*entry_);
      return *this;
    }
    bool operator==(const ConstIterator& other) const {
      return entry_ == other.entry_;
    }
    bool operator!=(const ConstIterator& other) const {
      return entry_ != other.entry_;
    }
    const T* operator*() const { return *entry_; }

   private:
    explicit ConstIterator(T* const* entry) : entry_(entry) {}

    T* const* entry_;

    friend class ThreadedListBase;
  };

  // end() is the tail sentinel slot, which always holds nullptr.
  Iterator begin() { return Iterator(&head_); }
  Iterator end() { return Iterator(tail_); }

  ConstIterator begin() const { return ConstIterator(&head_); }
  ConstIterator end() const { return ConstIterator(tail_); }

  // Rewinds the list's tail to the reset point, i.e., cutting off the rest of
  // the list, including the reset_point.
  void Rewind(Iterator reset_point) {
    tail_ = reset_point.entry_;
    *tail_ = nullptr;
  }

  // Moves the tail of the from_list, starting at the from_location, to the end
  // of this list. This list must currently end in nullptr (DCHECKed).
  void MoveTail(ThreadedListBase* from_list, Iterator from_location) {
    if (from_list->end() != from_location) {
      DCHECK_NULL(*tail_);
      *tail_ = *from_location;
      tail_ = from_list->tail_;
      from_list->Rewind(from_location);
    }
  }

  bool is_empty() const { return head_ == nullptr; }

  T* first() const { return head_; }

  // Slow. For testing purposes.
  int LengthForTest() {
    int result = 0;
    for (Iterator t = begin(); t != end(); ++t) ++result;
    return result;
  }

  // Slow (O(i)). For testing purposes.
  T* AtForTest(int i) {
    Iterator t = begin();
    while (i-- > 0) ++t;
    return *t;
  }

  // CHECKs the head_/tail_ invariant: tail_ must address the next slot of the
  // last node, or head_ itself when the list is empty.
  bool Verify() {
    T* last = this->first();
    if (last == nullptr) {
      CHECK_EQ(&head_, tail_);
    } else {
      while (*TLTraits::next(last) != nullptr) {
        last = *TLTraits::next(last);
      }
      CHECK_EQ(TLTraits::next(last), tail_);
    }
    return true;
  }

 private:
  T* head_;
  T** tail_;  // Always points at the last node's next slot, or at head_.
  DISALLOW_COPY_AND_ASSIGN(ThreadedListBase);
};
// Empty base for lists that need no additional base-class functionality.
struct EmptyBase {};

// Convenience alias: a threaded list with no extra base class.
template <typename T, typename TLTraits = ThreadedListTraits<T>>
using ThreadedList = ThreadedListBase<T, EmptyBase, TLTraits>;
} // namespace base
} // namespace v8
#endif // V8_BASE_THREADED_LIST_H_

Some files were not shown because too many files have changed in this diff Show More