Bug Summary

File:out/../deps/v8/src/base/bit-field.h
Warning:line 56, column 34
The result of the '<<' expression is undefined

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name code-stub-assembler.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model pic -pic-level 2 -pic-is-pie -mframe-pointer=all -relaxed-aliasing -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -ffunction-sections -fdata-sections -fcoverage-compilation-dir=/home/maurizio/node-v18.6.0/out -resource-dir /usr/local/lib/clang/16.0.0 -D _GLIBCXX_USE_CXX11_ABI=1 -D NODE_OPENSSL_CONF_NAME=nodejs_conf -D NODE_OPENSSL_HAS_QUIC -D V8_GYP_BUILD -D V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP=64 -D __STDC_FORMAT_MACROS -D OPENSSL_NO_PINSHARED -D OPENSSL_THREADS -D V8_TARGET_ARCH_X64 -D V8_HAVE_TARGET_OS -D V8_TARGET_OS_LINUX -D V8_EMBEDDER_STRING="-node.8" -D ENABLE_DISASSEMBLER -D V8_PROMISE_INTERNAL_FIELD_COUNT=1 -D V8_SHORT_BUILTIN_CALLS -D OBJECT_PRINT -D V8_INTL_SUPPORT -D V8_ATOMIC_OBJECT_FIELD_WRITES -D V8_ENABLE_LAZY_SOURCE_POSITIONS -D V8_USE_SIPHASH -D V8_SHARED_RO_HEAP -D V8_WIN64_UNWINDING_INFO -D V8_ENABLE_REGEXP_INTERPRETER_THREADED_DISPATCH -D V8_SNAPSHOT_COMPRESSION -D V8_ENABLE_WEBASSEMBLY -D V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS -D V8_ALLOCATION_FOLDING -D V8_ALLOCATION_SITE_TRACKING -D V8_SCRIPTORMODULE_LEGACY_LIFETIME -D V8_ADVANCED_BIGINT_ALGORITHMS -D UCONFIG_NO_SERVICE=1 -D U_ENABLE_DYLOAD=0 -D U_STATIC_IMPLEMENTATION=1 -D U_HAVE_STD_STRING=1 -D UCONFIG_NO_BREAK_ITERATION=0 -I ../deps/v8 -I ../deps/v8/include -I /home/maurizio/node-v18.6.0/out/Release/obj/gen -I /home/maurizio/node-v18.6.0/out/Release/obj/gen/generate-bytecode-output-root -I ../deps/icu-small/source/i18n -I ../deps/icu-small/source/common -internal-isystem /usr/lib/gcc/x86_64-redhat-linux/8/../../../../include/c++/8 -internal-isystem /usr/lib/gcc/x86_64-redhat-linux/8/../../../../include/c++/8/x86_64-redhat-linux -internal-isystem /usr/lib/gcc/x86_64-redhat-linux/8/../../../../include/c++/8/backward -internal-isystem /usr/local/lib/clang/16.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/lib/gcc/x86_64-redhat-linux/8/../../../../x86_64-redhat-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O3 -Wno-unused-parameter -Wno-return-type -std=gnu++17 -fdeprecated-macro -fdebug-compilation-dir=/home/maurizio/node-v18.6.0/out -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=html -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /tmp/scan-build-2022-08-22-142216-507842-1 -x c++ ../deps/v8/src/codegen/code-stub-assembler.cc

../deps/v8/src/codegen/code-stub-assembler.cc

1// Copyright 2016 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/codegen/code-stub-assembler.h"
6
7#include <functional>
8
9#include "include/v8-internal.h"
10#include "src/base/macros.h"
11#include "src/codegen/code-factory.h"
12#include "src/codegen/tnode.h"
13#include "src/common/globals.h"
14#include "src/execution/frames-inl.h"
15#include "src/execution/frames.h"
16#include "src/execution/protectors.h"
17#include "src/heap/heap-inl.h" // For MemoryChunk. TODO(jkummerow): Drop.
18#include "src/heap/memory-chunk.h"
19#include "src/logging/counters.h"
20#include "src/numbers/integer-literal-inl.h"
21#include "src/objects/api-callbacks.h"
22#include "src/objects/cell.h"
23#include "src/objects/descriptor-array.h"
24#include "src/objects/function-kind.h"
25#include "src/objects/heap-number.h"
26#include "src/objects/instance-type.h"
27#include "src/objects/js-generator.h"
28#include "src/objects/oddball.h"
29#include "src/objects/ordered-hash-table-inl.h"
30#include "src/objects/property-cell.h"
31#include "src/roots/roots.h"
32
33#if V8_ENABLE_WEBASSEMBLY1
34#include "src/wasm/wasm-objects.h"
35#endif // V8_ENABLE_WEBASSEMBLY
36
37namespace v8 {
38namespace internal {
39
40CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
41 : compiler::CodeAssembler(state),
42 TorqueGeneratedExportedMacrosAssembler(state) {
43 if (DEBUG_BOOLfalse && FLAG_csa_trap_on_node != nullptr) {
44 HandleBreakOnNode();
45 }
46}
47
48void CodeStubAssembler::HandleBreakOnNode() {
49 // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
50 // string specifying the name of a stub and NODE is number specifying node id.
51 const char* name = state()->name();
52 size_t name_length = strlen(name);
53 if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
54 // Different name.
55 return;
56 }
57 size_t option_length = strlen(FLAG_csa_trap_on_node);
58 if (option_length < name_length + 2 ||
59 FLAG_csa_trap_on_node[name_length] != ',') {
60 // Option is too short.
61 return;
62 }
63 const char* start = &FLAG_csa_trap_on_node[name_length + 1];
64 char* end;
65 int node_id = static_cast<int>(strtol(start, &end, 10));
66 if (start == end) {
67 // Bad node id.
68 return;
69 }
70 BreakOnNode(node_id);
71}
72
73void CodeStubAssembler::Dcheck(const BranchGenerator& branch,
74 const char* message, const char* file, int line,
75 std::initializer_list<ExtraNode> extra_nodes) {
76#if defined(DEBUG)
77 if (FLAG_debug_code) {
78 Check(branch, message, file, line, extra_nodes);
79 }
80#endif
81}
82
83void CodeStubAssembler::Dcheck(const NodeGenerator<BoolT>& condition_body,
84 const char* message, const char* file, int line,
85 std::initializer_list<ExtraNode> extra_nodes) {
86#if defined(DEBUG)
87 if (FLAG_debug_code) {
88 Check(condition_body, message, file, line, extra_nodes);
89 }
90#endif
91}
92
93void CodeStubAssembler::Dcheck(TNode<Word32T> condition_node,
94 const char* message, const char* file, int line,
95 std::initializer_list<ExtraNode> extra_nodes) {
96#if defined(DEBUG)
97 if (FLAG_debug_code) {
98 Check(condition_node, message, file, line, extra_nodes);
99 }
100#endif
101}
102
103void CodeStubAssembler::Check(const BranchGenerator& branch,
104 const char* message, const char* file, int line,
105 std::initializer_list<ExtraNode> extra_nodes) {
106 Label ok(this);
107 Label not_ok(this, Label::kDeferred);
108 if (message != nullptr) {
109 Comment("[ Assert: ", message);
110 } else {
111 Comment("[ Assert");
112 }
113 branch(&ok, &not_ok);
114
115 BIND(&not_ok)Bind(&not_ok);
116 std::vector<FileAndLine> file_and_line;
117 if (file != nullptr) {
118 file_and_line.push_back({file, line});
119 }
120 FailAssert(message, file_and_line, extra_nodes);
121
122 BIND(&ok)Bind(&ok);
123 Comment("] Assert");
124}
125
126void CodeStubAssembler::Check(const NodeGenerator<BoolT>& condition_body,
127 const char* message, const char* file, int line,
128 std::initializer_list<ExtraNode> extra_nodes) {
129 BranchGenerator branch = [=](Label* ok, Label* not_ok) {
130 TNode<BoolT> condition = condition_body();
131 Branch(condition, ok, not_ok);
132 };
133
134 Check(branch, message, file, line, extra_nodes);
135}
136
137void CodeStubAssembler::Check(TNode<Word32T> condition_node,
138 const char* message, const char* file, int line,
139 std::initializer_list<ExtraNode> extra_nodes) {
140 BranchGenerator branch = [=](Label* ok, Label* not_ok) {
141 Branch(condition_node, ok, not_ok);
142 };
143
144 Check(branch, message, file, line, extra_nodes);
145}
146
147void CodeStubAssembler::IncrementCallCount(
148 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot_id) {
149 Comment("increment call count");
150 TNode<Smi> call_count =
151 CAST(LoadFeedbackVectorSlot(feedback_vector, slot_id, kTaggedSize))Cast(LoadFeedbackVectorSlot(feedback_vector, slot_id, kTaggedSize
))
;
152 // The lowest {FeedbackNexus::CallCountField::kShift} bits of the call
153 // count are used as flags. To increment the call count by 1 we hence
154 // have to increment by 1 << {FeedbackNexus::CallCountField::kShift}.
155 TNode<Smi> new_count = SmiAdd(
156 call_count, SmiConstant(1 << FeedbackNexus::CallCountField::kShift));
157 // Count is Smi, so we don't need a write barrier.
158 StoreFeedbackVectorSlot(feedback_vector, slot_id, new_count,
159 SKIP_WRITE_BARRIER, kTaggedSize);
160}
161
162void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
163 Label ok(this), not_ok(this, Label::kDeferred);
164 Branch(condition, &ok, &not_ok);
165 BIND(&not_ok)Bind(&not_ok);
166 Unreachable();
167 BIND(&ok)Bind(&ok);
168}
169
170void CodeStubAssembler::FailAssert(
171 const char* message, const std::vector<FileAndLine>& files_and_lines,
172 std::initializer_list<ExtraNode> extra_nodes) {
173 DCHECK_NOT_NULL(message)((void) 0);
174 base::EmbeddedVector<char, 1024> chars;
175 std::stringstream stream;
176 for (auto it = files_and_lines.rbegin(); it != files_and_lines.rend(); ++it) {
177 if (it->first != nullptr) {
178 stream << " [" << it->first << ":" << it->second << "]";
179#ifndef DEBUG
180 // To limit the size of these strings in release builds, we include only
181 // the innermost macro's file name and line number.
182 break;
183#endif
184 }
185 }
186 std::string files_and_lines_text = stream.str();
187 if (files_and_lines_text.size() != 0) {
188 SNPrintF(chars, "%s%s", message, files_and_lines_text.c_str());
189 message = chars.begin();
190 }
191 TNode<String> message_node = StringConstant(message);
192
193#ifdef DEBUG
194 // Only print the extra nodes in debug builds.
195 for (auto& node : extra_nodes) {
196 CallRuntime(Runtime::kPrintWithNameForAssert, SmiConstant(0),
197 StringConstant(node.second), node.first);
198 }
199#endif
200
201 AbortCSADcheck(message_node);
202 Unreachable();
203}
204
205TNode<Int32T> CodeStubAssembler::SelectInt32Constant(TNode<BoolT> condition,
206 int true_value,
207 int false_value) {
208 return SelectConstant<Int32T>(condition, Int32Constant(true_value),
209 Int32Constant(false_value));
210}
211
212TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(TNode<BoolT> condition,
213 int true_value,
214 int false_value) {
215 return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
216 IntPtrConstant(false_value));
217}
218
219TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
220 TNode<BoolT> condition) {
221 return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
222}
223
224TNode<Smi> CodeStubAssembler::SelectSmiConstant(TNode<BoolT> condition,
225 Smi true_value,
226 Smi false_value) {
227 return SelectConstant<Smi>(condition, SmiConstant(true_value),
228 SmiConstant(false_value));
229}
230
231TNode<Smi> CodeStubAssembler::NoContextConstant() {
232 return SmiConstant(Context::kNoContext);
233}
234
235#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
236 TNode<std::remove_pointer<std::remove_reference<decltype( \
237 std::declval<Heap>().rootAccessorName())>::type>::type> \
238 CodeStubAssembler::name##Constant() { \
239 return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
240 std::declval<Heap>().rootAccessorName())>::type>::type>( \
241 LoadRoot(RootIndex::k##rootIndexName)); \
242 }
243HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)HEAP_CONSTANT_ACCESSOR(ArrayIteratorProtector, array_iterator_protector
, ArrayIteratorProtector) HEAP_CONSTANT_ACCESSOR(ArraySpeciesProtector
, array_species_protector, ArraySpeciesProtector) HEAP_CONSTANT_ACCESSOR
(AsyncFunctionAwaitRejectSharedFun, async_function_await_reject_shared_fun
, AsyncFunctionAwaitRejectSharedFun) HEAP_CONSTANT_ACCESSOR(AsyncFunctionAwaitResolveSharedFun
, async_function_await_resolve_shared_fun, AsyncFunctionAwaitResolveSharedFun
) HEAP_CONSTANT_ACCESSOR(AsyncGeneratorAwaitRejectSharedFun, async_generator_await_reject_shared_fun
, AsyncGeneratorAwaitRejectSharedFun) HEAP_CONSTANT_ACCESSOR(
AsyncGeneratorAwaitResolveSharedFun, async_generator_await_resolve_shared_fun
, AsyncGeneratorAwaitResolveSharedFun) HEAP_CONSTANT_ACCESSOR
(AsyncGeneratorReturnClosedRejectSharedFun, async_generator_return_closed_reject_shared_fun
, AsyncGeneratorReturnClosedRejectSharedFun) HEAP_CONSTANT_ACCESSOR
(AsyncGeneratorReturnClosedResolveSharedFun, async_generator_return_closed_resolve_shared_fun
, AsyncGeneratorReturnClosedResolveSharedFun) HEAP_CONSTANT_ACCESSOR
(AsyncGeneratorReturnResolveSharedFun, async_generator_return_resolve_shared_fun
, AsyncGeneratorReturnResolveSharedFun) HEAP_CONSTANT_ACCESSOR
(AsyncGeneratorYieldResolveSharedFun, async_generator_yield_resolve_shared_fun
, AsyncGeneratorYieldResolveSharedFun) HEAP_CONSTANT_ACCESSOR
(AsyncIteratorValueUnwrapSharedFun, async_iterator_value_unwrap_shared_fun
, AsyncIteratorValueUnwrapSharedFun) HEAP_CONSTANT_ACCESSOR(IsConcatSpreadableProtector
, is_concat_spreadable_protector, IsConcatSpreadableProtector
) HEAP_CONSTANT_ACCESSOR(MapIteratorProtector, map_iterator_protector
, MapIteratorProtector) HEAP_CONSTANT_ACCESSOR(NoElementsProtector
, no_elements_protector, NoElementsProtector) HEAP_CONSTANT_ACCESSOR
(MegaDOMProtector, mega_dom_protector, MegaDOMProtector) HEAP_CONSTANT_ACCESSOR
(NumberStringCache, number_string_cache, NumberStringCache) HEAP_CONSTANT_ACCESSOR
(PromiseAllResolveElementSharedFun, promise_all_resolve_element_shared_fun
, PromiseAllResolveElementSharedFun) HEAP_CONSTANT_ACCESSOR(PromiseAllSettledRejectElementSharedFun
, promise_all_settled_reject_element_shared_fun, PromiseAllSettledRejectElementSharedFun
) HEAP_CONSTANT_ACCESSOR(PromiseAllSettledResolveElementSharedFun
, promise_all_settled_resolve_element_shared_fun, PromiseAllSettledResolveElementSharedFun
) HEAP_CONSTANT_ACCESSOR(PromiseAnyRejectElementSharedFun, promise_any_reject_element_shared_fun
, PromiseAnyRejectElementSharedFun) HEAP_CONSTANT_ACCESSOR(PromiseCapabilityDefaultRejectSharedFun
, promise_capability_default_reject_shared_fun, PromiseCapabilityDefaultRejectSharedFun
) HEAP_CONSTANT_ACCESSOR(PromiseCapabilityDefaultResolveSharedFun
, promise_capability_default_resolve_shared_fun, PromiseCapabilityDefaultResolveSharedFun
) HEAP_CONSTANT_ACCESSOR(PromiseCatchFinallySharedFun, promise_catch_finally_shared_fun
, PromiseCatchFinallySharedFun) HEAP_CONSTANT_ACCESSOR(PromiseGetCapabilitiesExecutorSharedFun
, promise_get_capabilities_executor_shared_fun, PromiseGetCapabilitiesExecutorSharedFun
) HEAP_CONSTANT_ACCESSOR(PromiseResolveProtector, promise_resolve_protector
, PromiseResolveProtector) HEAP_CONSTANT_ACCESSOR(PromiseSpeciesProtector
, promise_species_protector, PromiseSpeciesProtector) HEAP_CONSTANT_ACCESSOR
(PromiseThenFinallySharedFun, promise_then_finally_shared_fun
, PromiseThenFinallySharedFun) HEAP_CONSTANT_ACCESSOR(PromiseThenProtector
, promise_then_protector, PromiseThenProtector) HEAP_CONSTANT_ACCESSOR
(PromiseThrowerFinallySharedFun, promise_thrower_finally_shared_fun
, PromiseThrowerFinallySharedFun) HEAP_CONSTANT_ACCESSOR(PromiseValueThunkFinallySharedFun
, promise_value_thunk_finally_shared_fun, PromiseValueThunkFinallySharedFun
) HEAP_CONSTANT_ACCESSOR(ProxyRevokeSharedFun, proxy_revoke_shared_fun
, ProxyRevokeSharedFun) HEAP_CONSTANT_ACCESSOR(RegExpSpeciesProtector
, regexp_species_protector, RegExpSpeciesProtector) HEAP_CONSTANT_ACCESSOR
(SetIteratorProtector, set_iterator_protector, SetIteratorProtector
) HEAP_CONSTANT_ACCESSOR(SingleCharacterStringCache, single_character_string_cache
, SingleCharacterStringCache) HEAP_CONSTANT_ACCESSOR(StringIteratorProtector
, string_iterator_protector, StringIteratorProtector) HEAP_CONSTANT_ACCESSOR
(TypedArraySpeciesProtector, typed_array_species_protector, TypedArraySpeciesProtector
)
244#undef HEAP_CONSTANT_ACCESSOR
245
246#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
247 TNode<std::remove_pointer<std::remove_reference<decltype( \
248 std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type> \
249 CodeStubAssembler::name##Constant() { \
250 return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
251 std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>( \
252 LoadRoot(RootIndex::k##rootIndexName)); \
253 }
254HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)HEAP_CONSTANT_ACCESSOR(AllocationSiteWithoutWeakNextMap, allocation_site_without_weaknext_map
, AllocationSiteWithoutWeakNextMap) HEAP_CONSTANT_ACCESSOR(AllocationSiteWithWeakNextMap
, allocation_site_map, AllocationSiteMap) HEAP_CONSTANT_ACCESSOR
(arguments_to_string, arguments_to_string, ArgumentsToString)
HEAP_CONSTANT_ACCESSOR(Array_string, Array_string, ArrayString
) HEAP_CONSTANT_ACCESSOR(array_to_string, array_to_string, ArrayToString
) HEAP_CONSTANT_ACCESSOR(BooleanMap, boolean_map, BooleanMap)
HEAP_CONSTANT_ACCESSOR(boolean_to_string, boolean_to_string,
BooleanToString) HEAP_CONSTANT_ACCESSOR(ConsOneByteStringMap
, cons_one_byte_string_map, ConsOneByteStringMap) HEAP_CONSTANT_ACCESSOR
(ConsStringMap, cons_string_map, ConsStringMap) HEAP_CONSTANT_ACCESSOR
(constructor_string, constructor_string, ConstructorString) HEAP_CONSTANT_ACCESSOR
(date_to_string, date_to_string, DateToString) HEAP_CONSTANT_ACCESSOR
(default_string, default_string, DefaultString) HEAP_CONSTANT_ACCESSOR
(EmptyByteArray, empty_byte_array, EmptyByteArray) HEAP_CONSTANT_ACCESSOR
(EmptyFixedArray, empty_fixed_array, EmptyFixedArray) HEAP_CONSTANT_ACCESSOR
(EmptyScopeInfo, empty_scope_info, EmptyScopeInfo) HEAP_CONSTANT_ACCESSOR
(EmptyPropertyDictionary, empty_property_dictionary, EmptyPropertyDictionary
) HEAP_CONSTANT_ACCESSOR(EmptyOrderedPropertyDictionary, empty_ordered_property_dictionary
, EmptyOrderedPropertyDictionary) HEAP_CONSTANT_ACCESSOR(EmptySwissPropertyDictionary
, empty_swiss_property_dictionary, EmptySwissPropertyDictionary
) HEAP_CONSTANT_ACCESSOR(EmptySlowElementDictionary, empty_slow_element_dictionary
, EmptySlowElementDictionary) HEAP_CONSTANT_ACCESSOR(empty_string
, empty_string, EmptyString) HEAP_CONSTANT_ACCESSOR(error_to_string
, error_to_string, ErrorToString) HEAP_CONSTANT_ACCESSOR(errors_string
, errors_string, ErrorsString) HEAP_CONSTANT_ACCESSOR(FalseValue
, false_value, False) HEAP_CONSTANT_ACCESSOR(FixedArrayMap, fixed_array_map
, FixedArrayMap) HEAP_CONSTANT_ACCESSOR(FixedCOWArrayMap, fixed_cow_array_map
, FixedCOWArrayMap) HEAP_CONSTANT_ACCESSOR(Function_string, function_string
, FunctionString) HEAP_CONSTANT_ACCESSOR(function_to_string, function_to_string
, FunctionToString) HEAP_CONSTANT_ACCESSOR(GlobalPropertyCellMap
, global_property_cell_map, PropertyCellMap) HEAP_CONSTANT_ACCESSOR
(has_instance_symbol, has_instance_symbol, HasInstanceSymbol)
HEAP_CONSTANT_ACCESSOR(Infinity_string, Infinity_string, InfinityString
) HEAP_CONSTANT_ACCESSOR(is_concat_spreadable_symbol, is_concat_spreadable_symbol
, IsConcatSpreadableSymbol) HEAP_CONSTANT_ACCESSOR(iterator_symbol
, iterator_symbol, IteratorSymbol) HEAP_CONSTANT_ACCESSOR(length_string
, length_string, LengthString) HEAP_CONSTANT_ACCESSOR(ManyClosuresCellMap
, many_closures_cell_map, ManyClosuresCellMap) HEAP_CONSTANT_ACCESSOR
(match_symbol, match_symbol, MatchSymbol) HEAP_CONSTANT_ACCESSOR
(megamorphic_symbol, megamorphic_symbol, MegamorphicSymbol) HEAP_CONSTANT_ACCESSOR
(mega_dom_symbol, mega_dom_symbol, MegaDOMSymbol) HEAP_CONSTANT_ACCESSOR
(message_string, message_string, MessageString) HEAP_CONSTANT_ACCESSOR
(minus_Infinity_string, minus_Infinity_string, MinusInfinityString
) HEAP_CONSTANT_ACCESSOR(MinusZeroValue, minus_zero_value, MinusZero
) HEAP_CONSTANT_ACCESSOR(name_string, name_string, NameString
) HEAP_CONSTANT_ACCESSOR(NanValue, nan_value, Nan) HEAP_CONSTANT_ACCESSOR
(NaN_string, NaN_string, NaNString) HEAP_CONSTANT_ACCESSOR(next_string
, next_string, NextString) HEAP_CONSTANT_ACCESSOR(NoClosuresCellMap
, no_closures_cell_map, NoClosuresCellMap) HEAP_CONSTANT_ACCESSOR
(null_to_string, null_to_string, NullToString) HEAP_CONSTANT_ACCESSOR
(NullValue, null_value, Null) HEAP_CONSTANT_ACCESSOR(number_string
, number_string, NumberString) HEAP_CONSTANT_ACCESSOR(number_to_string
, number_to_string, NumberToString) HEAP_CONSTANT_ACCESSOR(Object_string
, Object_string, ObjectString) HEAP_CONSTANT_ACCESSOR(object_to_string
, object_to_string, ObjectToString) HEAP_CONSTANT_ACCESSOR(OneByteStringMap
, one_byte_string_map, OneByteStringMap) HEAP_CONSTANT_ACCESSOR
(OneClosureCellMap, one_closure_cell_map, OneClosureCellMap) HEAP_CONSTANT_ACCESSOR
(OnePointerFillerMap, one_pointer_filler_map, OnePointerFillerMap
) HEAP_CONSTANT_ACCESSOR(PromiseCapabilityMap, promise_capability_map
, PromiseCapabilityMap) HEAP_CONSTANT_ACCESSOR(promise_forwarding_handler_symbol
, promise_forwarding_handler_symbol, PromiseForwardingHandlerSymbol
) HEAP_CONSTANT_ACCESSOR(PromiseFulfillReactionJobTaskMap, promise_fulfill_reaction_job_task_map
, PromiseFulfillReactionJobTaskMap) HEAP_CONSTANT_ACCESSOR(promise_handled_by_symbol
, promise_handled_by_symbol, PromiseHandledBySymbol) HEAP_CONSTANT_ACCESSOR
(PromiseReactionMap, promise_reaction_map, PromiseReactionMap
) HEAP_CONSTANT_ACCESSOR(PromiseRejectReactionJobTaskMap, promise_reject_reaction_job_task_map
, PromiseRejectReactionJobTaskMap) HEAP_CONSTANT_ACCESSOR(PromiseResolveThenableJobTaskMap
, promise_resolve_thenable_job_task_map, PromiseResolveThenableJobTaskMap
) HEAP_CONSTANT_ACCESSOR(prototype_string, prototype_string, PrototypeString
) HEAP_CONSTANT_ACCESSOR(replace_symbol, replace_symbol, ReplaceSymbol
) HEAP_CONSTANT_ACCESSOR(regexp_to_string, regexp_to_string, RegexpToString
) HEAP_CONSTANT_ACCESSOR(resolve_string, resolve_string, ResolveString
) HEAP_CONSTANT_ACCESSOR(return_string, return_string, ReturnString
) HEAP_CONSTANT_ACCESSOR(search_symbol, search_symbol, SearchSymbol
) HEAP_CONSTANT_ACCESSOR(species_symbol, species_symbol, SpeciesSymbol
) HEAP_CONSTANT_ACCESSOR(StaleRegister, stale_register, StaleRegister
) HEAP_CONSTANT_ACCESSOR(StoreHandler0Map, store_handler0_map
, StoreHandler0Map) HEAP_CONSTANT_ACCESSOR(string_string, string_string
, StringString) HEAP_CONSTANT_ACCESSOR(string_to_string, string_to_string
, StringToString) HEAP_CONSTANT_ACCESSOR(StringMap, string_map
, StringMap) HEAP_CONSTANT_ACCESSOR(TheHoleValue, the_hole_value
, TheHole) HEAP_CONSTANT_ACCESSOR(then_string, then_string, ThenString
) HEAP_CONSTANT_ACCESSOR(toString_string, toString_string, ToStringString
) HEAP_CONSTANT_ACCESSOR(to_primitive_symbol, to_primitive_symbol
, ToPrimitiveSymbol) HEAP_CONSTANT_ACCESSOR(to_string_tag_symbol
, to_string_tag_symbol, ToStringTagSymbol) HEAP_CONSTANT_ACCESSOR
(TrueValue, true_value, True) HEAP_CONSTANT_ACCESSOR(undefined_to_string
, undefined_to_string, UndefinedToString) HEAP_CONSTANT_ACCESSOR
(UndefinedValue, undefined_value, Undefined) HEAP_CONSTANT_ACCESSOR
(uninitialized_symbol, uninitialized_symbol, UninitializedSymbol
) HEAP_CONSTANT_ACCESSOR(valueOf_string, valueOf_string, ValueOfString
) HEAP_CONSTANT_ACCESSOR(wasm_wrapped_object_symbol, wasm_wrapped_object_symbol
, WasmWrappedObjectSymbol) HEAP_CONSTANT_ACCESSOR(zero_string
, zero_string, ZeroString) HEAP_CONSTANT_ACCESSOR(AccessorInfoMap
, accessor_info_map, AccessorInfoMap) HEAP_CONSTANT_ACCESSOR(
AccessorPairMap, accessor_pair_map, AccessorPairMap) HEAP_CONSTANT_ACCESSOR
(AllocationMementoMap, allocation_memento_map, AllocationMementoMap
) HEAP_CONSTANT_ACCESSOR(ArrayBoilerplateDescriptionMap, array_boilerplate_description_map
, ArrayBoilerplateDescriptionMap) HEAP_CONSTANT_ACCESSOR(BreakPointMap
, break_point_map, BreakPointMap) HEAP_CONSTANT_ACCESSOR(BreakPointInfoMap
, break_point_info_map, BreakPointInfoMap) HEAP_CONSTANT_ACCESSOR
(BytecodeArrayMap, bytecode_array_map, BytecodeArrayMap) HEAP_CONSTANT_ACCESSOR
(CachedTemplateObjectMap, cached_template_object_map, CachedTemplateObjectMap
) HEAP_CONSTANT_ACCESSOR(CellMap, cell_map, CellMap) HEAP_CONSTANT_ACCESSOR
(WeakCellMap, weak_cell_map, WeakCellMap) HEAP_CONSTANT_ACCESSOR
(CodeMap, code_map, CodeMap) HEAP_CONSTANT_ACCESSOR(CodeDataContainerMap
, code_data_container_map, CodeDataContainerMap) HEAP_CONSTANT_ACCESSOR
(CoverageInfoMap, coverage_info_map, CoverageInfoMap) HEAP_CONSTANT_ACCESSOR
(DebugInfoMap, debug_info_map, DebugInfoMap) HEAP_CONSTANT_ACCESSOR
(FreeSpaceMap, free_space_map, FreeSpaceMap) HEAP_CONSTANT_ACCESSOR
(FeedbackVectorMap, feedback_vector_map, FeedbackVectorMap) HEAP_CONSTANT_ACCESSOR
(FixedDoubleArrayMap, fixed_double_array_map, FixedDoubleArrayMap
) HEAP_CONSTANT_ACCESSOR(FunctionTemplateInfoMap, function_template_info_map
, FunctionTemplateInfoMap) HEAP_CONSTANT_ACCESSOR(MegaDomHandlerMap
, mega_dom_handler_map, MegaDomHandlerMap) HEAP_CONSTANT_ACCESSOR
(MetaMap, meta_map, MapMap) HEAP_CONSTANT_ACCESSOR(PreparseDataMap
, preparse_data_map, PreparseDataMap) HEAP_CONSTANT_ACCESSOR(
PropertyArrayMap, property_array_map, PropertyArrayMap) HEAP_CONSTANT_ACCESSOR
(PrototypeInfoMap, prototype_info_map, PrototypeInfoMap) HEAP_CONSTANT_ACCESSOR
(SharedFunctionInfoMap, shared_function_info_map, SharedFunctionInfoMap
) HEAP_CONSTANT_ACCESSOR(SmallOrderedHashSetMap, small_ordered_hash_set_map
, SmallOrderedHashSetMap) HEAP_CONSTANT_ACCESSOR(SmallOrderedHashMapMap
, small_ordered_hash_map_map, SmallOrderedHashMapMap) HEAP_CONSTANT_ACCESSOR
(SmallOrderedNameDictionaryMap, small_ordered_name_dictionary_map
, SmallOrderedNameDictionaryMap) HEAP_CONSTANT_ACCESSOR(SwissNameDictionaryMap
, swiss_name_dictionary_map, SwissNameDictionaryMap) HEAP_CONSTANT_ACCESSOR
(SymbolMap, symbol_map, SymbolMap) HEAP_CONSTANT_ACCESSOR(TransitionArrayMap
, transition_array_map, TransitionArrayMap) HEAP_CONSTANT_ACCESSOR
(Tuple2Map, tuple2_map, Tuple2Map) HEAP_CONSTANT_ACCESSOR(HeapNumberMap
, heap_number_map, HeapNumberMap) HEAP_CONSTANT_ACCESSOR(WeakFixedArrayMap
, weak_fixed_array_map, WeakFixedArrayMap) HEAP_CONSTANT_ACCESSOR
(SloppyArgumentsElementsMap, sloppy_arguments_elements_map, SloppyArgumentsElementsMap
) HEAP_CONSTANT_ACCESSOR(DescriptorArrayMap, descriptor_array_map
, DescriptorArrayMap) HEAP_CONSTANT_ACCESSOR(StrongDescriptorArrayMap
, strong_descriptor_array_map, StrongDescriptorArrayMap) HEAP_CONSTANT_ACCESSOR
(UncompiledDataWithoutPreparseDataMap, uncompiled_data_without_preparse_data_map
, UncompiledDataWithoutPreparseDataMap) HEAP_CONSTANT_ACCESSOR
(UncompiledDataWithPreparseDataMap, uncompiled_data_with_preparse_data_map
, UncompiledDataWithPreparseDataMap) HEAP_CONSTANT_ACCESSOR(UncompiledDataWithoutPreparseDataWithJobMap
, uncompiled_data_without_preparse_data_with_job_map, UncompiledDataWithoutPreparseDataWithJobMap
) HEAP_CONSTANT_ACCESSOR(UncompiledDataWithPreparseDataAndJobMap
, uncompiled_data_with_preparse_data_and_job_map, UncompiledDataWithPreparseDataAndJobMap
) HEAP_CONSTANT_ACCESSOR(OnHeapBasicBlockProfilerDataMap, on_heap_basic_block_profiler_data_map
, OnHeapBasicBlockProfilerDataMap) HEAP_CONSTANT_ACCESSOR(TurbofanBitsetTypeMap
, turbofan_bitset_type_map, TurbofanBitsetTypeMap) HEAP_CONSTANT_ACCESSOR
(TurbofanUnionTypeMap, turbofan_union_type_map, TurbofanUnionTypeMap
) HEAP_CONSTANT_ACCESSOR(TurbofanRangeTypeMap, turbofan_range_type_map
, TurbofanRangeTypeMap) HEAP_CONSTANT_ACCESSOR(TurbofanHeapConstantTypeMap
, turbofan_heap_constant_type_map, TurbofanHeapConstantTypeMap
) HEAP_CONSTANT_ACCESSOR(TurbofanOtherNumberConstantTypeMap, turbofan_other_number_constant_type_map
, TurbofanOtherNumberConstantTypeMap) HEAP_CONSTANT_ACCESSOR(
InternalClassMap, internal_class_map, InternalClassMap) HEAP_CONSTANT_ACCESSOR
(SmiPairMap, smi_pair_map, SmiPairMap) HEAP_CONSTANT_ACCESSOR
(SmiBoxMap, smi_box_map, SmiBoxMap) HEAP_CONSTANT_ACCESSOR(ExportedSubClassBaseMap
, exported_sub_class_base_map, ExportedSubClassBaseMap) HEAP_CONSTANT_ACCESSOR
(ExportedSubClassMap, exported_sub_class_map, ExportedSubClassMap
) HEAP_CONSTANT_ACCESSOR(AbstractInternalClassSubclass1Map, abstract_internal_class_subclass1_map
, AbstractInternalClassSubclass1Map) HEAP_CONSTANT_ACCESSOR(AbstractInternalClassSubclass2Map
, abstract_internal_class_subclass2_map, AbstractInternalClassSubclass2Map
) HEAP_CONSTANT_ACCESSOR(InternalClassWithSmiElementsMap, internal_class_with_smi_elements_map
, InternalClassWithSmiElementsMap) HEAP_CONSTANT_ACCESSOR(InternalClassWithStructElementsMap
, internal_class_with_struct_elements_map, InternalClassWithStructElementsMap
) HEAP_CONSTANT_ACCESSOR(ExportedSubClass2Map, exported_sub_class2_map
, ExportedSubClass2Map) HEAP_CONSTANT_ACCESSOR(SortStateMap, sort_state_map
, SortStateMap)
255#undef HEAP_CONSTANT_ACCESSOR
256
257#define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
258 TNode<BoolT> CodeStubAssembler::Is##name(TNode<Object> value) { \
259 return TaggedEqual(value, name##Constant()); \
260 } \
261 TNode<BoolT> CodeStubAssembler::IsNot##name(TNode<Object> value) { \
262 return TaggedNotEqual(value, name##Constant()); \
263 }
264HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST)HEAP_CONSTANT_TEST(ArrayIteratorProtector, array_iterator_protector
, ArrayIteratorProtector) HEAP_CONSTANT_TEST(ArraySpeciesProtector
, array_species_protector, ArraySpeciesProtector) HEAP_CONSTANT_TEST
(AsyncFunctionAwaitRejectSharedFun, async_function_await_reject_shared_fun
, AsyncFunctionAwaitRejectSharedFun) HEAP_CONSTANT_TEST(AsyncFunctionAwaitResolveSharedFun
, async_function_await_resolve_shared_fun, AsyncFunctionAwaitResolveSharedFun
) HEAP_CONSTANT_TEST(AsyncGeneratorAwaitRejectSharedFun, async_generator_await_reject_shared_fun
, AsyncGeneratorAwaitRejectSharedFun) HEAP_CONSTANT_TEST(AsyncGeneratorAwaitResolveSharedFun
, async_generator_await_resolve_shared_fun, AsyncGeneratorAwaitResolveSharedFun
) HEAP_CONSTANT_TEST(AsyncGeneratorReturnClosedRejectSharedFun
, async_generator_return_closed_reject_shared_fun, AsyncGeneratorReturnClosedRejectSharedFun
) HEAP_CONSTANT_TEST(AsyncGeneratorReturnClosedResolveSharedFun
, async_generator_return_closed_resolve_shared_fun, AsyncGeneratorReturnClosedResolveSharedFun
) HEAP_CONSTANT_TEST(AsyncGeneratorReturnResolveSharedFun, async_generator_return_resolve_shared_fun
, AsyncGeneratorReturnResolveSharedFun) HEAP_CONSTANT_TEST(AsyncGeneratorYieldResolveSharedFun
, async_generator_yield_resolve_shared_fun, AsyncGeneratorYieldResolveSharedFun
) HEAP_CONSTANT_TEST(AsyncIteratorValueUnwrapSharedFun, async_iterator_value_unwrap_shared_fun
, AsyncIteratorValueUnwrapSharedFun) HEAP_CONSTANT_TEST(IsConcatSpreadableProtector
, is_concat_spreadable_protector, IsConcatSpreadableProtector
) HEAP_CONSTANT_TEST(MapIteratorProtector, map_iterator_protector
, MapIteratorProtector) HEAP_CONSTANT_TEST(NoElementsProtector
, no_elements_protector, NoElementsProtector) HEAP_CONSTANT_TEST
(MegaDOMProtector, mega_dom_protector, MegaDOMProtector) HEAP_CONSTANT_TEST
(NumberStringCache, number_string_cache, NumberStringCache) HEAP_CONSTANT_TEST
(PromiseAllResolveElementSharedFun, promise_all_resolve_element_shared_fun
, PromiseAllResolveElementSharedFun) HEAP_CONSTANT_TEST(PromiseAllSettledRejectElementSharedFun
, promise_all_settled_reject_element_shared_fun, PromiseAllSettledRejectElementSharedFun
) HEAP_CONSTANT_TEST(PromiseAllSettledResolveElementSharedFun
, promise_all_settled_resolve_element_shared_fun, PromiseAllSettledResolveElementSharedFun
) HEAP_CONSTANT_TEST(PromiseAnyRejectElementSharedFun, promise_any_reject_element_shared_fun
, PromiseAnyRejectElementSharedFun) HEAP_CONSTANT_TEST(PromiseCapabilityDefaultRejectSharedFun
, promise_capability_default_reject_shared_fun, PromiseCapabilityDefaultRejectSharedFun
) HEAP_CONSTANT_TEST(PromiseCapabilityDefaultResolveSharedFun
, promise_capability_default_resolve_shared_fun, PromiseCapabilityDefaultResolveSharedFun
) HEAP_CONSTANT_TEST(PromiseCatchFinallySharedFun, promise_catch_finally_shared_fun
, PromiseCatchFinallySharedFun) HEAP_CONSTANT_TEST(PromiseGetCapabilitiesExecutorSharedFun
, promise_get_capabilities_executor_shared_fun, PromiseGetCapabilitiesExecutorSharedFun
) HEAP_CONSTANT_TEST(PromiseResolveProtector, promise_resolve_protector
, PromiseResolveProtector) HEAP_CONSTANT_TEST(PromiseSpeciesProtector
, promise_species_protector, PromiseSpeciesProtector) HEAP_CONSTANT_TEST
(PromiseThenFinallySharedFun, promise_then_finally_shared_fun
, PromiseThenFinallySharedFun) HEAP_CONSTANT_TEST(PromiseThenProtector
, promise_then_protector, PromiseThenProtector) HEAP_CONSTANT_TEST
(PromiseThrowerFinallySharedFun, promise_thrower_finally_shared_fun
, PromiseThrowerFinallySharedFun) HEAP_CONSTANT_TEST(PromiseValueThunkFinallySharedFun
, promise_value_thunk_finally_shared_fun, PromiseValueThunkFinallySharedFun
) HEAP_CONSTANT_TEST(ProxyRevokeSharedFun, proxy_revoke_shared_fun
, ProxyRevokeSharedFun) HEAP_CONSTANT_TEST(RegExpSpeciesProtector
, regexp_species_protector, RegExpSpeciesProtector) HEAP_CONSTANT_TEST
(SetIteratorProtector, set_iterator_protector, SetIteratorProtector
) HEAP_CONSTANT_TEST(SingleCharacterStringCache, single_character_string_cache
, SingleCharacterStringCache) HEAP_CONSTANT_TEST(StringIteratorProtector
, string_iterator_protector, StringIteratorProtector) HEAP_CONSTANT_TEST
(TypedArraySpeciesProtector, typed_array_species_protector, TypedArraySpeciesProtector
) HEAP_CONSTANT_TEST(AllocationSiteWithoutWeakNextMap, allocation_site_without_weaknext_map
, AllocationSiteWithoutWeakNextMap) HEAP_CONSTANT_TEST(AllocationSiteWithWeakNextMap
, allocation_site_map, AllocationSiteMap) HEAP_CONSTANT_TEST(
arguments_to_string, arguments_to_string, ArgumentsToString) HEAP_CONSTANT_TEST
(Array_string, Array_string, ArrayString) HEAP_CONSTANT_TEST(
array_to_string, array_to_string, ArrayToString) HEAP_CONSTANT_TEST
(BooleanMap, boolean_map, BooleanMap) HEAP_CONSTANT_TEST(boolean_to_string
, boolean_to_string, BooleanToString) HEAP_CONSTANT_TEST(ConsOneByteStringMap
, cons_one_byte_string_map, ConsOneByteStringMap) HEAP_CONSTANT_TEST
(ConsStringMap, cons_string_map, ConsStringMap) HEAP_CONSTANT_TEST
(constructor_string, constructor_string, ConstructorString) HEAP_CONSTANT_TEST
(date_to_string, date_to_string, DateToString) HEAP_CONSTANT_TEST
(default_string, default_string, DefaultString) HEAP_CONSTANT_TEST
(EmptyByteArray, empty_byte_array, EmptyByteArray) HEAP_CONSTANT_TEST
(EmptyFixedArray, empty_fixed_array, EmptyFixedArray) HEAP_CONSTANT_TEST
(EmptyScopeInfo, empty_scope_info, EmptyScopeInfo) HEAP_CONSTANT_TEST
(EmptyPropertyDictionary, empty_property_dictionary, EmptyPropertyDictionary
) HEAP_CONSTANT_TEST(EmptyOrderedPropertyDictionary, empty_ordered_property_dictionary
, EmptyOrderedPropertyDictionary) HEAP_CONSTANT_TEST(EmptySwissPropertyDictionary
, empty_swiss_property_dictionary, EmptySwissPropertyDictionary
) HEAP_CONSTANT_TEST(EmptySlowElementDictionary, empty_slow_element_dictionary
, EmptySlowElementDictionary) HEAP_CONSTANT_TEST(empty_string
, empty_string, EmptyString) HEAP_CONSTANT_TEST(error_to_string
, error_to_string, ErrorToString) HEAP_CONSTANT_TEST(errors_string
, errors_string, ErrorsString) HEAP_CONSTANT_TEST(FalseValue,
false_value, False) HEAP_CONSTANT_TEST(FixedArrayMap, fixed_array_map
, FixedArrayMap) HEAP_CONSTANT_TEST(FixedCOWArrayMap, fixed_cow_array_map
, FixedCOWArrayMap) HEAP_CONSTANT_TEST(Function_string, function_string
, FunctionString) HEAP_CONSTANT_TEST(function_to_string, function_to_string
, FunctionToString) HEAP_CONSTANT_TEST(GlobalPropertyCellMap,
global_property_cell_map, PropertyCellMap) HEAP_CONSTANT_TEST
(has_instance_symbol, has_instance_symbol, HasInstanceSymbol)
HEAP_CONSTANT_TEST(Infinity_string, Infinity_string, InfinityString
) HEAP_CONSTANT_TEST(is_concat_spreadable_symbol, is_concat_spreadable_symbol
, IsConcatSpreadableSymbol) HEAP_CONSTANT_TEST(iterator_symbol
, iterator_symbol, IteratorSymbol) HEAP_CONSTANT_TEST(length_string
, length_string, LengthString) HEAP_CONSTANT_TEST(ManyClosuresCellMap
, many_closures_cell_map, ManyClosuresCellMap) HEAP_CONSTANT_TEST
(match_symbol, match_symbol, MatchSymbol) HEAP_CONSTANT_TEST(
megamorphic_symbol, megamorphic_symbol, MegamorphicSymbol) HEAP_CONSTANT_TEST
(mega_dom_symbol, mega_dom_symbol, MegaDOMSymbol) HEAP_CONSTANT_TEST
(message_string, message_string, MessageString) HEAP_CONSTANT_TEST
(minus_Infinity_string, minus_Infinity_string, MinusInfinityString
) HEAP_CONSTANT_TEST(MinusZeroValue, minus_zero_value, MinusZero
) HEAP_CONSTANT_TEST(name_string, name_string, NameString) HEAP_CONSTANT_TEST
(NanValue, nan_value, Nan) HEAP_CONSTANT_TEST(NaN_string, NaN_string
, NaNString) HEAP_CONSTANT_TEST(next_string, next_string, NextString
) HEAP_CONSTANT_TEST(NoClosuresCellMap, no_closures_cell_map,
NoClosuresCellMap) HEAP_CONSTANT_TEST(null_to_string, null_to_string
, NullToString) HEAP_CONSTANT_TEST(NullValue, null_value, Null
) HEAP_CONSTANT_TEST(number_string, number_string, NumberString
) HEAP_CONSTANT_TEST(number_to_string, number_to_string, NumberToString
) HEAP_CONSTANT_TEST(Object_string, Object_string, ObjectString
) HEAP_CONSTANT_TEST(object_to_string, object_to_string, ObjectToString
) HEAP_CONSTANT_TEST(OneByteStringMap, one_byte_string_map, OneByteStringMap
) HEAP_CONSTANT_TEST(OneClosureCellMap, one_closure_cell_map,
OneClosureCellMap) HEAP_CONSTANT_TEST(OnePointerFillerMap, one_pointer_filler_map
, OnePointerFillerMap) HEAP_CONSTANT_TEST(PromiseCapabilityMap
, promise_capability_map, PromiseCapabilityMap) HEAP_CONSTANT_TEST
(promise_forwarding_handler_symbol, promise_forwarding_handler_symbol
, PromiseForwardingHandlerSymbol) HEAP_CONSTANT_TEST(PromiseFulfillReactionJobTaskMap
, promise_fulfill_reaction_job_task_map, PromiseFulfillReactionJobTaskMap
) HEAP_CONSTANT_TEST(promise_handled_by_symbol, promise_handled_by_symbol
, PromiseHandledBySymbol) HEAP_CONSTANT_TEST(PromiseReactionMap
, promise_reaction_map, PromiseReactionMap) HEAP_CONSTANT_TEST
(PromiseRejectReactionJobTaskMap, promise_reject_reaction_job_task_map
, PromiseRejectReactionJobTaskMap) HEAP_CONSTANT_TEST(PromiseResolveThenableJobTaskMap
, promise_resolve_thenable_job_task_map, PromiseResolveThenableJobTaskMap
) HEAP_CONSTANT_TEST(prototype_string, prototype_string, PrototypeString
) HEAP_CONSTANT_TEST(replace_symbol, replace_symbol, ReplaceSymbol
) HEAP_CONSTANT_TEST(regexp_to_string, regexp_to_string, RegexpToString
) HEAP_CONSTANT_TEST(resolve_string, resolve_string, ResolveString
) HEAP_CONSTANT_TEST(return_string, return_string, ReturnString
) HEAP_CONSTANT_TEST(search_symbol, search_symbol, SearchSymbol
) HEAP_CONSTANT_TEST(species_symbol, species_symbol, SpeciesSymbol
) HEAP_CONSTANT_TEST(StaleRegister, stale_register, StaleRegister
) HEAP_CONSTANT_TEST(StoreHandler0Map, store_handler0_map, StoreHandler0Map
) HEAP_CONSTANT_TEST(string_string, string_string, StringString
) HEAP_CONSTANT_TEST(string_to_string, string_to_string, StringToString
) HEAP_CONSTANT_TEST(StringMap, string_map, StringMap) HEAP_CONSTANT_TEST
(TheHoleValue, the_hole_value, TheHole) HEAP_CONSTANT_TEST(then_string
, then_string, ThenString) HEAP_CONSTANT_TEST(toString_string
, toString_string, ToStringString) HEAP_CONSTANT_TEST(to_primitive_symbol
, to_primitive_symbol, ToPrimitiveSymbol) HEAP_CONSTANT_TEST(
to_string_tag_symbol, to_string_tag_symbol, ToStringTagSymbol
) HEAP_CONSTANT_TEST(TrueValue, true_value, True) HEAP_CONSTANT_TEST
(undefined_to_string, undefined_to_string, UndefinedToString)
HEAP_CONSTANT_TEST(UndefinedValue, undefined_value, Undefined
) HEAP_CONSTANT_TEST(uninitialized_symbol, uninitialized_symbol
, UninitializedSymbol) HEAP_CONSTANT_TEST(valueOf_string, valueOf_string
, ValueOfString) HEAP_CONSTANT_TEST(wasm_wrapped_object_symbol
, wasm_wrapped_object_symbol, WasmWrappedObjectSymbol) HEAP_CONSTANT_TEST
(zero_string, zero_string, ZeroString) HEAP_CONSTANT_TEST(AccessorInfoMap
, accessor_info_map, AccessorInfoMap) HEAP_CONSTANT_TEST(AccessorPairMap
, accessor_pair_map, AccessorPairMap) HEAP_CONSTANT_TEST(AllocationMementoMap
, allocation_memento_map, AllocationMementoMap) HEAP_CONSTANT_TEST
(ArrayBoilerplateDescriptionMap, array_boilerplate_description_map
, ArrayBoilerplateDescriptionMap) HEAP_CONSTANT_TEST(BreakPointMap
, break_point_map, BreakPointMap) HEAP_CONSTANT_TEST(BreakPointInfoMap
, break_point_info_map, BreakPointInfoMap) HEAP_CONSTANT_TEST
(BytecodeArrayMap, bytecode_array_map, BytecodeArrayMap) HEAP_CONSTANT_TEST
(CachedTemplateObjectMap, cached_template_object_map, CachedTemplateObjectMap
) HEAP_CONSTANT_TEST(CellMap, cell_map, CellMap) HEAP_CONSTANT_TEST
(WeakCellMap, weak_cell_map, WeakCellMap) HEAP_CONSTANT_TEST(
CodeMap, code_map, CodeMap) HEAP_CONSTANT_TEST(CodeDataContainerMap
, code_data_container_map, CodeDataContainerMap) HEAP_CONSTANT_TEST
(CoverageInfoMap, coverage_info_map, CoverageInfoMap) HEAP_CONSTANT_TEST
(DebugInfoMap, debug_info_map, DebugInfoMap) HEAP_CONSTANT_TEST
(FreeSpaceMap, free_space_map, FreeSpaceMap) HEAP_CONSTANT_TEST
(FeedbackVectorMap, feedback_vector_map, FeedbackVectorMap) HEAP_CONSTANT_TEST
(FixedDoubleArrayMap, fixed_double_array_map, FixedDoubleArrayMap
) HEAP_CONSTANT_TEST(FunctionTemplateInfoMap, function_template_info_map
, FunctionTemplateInfoMap) HEAP_CONSTANT_TEST(MegaDomHandlerMap
, mega_dom_handler_map, MegaDomHandlerMap) HEAP_CONSTANT_TEST
(MetaMap, meta_map, MapMap) HEAP_CONSTANT_TEST(PreparseDataMap
, preparse_data_map, PreparseDataMap) HEAP_CONSTANT_TEST(PropertyArrayMap
, property_array_map, PropertyArrayMap) HEAP_CONSTANT_TEST(PrototypeInfoMap
, prototype_info_map, PrototypeInfoMap) HEAP_CONSTANT_TEST(SharedFunctionInfoMap
, shared_function_info_map, SharedFunctionInfoMap) HEAP_CONSTANT_TEST
(SmallOrderedHashSetMap, small_ordered_hash_set_map, SmallOrderedHashSetMap
) HEAP_CONSTANT_TEST(SmallOrderedHashMapMap, small_ordered_hash_map_map
, SmallOrderedHashMapMap) HEAP_CONSTANT_TEST(SmallOrderedNameDictionaryMap
, small_ordered_name_dictionary_map, SmallOrderedNameDictionaryMap
) HEAP_CONSTANT_TEST(SwissNameDictionaryMap, swiss_name_dictionary_map
, SwissNameDictionaryMap) HEAP_CONSTANT_TEST(SymbolMap, symbol_map
, SymbolMap) HEAP_CONSTANT_TEST(TransitionArrayMap, transition_array_map
, TransitionArrayMap) HEAP_CONSTANT_TEST(Tuple2Map, tuple2_map
, Tuple2Map) HEAP_CONSTANT_TEST(HeapNumberMap, heap_number_map
, HeapNumberMap) HEAP_CONSTANT_TEST(WeakFixedArrayMap, weak_fixed_array_map
, WeakFixedArrayMap) HEAP_CONSTANT_TEST(SloppyArgumentsElementsMap
, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) HEAP_CONSTANT_TEST
(DescriptorArrayMap, descriptor_array_map, DescriptorArrayMap
) HEAP_CONSTANT_TEST(StrongDescriptorArrayMap, strong_descriptor_array_map
, StrongDescriptorArrayMap) HEAP_CONSTANT_TEST(UncompiledDataWithoutPreparseDataMap
, uncompiled_data_without_preparse_data_map, UncompiledDataWithoutPreparseDataMap
) HEAP_CONSTANT_TEST(UncompiledDataWithPreparseDataMap, uncompiled_data_with_preparse_data_map
, UncompiledDataWithPreparseDataMap) HEAP_CONSTANT_TEST(UncompiledDataWithoutPreparseDataWithJobMap
, uncompiled_data_without_preparse_data_with_job_map, UncompiledDataWithoutPreparseDataWithJobMap
) HEAP_CONSTANT_TEST(UncompiledDataWithPreparseDataAndJobMap,
uncompiled_data_with_preparse_data_and_job_map, UncompiledDataWithPreparseDataAndJobMap
) HEAP_CONSTANT_TEST(OnHeapBasicBlockProfilerDataMap, on_heap_basic_block_profiler_data_map
, OnHeapBasicBlockProfilerDataMap) HEAP_CONSTANT_TEST(TurbofanBitsetTypeMap
, turbofan_bitset_type_map, TurbofanBitsetTypeMap) HEAP_CONSTANT_TEST
(TurbofanUnionTypeMap, turbofan_union_type_map, TurbofanUnionTypeMap
) HEAP_CONSTANT_TEST(TurbofanRangeTypeMap, turbofan_range_type_map
, TurbofanRangeTypeMap) HEAP_CONSTANT_TEST(TurbofanHeapConstantTypeMap
, turbofan_heap_constant_type_map, TurbofanHeapConstantTypeMap
) HEAP_CONSTANT_TEST(TurbofanOtherNumberConstantTypeMap, turbofan_other_number_constant_type_map
, TurbofanOtherNumberConstantTypeMap) HEAP_CONSTANT_TEST(InternalClassMap
, internal_class_map, InternalClassMap) HEAP_CONSTANT_TEST(SmiPairMap
, smi_pair_map, SmiPairMap) HEAP_CONSTANT_TEST(SmiBoxMap, smi_box_map
, SmiBoxMap) HEAP_CONSTANT_TEST(ExportedSubClassBaseMap, exported_sub_class_base_map
, ExportedSubClassBaseMap) HEAP_CONSTANT_TEST(ExportedSubClassMap
, exported_sub_class_map, ExportedSubClassMap) HEAP_CONSTANT_TEST
(AbstractInternalClassSubclass1Map, abstract_internal_class_subclass1_map
, AbstractInternalClassSubclass1Map) HEAP_CONSTANT_TEST(AbstractInternalClassSubclass2Map
, abstract_internal_class_subclass2_map, AbstractInternalClassSubclass2Map
) HEAP_CONSTANT_TEST(InternalClassWithSmiElementsMap, internal_class_with_smi_elements_map
, InternalClassWithSmiElementsMap) HEAP_CONSTANT_TEST(InternalClassWithStructElementsMap
, internal_class_with_struct_elements_map, InternalClassWithStructElementsMap
) HEAP_CONSTANT_TEST(ExportedSubClass2Map, exported_sub_class2_map
, ExportedSubClass2Map) HEAP_CONSTANT_TEST(SortStateMap, sort_state_map
, SortStateMap)
265#undef HEAP_CONSTANT_TEST
266
267TNode<BInt> CodeStubAssembler::BIntConstant(int value) {
268#if defined(BINT_IS_SMI)
269 return SmiConstant(value);
270#elif defined(BINT_IS_INTPTR)
271 return IntPtrConstant(value);
272#else
273#error Unknown architecture.
274#endif
275}
276
277template <>
278TNode<Smi> CodeStubAssembler::IntPtrOrSmiConstant<Smi>(int value) {
279 return SmiConstant(value);
280}
281
282template <>
283TNode<IntPtrT> CodeStubAssembler::IntPtrOrSmiConstant<IntPtrT>(int value) {
284 return IntPtrConstant(value);
285}
286
287template <>
288TNode<UintPtrT> CodeStubAssembler::IntPtrOrSmiConstant<UintPtrT>(int value) {
289 return Unsigned(IntPtrConstant(value));
290}
291
292template <>
293TNode<RawPtrT> CodeStubAssembler::IntPtrOrSmiConstant<RawPtrT>(int value) {
294 return ReinterpretCast<RawPtrT>(IntPtrConstant(value));
295}
296
297bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(
298 TNode<Smi> maybe_constant, int* value) {
299 Smi smi_constant;
300 if (TryToSmiConstant(maybe_constant, &smi_constant)) {
301 *value = Smi::ToInt(smi_constant);
302 return true;
303 }
304 return false;
305}
306
307bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(
308 TNode<IntPtrT> maybe_constant, int* value) {
309 int32_t int32_constant;
310 if (TryToInt32Constant(maybe_constant, &int32_constant)) {
311 *value = int32_constant;
312 return true;
313 }
314 return false;
315}
316
317TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
318 TNode<IntPtrT> value) {
319 Comment("IntPtrRoundUpToPowerOfTwo32");
320 CSA_DCHECK(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)))((void)0);
321 value = Signed(IntPtrSub(value, IntPtrConstant(1)));
322 for (int i = 1; i <= 16; i *= 2) {
323 value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
324 }
325 return Signed(IntPtrAdd(value, IntPtrConstant(1)));
326}
327
328TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(TNode<IntPtrT> value) {
329 intptr_t constant;
330 if (TryToIntPtrConstant(value, &constant)) {
331 return BoolConstant(base::bits::IsPowerOfTwo(constant));
332 }
333 // value && !(value & (value - 1))
334 return IntPtrEqual(
335 Select<IntPtrT>(
336 IntPtrEqual(value, IntPtrConstant(0)),
337 [=] { return IntPtrConstant(1); },
338 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
339 IntPtrConstant(0));
340}
341
342TNode<Float64T> CodeStubAssembler::Float64Round(TNode<Float64T> x) {
343 TNode<Float64T> one = Float64Constant(1.0);
344 TNode<Float64T> one_half = Float64Constant(0.5);
345
346 Label return_x(this);
347
348 // Round up {x} towards Infinity.
349 TVARIABLE(Float64T, var_x, Float64Ceil(x))TVariable<Float64T> var_x(Float64Ceil(x), this);
350
351 GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
352 &return_x);
353 var_x = Float64Sub(var_x.value(), one);
354 Goto(&return_x);
355
356 BIND(&return_x)Bind(&return_x);
357 return var_x.value();
358}
359
360TNode<Float64T> CodeStubAssembler::Float64Ceil(TNode<Float64T> x) {
361 if (IsFloat64RoundUpSupported()) {
362 return Float64RoundUp(x);
363 }
364
365 TNode<Float64T> one = Float64Constant(1.0);
366 TNode<Float64T> zero = Float64Constant(0.0);
367 TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
368 TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
369
370 TVARIABLE(Float64T, var_x, x)TVariable<Float64T> var_x(x, this);
371 Label return_x(this), return_minus_x(this);
372
373 // Check if {x} is greater than zero.
374 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
375 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
376 &if_xnotgreaterthanzero);
377
378 BIND(&if_xgreaterthanzero)Bind(&if_xgreaterthanzero);
379 {
380 // Just return {x} unless it's in the range ]0,2^52[.
381 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
382
383 // Round positive {x} towards Infinity.
384 var_x = Float64Sub(Float64Add(two_52, x), two_52);
385 GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
386 var_x = Float64Add(var_x.value(), one);
387 Goto(&return_x);
388 }
389
390 BIND(&if_xnotgreaterthanzero)Bind(&if_xnotgreaterthanzero);
391 {
392 // Just return {x} unless it's in the range ]-2^52,0[
393 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
394 GotoIfNot(Float64LessThan(x, zero), &return_x);
395
396 // Round negated {x} towards Infinity and return the result negated.
397 TNode<Float64T> minus_x = Float64Neg(x);
398 var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
399 GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
400 var_x = Float64Sub(var_x.value(), one);
401 Goto(&return_minus_x);
402 }
403
404 BIND(&return_minus_x)Bind(&return_minus_x);
405 var_x = Float64Neg(var_x.value());
406 Goto(&return_x);
407
408 BIND(&return_x)Bind(&return_x);
409 return var_x.value();
410}
411
412TNode<Float64T> CodeStubAssembler::Float64Floor(TNode<Float64T> x) {
413 if (IsFloat64RoundDownSupported()) {
414 return Float64RoundDown(x);
415 }
416
417 TNode<Float64T> one = Float64Constant(1.0);
418 TNode<Float64T> zero = Float64Constant(0.0);
419 TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
420 TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
421
422 TVARIABLE(Float64T, var_x, x)TVariable<Float64T> var_x(x, this);
423 Label return_x(this), return_minus_x(this);
424
425 // Check if {x} is greater than zero.
426 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
427 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
428 &if_xnotgreaterthanzero);
429
430 BIND(&if_xgreaterthanzero)Bind(&if_xgreaterthanzero);
431 {
432 // Just return {x} unless it's in the range ]0,2^52[.
433 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
434
435 // Round positive {x} towards -Infinity.
436 var_x = Float64Sub(Float64Add(two_52, x), two_52);
437 GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
438 var_x = Float64Sub(var_x.value(), one);
439 Goto(&return_x);
440 }
441
442 BIND(&if_xnotgreaterthanzero)Bind(&if_xnotgreaterthanzero);
443 {
444 // Just return {x} unless it's in the range ]-2^52,0[
445 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
446 GotoIfNot(Float64LessThan(x, zero), &return_x);
447
448 // Round negated {x} towards -Infinity and return the result negated.
449 TNode<Float64T> minus_x = Float64Neg(x);
450 var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
451 GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
452 var_x = Float64Add(var_x.value(), one);
453 Goto(&return_minus_x);
454 }
455
456 BIND(&return_minus_x)Bind(&return_minus_x);
457 var_x = Float64Neg(var_x.value());
458 Goto(&return_x);
459
460 BIND(&return_x)Bind(&return_x);
461 return var_x.value();
462}
463
464TNode<Float64T> CodeStubAssembler::Float64RoundToEven(TNode<Float64T> x) {
465 if (IsFloat64RoundTiesEvenSupported()) {
466 return Float64RoundTiesEven(x);
467 }
468 // See ES#sec-touint8clamp for details.
469 TNode<Float64T> f = Float64Floor(x);
470 TNode<Float64T> f_and_half = Float64Add(f, Float64Constant(0.5));
471
472 TVARIABLE(Float64T, var_result)TVariable<Float64T> var_result(this);
473 Label return_f(this), return_f_plus_one(this), done(this);
474
475 GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
476 GotoIf(Float64LessThan(x, f_and_half), &return_f);
477 {
478 TNode<Float64T> f_mod_2 = Float64Mod(f, Float64Constant(2.0));
479 Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
480 &return_f_plus_one);
481 }
482
483 BIND(&return_f)Bind(&return_f);
484 var_result = f;
485 Goto(&done);
486
487 BIND(&return_f_plus_one)Bind(&return_f_plus_one);
488 var_result = Float64Add(f, Float64Constant(1.0));
489 Goto(&done);
490
491 BIND(&done)Bind(&done);
492 return var_result.value();
493}
494
495TNode<Float64T> CodeStubAssembler::Float64Trunc(TNode<Float64T> x) {
496 if (IsFloat64RoundTruncateSupported()) {
497 return Float64RoundTruncate(x);
498 }
499
500 TNode<Float64T> one = Float64Constant(1.0);
501 TNode<Float64T> zero = Float64Constant(0.0);
502 TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
503 TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
504
505 TVARIABLE(Float64T, var_x, x)TVariable<Float64T> var_x(x, this);
506 Label return_x(this), return_minus_x(this);
507
508 // Check if {x} is greater than 0.
509 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
510 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
511 &if_xnotgreaterthanzero);
512
513 BIND(&if_xgreaterthanzero)Bind(&if_xgreaterthanzero);
514 {
515 if (IsFloat64RoundDownSupported()) {
516 var_x = Float64RoundDown(x);
517 } else {
518 // Just return {x} unless it's in the range ]0,2^52[.
519 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
520
521 // Round positive {x} towards -Infinity.
522 var_x = Float64Sub(Float64Add(two_52, x), two_52);
523 GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
524 var_x = Float64Sub(var_x.value(), one);
525 }
526 Goto(&return_x);
527 }
528
529 BIND(&if_xnotgreaterthanzero)Bind(&if_xnotgreaterthanzero);
530 {
531 if (IsFloat64RoundUpSupported()) {
532 var_x = Float64RoundUp(x);
533 Goto(&return_x);
534 } else {
535 // Just return {x} unless its in the range ]-2^52,0[.
536 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
537 GotoIfNot(Float64LessThan(x, zero), &return_x);
538
539 // Round negated {x} towards -Infinity and return result negated.
540 TNode<Float64T> minus_x = Float64Neg(x);
541 var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
542 GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
543 var_x = Float64Sub(var_x.value(), one);
544 Goto(&return_minus_x);
545 }
546 }
547
548 BIND(&return_minus_x)Bind(&return_minus_x);
549 var_x = Float64Neg(var_x.value());
550 Goto(&return_x);
551
552 BIND(&return_x)Bind(&return_x);
553 return var_x.value();
554}
555
556TNode<IntPtrT> CodeStubAssembler::PopulationCountFallback(
557 TNode<UintPtrT> value) {
558 // Taken from slow path of base::bits::CountPopulation, the comments here show
559 // C++ code and comments from there for reference.
560 // Fall back to divide-and-conquer popcount (see "Hacker's Delight" by Henry
561 // S. Warren, Jr.), chapter 5-1.
562 constexpr uintptr_t mask[] = {static_cast<uintptr_t>(0x5555555555555555),
563 static_cast<uintptr_t>(0x3333333333333333),
564 static_cast<uintptr_t>(0x0f0f0f0f0f0f0f0f)};
565
566 // TNode<UintPtrT> value = Unsigned(value_word);
567 TNode<UintPtrT> lhs, rhs;
568
569 // Start with 64 buckets of 1 bits, holding values from [0,1].
570 // {value = ((value >> 1) & mask[0]) + (value & mask[0])}
571 lhs = WordAnd(WordShr(value, UintPtrConstant(1)), UintPtrConstant(mask[0]));
572 rhs = WordAnd(value, UintPtrConstant(mask[0]));
573 value = UintPtrAdd(lhs, rhs);
574
575 // Having 32 buckets of 2 bits, holding values from [0,2] now.
576 // {value = ((value >> 2) & mask[1]) + (value & mask[1])}
577 lhs = WordAnd(WordShr(value, UintPtrConstant(2)), UintPtrConstant(mask[1]));
578 rhs = WordAnd(value, UintPtrConstant(mask[1]));
579 value = UintPtrAdd(lhs, rhs);
580
581 // Having 16 buckets of 4 bits, holding values from [0,4] now.
582 // {value = ((value >> 4) & mask[2]) + (value & mask[2])}
583 lhs = WordAnd(WordShr(value, UintPtrConstant(4)), UintPtrConstant(mask[2]));
584 rhs = WordAnd(value, UintPtrConstant(mask[2]));
585 value = UintPtrAdd(lhs, rhs);
586
587 // Having 8 buckets of 8 bits, holding values from [0,8] now.
588 // From this point on, the buckets are bigger than the number of bits
589 // required to hold the values, and the buckets are bigger the maximum
590 // result, so there's no need to mask value anymore, since there's no
591 // more risk of overflow between buckets.
592 // {value = (value >> 8) + value}
593 lhs = WordShr(value, UintPtrConstant(8));
594 value = UintPtrAdd(lhs, value);
595
596 // Having 4 buckets of 16 bits, holding values from [0,16] now.
597 // {value = (value >> 16) + value}
598 lhs = WordShr(value, UintPtrConstant(16));
599 value = UintPtrAdd(lhs, value);
600
601 if (Is64()) {
602 // Having 2 buckets of 32 bits, holding values from [0,32] now.
603 // {value = (value >> 32) + value}
604 lhs = WordShr(value, UintPtrConstant(32));
605 value = UintPtrAdd(lhs, value);
606 }
607
608 // Having 1 buckets of sizeof(intptr_t) bits, holding values from [0,64] now.
609 // {return static_cast<unsigned>(value & 0xff)}
610 return Signed(WordAnd(value, UintPtrConstant(0xff)));
611}
612
613TNode<Int64T> CodeStubAssembler::PopulationCount64(TNode<Word64T> value) {
614 if (IsWord64PopcntSupported()) {
615 return Word64Popcnt(value);
616 }
617
618 if (Is32()) {
619 // Unsupported.
620 UNREACHABLE()V8_Fatal("unreachable code");
621 }
622
623 return ReinterpretCast<Int64T>(
624 PopulationCountFallback(ReinterpretCast<UintPtrT>(value)));
625}
626
627TNode<Int32T> CodeStubAssembler::PopulationCount32(TNode<Word32T> value) {
628 if (IsWord32PopcntSupported()) {
629 return Word32Popcnt(value);
630 }
631
632 if (Is32()) {
633 TNode<IntPtrT> res =
634 PopulationCountFallback(ReinterpretCast<UintPtrT>(value));
635 return ReinterpretCast<Int32T>(res);
636 } else {
637 TNode<IntPtrT> res = PopulationCountFallback(
638 ReinterpretCast<UintPtrT>(ChangeUint32ToUint64(value)));
639 return TruncateInt64ToInt32(ReinterpretCast<Int64T>(res));
640 }
641}
642
643TNode<Int64T> CodeStubAssembler::CountTrailingZeros64(TNode<Word64T> value) {
644 if (IsWord64CtzSupported()) {
645 return Word64Ctz(value);
646 }
647
648 if (Is32()) {
649 // Unsupported.
650 UNREACHABLE()V8_Fatal("unreachable code");
651 }
652
653 // Same fallback as in base::bits::CountTrailingZeros.
654 // Fall back to popcount (see "Hacker's Delight" by Henry S. Warren, Jr.),
655 // chapter 5-4. On x64, since is faster than counting in a loop and faster
656 // than doing binary search.
657 TNode<Word64T> lhs = Word64Not(value);
658 TNode<Word64T> rhs = Uint64Sub(Unsigned(value), Uint64Constant(1));
659 return PopulationCount64(Word64And(lhs, rhs));
660}
661
662TNode<Int32T> CodeStubAssembler::CountTrailingZeros32(TNode<Word32T> value) {
663 if (IsWord32CtzSupported()) {
664 return Word32Ctz(value);
665 }
666
667 if (Is32()) {
668 // Same fallback as in Word64CountTrailingZeros.
669 TNode<Word32T> lhs = Word32BitwiseNot(value);
670 TNode<Word32T> rhs = Int32Sub(Signed(value), Int32Constant(1));
671 return PopulationCount32(Word32And(lhs, rhs));
672 } else {
673 TNode<Int64T> res64 = CountTrailingZeros64(ChangeUint32ToUint64(value));
674 return TruncateInt64ToInt32(Signed(res64));
675 }
676}
677
678TNode<Int64T> CodeStubAssembler::CountLeadingZeros64(TNode<Word64T> value) {
679 return Word64Clz(value);
680}
681
682TNode<Int32T> CodeStubAssembler::CountLeadingZeros32(TNode<Word32T> value) {
683 return Word32Clz(value);
684}
685
686template <>
687TNode<Smi> CodeStubAssembler::TaggedToParameter(TNode<Smi> value) {
688 return value;
689}
690
691template <>
692TNode<IntPtrT> CodeStubAssembler::TaggedToParameter(TNode<Smi> value) {
693 return SmiUntag(value);
694}
695
696TNode<IntPtrT> CodeStubAssembler::TaggedIndexToIntPtr(
697 TNode<TaggedIndex> value) {
698 return Signed(WordSarShiftOutZeros(BitcastTaggedToWordForTagAndSmiBits(value),
699 IntPtrConstant(kSmiTagSize)));
700}
701
702TNode<TaggedIndex> CodeStubAssembler::IntPtrToTaggedIndex(
703 TNode<IntPtrT> value) {
704 return ReinterpretCast<TaggedIndex>(
705 BitcastWordToTaggedSigned(WordShl(value, IntPtrConstant(kSmiTagSize))));
706}
707
708TNode<Smi> CodeStubAssembler::TaggedIndexToSmi(TNode<TaggedIndex> value) {
709 if (SmiValuesAre32Bits()) {
710 DCHECK_EQ(kSmiShiftSize, 31)((void) 0);
711 return BitcastWordToTaggedSigned(
712 WordShl(BitcastTaggedToWordForTagAndSmiBits(value),
713 IntPtrConstant(kSmiShiftSize)));
714 }
715 DCHECK(SmiValuesAre31Bits())((void) 0);
716 DCHECK_EQ(kSmiShiftSize, 0)((void) 0);
717 return ReinterpretCast<Smi>(value);
718}
719
720TNode<TaggedIndex> CodeStubAssembler::SmiToTaggedIndex(TNode<Smi> value) {
721 if (kSystemPointerSize == kInt32Size) {
722 return ReinterpretCast<TaggedIndex>(value);
723 }
724 if (SmiValuesAre32Bits()) {
725 DCHECK_EQ(kSmiShiftSize, 31)((void) 0);
726 return ReinterpretCast<TaggedIndex>(BitcastWordToTaggedSigned(
727 WordSar(BitcastTaggedToWordForTagAndSmiBits(value),
728 IntPtrConstant(kSmiShiftSize))));
729 }
730 DCHECK(SmiValuesAre31Bits())((void) 0);
731 DCHECK_EQ(kSmiShiftSize, 0)((void) 0);
732 // Just sign-extend the lower 32 bits.
733 TNode<Int32T> raw =
734 TruncateWordToInt32(BitcastTaggedToWordForTagAndSmiBits(value));
735 return ReinterpretCast<TaggedIndex>(
736 BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(raw)));
737}
738
739TNode<Smi> CodeStubAssembler::NormalizeSmiIndex(TNode<Smi> smi_index) {
740 if (COMPRESS_POINTERS_BOOLfalse) {
741 TNode<Int32T> raw =
742 TruncateWordToInt32(BitcastTaggedToWordForTagAndSmiBits(smi_index));
743 smi_index = BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(raw));
744 }
745 return smi_index;
746}
747
748TNode<Smi> CodeStubAssembler::SmiFromInt32(TNode<Int32T> value) {
749 if (COMPRESS_POINTERS_BOOLfalse) {
750 static_assert(!COMPRESS_POINTERS_BOOLfalse || (kSmiShiftSize + kSmiTagSize == 1),
751 "Use shifting instead of add");
752 return BitcastWordToTaggedSigned(
753 ChangeUint32ToWord(Int32Add(value, value)));
754 }
755 return SmiTag(ChangeInt32ToIntPtr(value));
756}
757
758TNode<Smi> CodeStubAssembler::SmiFromUint32(TNode<Uint32T> value) {
759 CSA_DCHECK(this, IntPtrLessThan(ChangeUint32ToWord(value),((void)0)
760 IntPtrConstant(Smi::kMaxValue)))((void)0);
761 return SmiFromInt32(Signed(value));
762}
763
764TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
765 intptr_t constant_value;
766 if (TryToIntPtrConstant(value, &constant_value)) {
767 return (static_cast<uintptr_t>(constant_value) <=
768 static_cast<uintptr_t>(Smi::kMaxValue))
769 ? Int32TrueConstant()
770 : Int32FalseConstant();
771 }
772
773 return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
774}
775
776TNode<Smi> CodeStubAssembler::SmiTag(TNode<IntPtrT> value) {
777 int32_t constant_value;
778 if (TryToInt32Constant(value, &constant_value) &&
779 Smi::IsValid(constant_value)) {
780 return SmiConstant(constant_value);
781 }
782 if (COMPRESS_POINTERS_BOOLfalse) {
783 return SmiFromInt32(TruncateIntPtrToInt32(value));
784 }
785 TNode<Smi> smi =
786 BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
787 return smi;
788}
789
790TNode<IntPtrT> CodeStubAssembler::SmiUntag(TNode<Smi> value) {
791 intptr_t constant_value;
792 if (TryToIntPtrConstant(value, &constant_value)) {
793 return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
794 }
795 TNode<IntPtrT> raw_bits = BitcastTaggedToWordForTagAndSmiBits(value);
796 if (COMPRESS_POINTERS_BOOLfalse) {
797 // Clear the upper half using sign-extension.
798 raw_bits = ChangeInt32ToIntPtr(TruncateIntPtrToInt32(raw_bits));
799 }
800 return Signed(WordSarShiftOutZeros(raw_bits, SmiShiftBitsConstant()));
801}
802
803TNode<Int32T> CodeStubAssembler::SmiToInt32(TNode<Smi> value) {
804 if (COMPRESS_POINTERS_BOOLfalse) {
805 return Signed(Word32SarShiftOutZeros(
806 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(value)),
807 SmiShiftBitsConstant32()));
808 }
809 TNode<IntPtrT> result = SmiUntag(value);
810 return TruncateIntPtrToInt32(result);
811}
812
813TNode<Float64T> CodeStubAssembler::SmiToFloat64(TNode<Smi> value) {
814 return ChangeInt32ToFloat64(SmiToInt32(value));
815}
816
817TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
818 return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
819}
820
821TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
822 return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
823}
824
825TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
826 TNode<IntPtrT> b,
827 Label* if_overflow) {
828 TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
829 TNode<BoolT> overflow = Projection<1>(pair);
830 GotoIf(overflow, if_overflow);
831 return Projection<0>(pair);
832}
833
834TNode<IntPtrT> CodeStubAssembler::TryIntPtrSub(TNode<IntPtrT> a,
835 TNode<IntPtrT> b,
836 Label* if_overflow) {
837 TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(a, b);
838 TNode<BoolT> overflow = Projection<1>(pair);
839 GotoIf(overflow, if_overflow);
840 return Projection<0>(pair);
841}
842
843TNode<Int32T> CodeStubAssembler::TryInt32Mul(TNode<Int32T> a, TNode<Int32T> b,
844 Label* if_overflow) {
845 TNode<PairT<Int32T, BoolT>> pair = Int32MulWithOverflow(a, b);
846 TNode<BoolT> overflow = Projection<1>(pair);
847 GotoIf(overflow, if_overflow);
848 return Projection<0>(pair);
849}
850
851TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
852 Label* if_overflow) {
853 if (SmiValuesAre32Bits()) {
854 return BitcastWordToTaggedSigned(
855 TryIntPtrAdd(BitcastTaggedToWordForTagAndSmiBits(lhs),
856 BitcastTaggedToWordForTagAndSmiBits(rhs), if_overflow));
857 } else {
858 DCHECK(SmiValuesAre31Bits())((void) 0);
859 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(
860 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(lhs)),
861 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(rhs)));
862 TNode<BoolT> overflow = Projection<1>(pair);
863 GotoIf(overflow, if_overflow);
864 TNode<Int32T> result = Projection<0>(pair);
865 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
866 }
867}
868
869TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
870 Label* if_overflow) {
871 if (SmiValuesAre32Bits()) {
872 TNode<PairT<IntPtrT, BoolT>> pair =
873 IntPtrSubWithOverflow(BitcastTaggedToWordForTagAndSmiBits(lhs),
874 BitcastTaggedToWordForTagAndSmiBits(rhs));
875 TNode<BoolT> overflow = Projection<1>(pair);
876 GotoIf(overflow, if_overflow);
877 TNode<IntPtrT> result = Projection<0>(pair);
878 return BitcastWordToTaggedSigned(result);
879 } else {
880 DCHECK(SmiValuesAre31Bits())((void) 0);
881 TNode<PairT<Int32T, BoolT>> pair = Int32SubWithOverflow(
882 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(lhs)),
883 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(rhs)));
884 TNode<BoolT> overflow = Projection<1>(pair);
885 GotoIf(overflow, if_overflow);
886 TNode<Int32T> result = Projection<0>(pair);
887 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
888 }
889}
890
891TNode<Smi> CodeStubAssembler::TrySmiAbs(TNode<Smi> a, Label* if_overflow) {
892 if (SmiValuesAre32Bits()) {
893 TNode<PairT<IntPtrT, BoolT>> pair =
894 IntPtrAbsWithOverflow(BitcastTaggedToWordForTagAndSmiBits(a));
895 TNode<BoolT> overflow = Projection<1>(pair);
896 GotoIf(overflow, if_overflow);
897 TNode<IntPtrT> result = Projection<0>(pair);
898 return BitcastWordToTaggedSigned(result);
899 } else {
900 CHECK(SmiValuesAre31Bits())do { if ((__builtin_expect(!!(!(SmiValuesAre31Bits())), 0))) {
V8_Fatal("Check failed: %s.", "SmiValuesAre31Bits()"); } } while
(false)
;
901 CHECK(IsInt32AbsWithOverflowSupported())do { if ((__builtin_expect(!!(!(IsInt32AbsWithOverflowSupported
())), 0))) { V8_Fatal("Check failed: %s.", "IsInt32AbsWithOverflowSupported()"
); } } while (false)
;
902 TNode<PairT<Int32T, BoolT>> pair = Int32AbsWithOverflow(
903 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)));
904 TNode<BoolT> overflow = Projection<1>(pair);
905 GotoIf(overflow, if_overflow);
906 TNode<Int32T> result = Projection<0>(pair);
907 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
908 }
909}
910
911TNode<Number> CodeStubAssembler::NumberMax(TNode<Number> a, TNode<Number> b) {
912 // TODO(danno): This could be optimized by specifically handling smi cases.
913 TVARIABLE(Number, result)TVariable<Number> result(this);
914 Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
915 GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
916 GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
917 result = NanConstant();
918 Goto(&done);
919 BIND(&greater_than_equal_a)Bind(&greater_than_equal_a);
920 result = a;
921 Goto(&done);
922 BIND(&greater_than_equal_b)Bind(&greater_than_equal_b);
923 result = b;
924 Goto(&done);
925 BIND(&done)Bind(&done);
926 return result.value();
927}
928
929TNode<Number> CodeStubAssembler::NumberMin(TNode<Number> a, TNode<Number> b) {
930 // TODO(danno): This could be optimized by specifically handling smi cases.
931 TVARIABLE(Number, result)TVariable<Number> result(this);
932 Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
933 GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
934 GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
935 result = NanConstant();
936 Goto(&done);
937 BIND(&greater_than_equal_a)Bind(&greater_than_equal_a);
938 result = b;
939 Goto(&done);
940 BIND(&greater_than_equal_b)Bind(&greater_than_equal_b);
941 result = a;
942 Goto(&done);
943 BIND(&done)Bind(&done);
944 return result.value();
945}
946
947TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
948 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
949 Label return_result(this, &var_result),
950 return_minuszero(this, Label::kDeferred),
951 return_nan(this, Label::kDeferred);
952
953 // Untag {a} and {b}.
954 TNode<Int32T> int_a = SmiToInt32(a);
955 TNode<Int32T> int_b = SmiToInt32(b);
956
957 // Return NaN if {b} is zero.
958 GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);
959
960 // Check if {a} is non-negative.
961 Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
962 Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
963 &if_aisnegative);
964
965 BIND(&if_aisnotnegative)Bind(&if_aisnotnegative);
966 {
967 // Fast case, don't need to check any other edge cases.
968 TNode<Int32T> r = Int32Mod(int_a, int_b);
969 var_result = SmiFromInt32(r);
970 Goto(&return_result);
971 }
972
973 BIND(&if_aisnegative)Bind(&if_aisnegative);
974 {
975 if (SmiValuesAre32Bits()) {
976 // Check if {a} is kMinInt and {b} is -1 (only relevant if the
977 // kMinInt is actually representable as a Smi).
978 Label join(this);
979 GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
980 GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
981 Goto(&join);
982 BIND(&join)Bind(&join);
983 }
984
985 // Perform the integer modulus operation.
986 TNode<Int32T> r = Int32Mod(int_a, int_b);
987
988 // Check if {r} is zero, and if so return -0, because we have to
989 // take the sign of the left hand side {a}, which is negative.
990 GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);
991
992 // The remainder {r} can be outside the valid Smi range on 32bit
993 // architectures, so we cannot just say SmiFromInt32(r) here.
994 var_result = ChangeInt32ToTagged(r);
995 Goto(&return_result);
996 }
997
998 BIND(&return_minuszero)Bind(&return_minuszero);
999 var_result = MinusZeroConstant();
1000 Goto(&return_result);
1001
1002 BIND(&return_nan)Bind(&return_nan);
1003 var_result = NanConstant();
1004 Goto(&return_result);
1005
1006 BIND(&return_result)Bind(&return_result);
1007 return var_result.value();
1008}
1009
1010TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
1011 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
1012 TVARIABLE(Float64T, var_lhs_float64)TVariable<Float64T> var_lhs_float64(this);
1013 TVARIABLE(Float64T, var_rhs_float64)TVariable<Float64T> var_rhs_float64(this);
1014 Label return_result(this, &var_result);
1015
1016 // Both {a} and {b} are Smis. Convert them to integers and multiply.
1017 TNode<Int32T> lhs32 = SmiToInt32(a);
1018 TNode<Int32T> rhs32 = SmiToInt32(b);
1019 auto pair = Int32MulWithOverflow(lhs32, rhs32);
1020
1021 TNode<BoolT> overflow = Projection<1>(pair);
1022
1023 // Check if the multiplication overflowed.
1024 Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
1025 Branch(overflow, &if_overflow, &if_notoverflow);
1026 BIND(&if_notoverflow)Bind(&if_notoverflow);
1027 {
1028 // If the answer is zero, we may need to return -0.0, depending on the
1029 // input.
1030 Label answer_zero(this), answer_not_zero(this);
1031 TNode<Int32T> answer = Projection<0>(pair);
1032 TNode<Int32T> zero = Int32Constant(0);
1033 Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
1034 BIND(&answer_not_zero)Bind(&answer_not_zero);
1035 {
1036 var_result = ChangeInt32ToTagged(answer);
1037 Goto(&return_result);
1038 }
1039 BIND(&answer_zero)Bind(&answer_zero);
1040 {
1041 TNode<Int32T> or_result = Word32Or(lhs32, rhs32);
1042 Label if_should_be_negative_zero(this), if_should_be_zero(this);
1043 Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
1044 &if_should_be_zero);
1045 BIND(&if_should_be_negative_zero)Bind(&if_should_be_negative_zero);
1046 {
1047 var_result = MinusZeroConstant();
1048 Goto(&return_result);
1049 }
1050 BIND(&if_should_be_zero)Bind(&if_should_be_zero);
1051 {
1052 var_result = SmiConstant(0);
1053 Goto(&return_result);
1054 }
1055 }
1056 }
1057 BIND(&if_overflow)Bind(&if_overflow);
1058 {
1059 var_lhs_float64 = SmiToFloat64(a);
1060 var_rhs_float64 = SmiToFloat64(b);
1061 TNode<Float64T> value =
1062 Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
1063 var_result = AllocateHeapNumberWithValue(value);
1064 Goto(&return_result);
1065 }
1066
1067 BIND(&return_result)Bind(&return_result);
1068 return var_result.value();
1069}
1070
1071TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
1072 Label* bailout) {
1073 // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
1074 // is zero.
1075 GotoIf(TaggedEqual(divisor, SmiConstant(0)), bailout);
1076
1077 // Do floating point division if {dividend} is zero and {divisor} is
1078 // negative.
1079 Label dividend_is_zero(this), dividend_is_not_zero(this);
1080 Branch(TaggedEqual(dividend, SmiConstant(0)), &dividend_is_zero,
1081 &dividend_is_not_zero);
1082
1083 BIND(&dividend_is_zero)Bind(&dividend_is_zero);
1084 {
1085 GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
1086 Goto(&dividend_is_not_zero);
1087 }
1088 BIND(&dividend_is_not_zero)Bind(&dividend_is_not_zero);
1089
1090 TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
1091 TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
1092
1093 // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
1094 // if the Smi size is 31) and {divisor} is -1.
1095 Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
1096 Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
1097 &divisor_is_minus_one, &divisor_is_not_minus_one);
1098
1099 BIND(&divisor_is_minus_one)Bind(&divisor_is_minus_one);
1100 {
1101 GotoIf(Word32Equal(
1102 untagged_dividend,
1103 Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
1104 bailout);
1105 Goto(&divisor_is_not_minus_one);
1106 }
1107 BIND(&divisor_is_not_minus_one)Bind(&divisor_is_not_minus_one);
1108
1109 TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
1110 TNode<Int32T> truncated = Int32Mul(untagged_result, untagged_divisor);
1111
1112 // Do floating point division if the remainder is not 0.
1113 GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
1114
1115 return SmiFromInt32(untagged_result);
1116}
1117
1118TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
1119 TNode<Smi> y) {
1120 TNode<ExternalReference> smi_lexicographic_compare =
1121 ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
1122 TNode<ExternalReference> isolate_ptr =
1123 ExternalConstant(ExternalReference::isolate_address(isolate()));
1124 return CAST(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged(),Cast(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged
(), std::make_pair(MachineType::Pointer(), isolate_ptr), std::
make_pair(MachineType::AnyTagged(), x), std::make_pair(MachineType
::AnyTagged(), y)))
1125 std::make_pair(MachineType::Pointer(), isolate_ptr),Cast(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged
(), std::make_pair(MachineType::Pointer(), isolate_ptr), std::
make_pair(MachineType::AnyTagged(), x), std::make_pair(MachineType
::AnyTagged(), y)))
1126 std::make_pair(MachineType::AnyTagged(), x),Cast(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged
(), std::make_pair(MachineType::Pointer(), isolate_ptr), std::
make_pair(MachineType::AnyTagged(), x), std::make_pair(MachineType
::AnyTagged(), y)))
1127 std::make_pair(MachineType::AnyTagged(), y)))Cast(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged
(), std::make_pair(MachineType::Pointer(), isolate_ptr), std::
make_pair(MachineType::AnyTagged(), x), std::make_pair(MachineType
::AnyTagged(), y)))
;
1128}
1129
1130TNode<Int32T> CodeStubAssembler::TruncateWordToInt32(TNode<WordT> value) {
1131 if (Is64()) {
1132 return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
1133 }
1134 return ReinterpretCast<Int32T>(value);
1135}
1136
1137TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(TNode<IntPtrT> value) {
1138 if (Is64()) {
1139 return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
1140 }
1141 return ReinterpretCast<Int32T>(value);
1142}
1143
1144TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
1145 STATIC_ASSERT(kSmiTagMask < kMaxUInt32)static_assert(kSmiTagMask < kMaxUInt32, "kSmiTagMask < kMaxUInt32"
)
;
1146 return Word32Equal(
1147 Word32And(TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)),
1148 Int32Constant(kSmiTagMask)),
1149 Int32Constant(0));
1150}
1151
1152TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(TNode<MaybeObject> a) {
1153 return Word32BinaryNot(TaggedIsSmi(a));
1154}
1155
1156TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(TNode<Object> a) {
1157#if defined(V8_HOST_ARCH_32_BIT) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1158 return Word32Equal(
1159 Word32And(
1160 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)),
1161 Uint32Constant(static_cast<uint32_t>(kSmiTagMask | kSmiSignMask))),
1162 Int32Constant(0));
1163#else
1164 return WordEqual(WordAnd(BitcastTaggedToWordForTagAndSmiBits(a),
1165 IntPtrConstant(kSmiTagMask | kSmiSignMask)),
1166 IntPtrConstant(0));
1167#endif
1168}
1169
1170TNode<BoolT> CodeStubAssembler::WordIsAligned(TNode<WordT> word,
1171 size_t alignment) {
1172 DCHECK(base::bits::IsPowerOfTwo(alignment))((void) 0);
1173 DCHECK_LE(alignment, kMaxUInt32)((void) 0);
1174 return Word32Equal(
1175 Int32Constant(0),
1176 Word32And(TruncateWordToInt32(word),
1177 Uint32Constant(static_cast<uint32_t>(alignment) - 1)));
1178}
1179
1180#if DEBUG
1181void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
1182 CodeAssembler::Bind(label, debug_info);
1183}
1184#endif // DEBUG
1185
1186void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
1187
1188TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
1189 TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
1190 return LoadFixedDoubleArrayElement(array, index, if_hole);
1191}
1192
1193void CodeStubAssembler::BranchIfJSReceiver(TNode<Object> object, Label* if_true,
1194 Label* if_false) {
1195 GotoIf(TaggedIsSmi(object), if_false);
1196 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE)static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE, "LAST_JS_RECEIVER_TYPE == LAST_TYPE"
)
;
1197 Branch(IsJSReceiver(CAST(object)Cast(object)), if_true, if_false);
1198}
1199
1200void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
1201#ifdef V8_ENABLE_FORCE_SLOW_PATH
1202 const TNode<ExternalReference> force_slow_path_addr =
1203 ExternalConstant(ExternalReference::force_slow_path(isolate()));
1204 const TNode<Uint8T> force_slow = Load<Uint8T>(force_slow_path_addr);
1205
1206 GotoIf(force_slow, if_true);
1207#endif
1208}
1209
1210TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
1211 AllocationFlags flags,
1212 TNode<RawPtrT> top_address,
1213 TNode<RawPtrT> limit_address) {
1214 Label if_out_of_memory(this, Label::kDeferred);
1215
1216 // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
1217 // but bump pointer allocation) into a builtin to save code space. The
1218 // size_in_bytes check may be moved there as well since a non-smi
1219 // size_in_bytes probably doesn't fit into the bump pointer region
1220 // (double-check that).
1221
1222 intptr_t size_in_bytes_constant;
1223 bool size_in_bytes_is_constant = false;
1224 if (TryToIntPtrConstant(size_in_bytes, &size_in_bytes_constant)) {
2
Assuming the condition is true
3
Taking true branch
1225 size_in_bytes_is_constant = true;
1226 CHECK(Internals::IsValidSmi(size_in_bytes_constant))do { if ((__builtin_expect(!!(!(Internals::IsValidSmi(size_in_bytes_constant
))), 0))) { V8_Fatal("Check failed: %s.", "Internals::IsValidSmi(size_in_bytes_constant)"
); } } while (false)
;
4
Taking false branch
5
Loop condition is false. Exiting loop
1227 CHECK_GT(size_in_bytes_constant, 0)do { bool _cmp = ::v8::base::CmpGTImpl< typename ::v8::base
::pass_value_or_ref<decltype(size_in_bytes_constant)>::
type, typename ::v8::base::pass_value_or_ref<decltype(0)>
::type>((size_in_bytes_constant), (0)); do { if ((__builtin_expect
(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s.", "size_in_bytes_constant"
" " ">" " " "0"); } } while (false); } while (false)
;
6
Taking false branch
7
Loop condition is false. Exiting loop
8
Loop condition is false. Exiting loop
1228 } else {
1229 GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
1230 }
1231
1232 TNode<RawPtrT> top = Load<RawPtrT>(top_address);
1233 TNode<RawPtrT> limit = Load<RawPtrT>(limit_address);
1234
1235 // If there's not enough space, call the runtime.
1236 TVARIABLE(Object, result)TVariable<Object> result(this);
1237 Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);
1238
1239 bool needs_double_alignment = flags & AllocationFlag::kDoubleAlignment;
1240 bool allow_large_object_allocation =
1241 flags & AllocationFlag::kAllowLargeObjectAllocation;
1242
1243 if (allow_large_object_allocation) {
9
Assuming 'allow_large_object_allocation' is false
10
Taking false branch
1244 Label next(this);
1245 GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
1246
1247 TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
1248 AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1249 AllowLargeObjectAllocationFlag::encode(allow_large_object_allocation)));
1250 result =
1251 CallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
1252 SmiTag(size_in_bytes), runtime_flags);
1253 Goto(&out);
1254
1255 BIND(&next)Bind(&next);
1256 }
1257
1258 TVARIABLE(IntPtrT, adjusted_size, size_in_bytes)TVariable<IntPtrT> adjusted_size(size_in_bytes, this);
1259
1260 if (needs_double_alignment) {
11
Assuming 'needs_double_alignment' is false
12
Taking false branch
1261 Label next(this);
1262 GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);
1263
1264 adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
1265 Goto(&next);
1266
1267 BIND(&next)Bind(&next);
1268 }
1269
1270 TNode<IntPtrT> new_top =
1271 IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());
1272
1273 Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
1274 &no_runtime_call);
1275
1276 BIND(&runtime_call)Bind(&runtime_call);
1277 {
1278 TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
1279 AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1280 AllowLargeObjectAllocationFlag::encode(allow_large_object_allocation)));
13
Calling 'BitField::encode'
1281 if (flags & AllocationFlag::kPretenured) {
1282 result =
1283 CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
1284 SmiTag(size_in_bytes), runtime_flags);
1285 } else {
1286 result =
1287 CallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
1288 SmiTag(size_in_bytes), runtime_flags);
1289 }
1290 Goto(&out);
1291 }
1292
1293 // When there is enough space, return `top' and bump it up.
1294 BIND(&no_runtime_call)Bind(&no_runtime_call);
1295 {
1296 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
1297 new_top);
1298
1299 TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top))TVariable<IntPtrT> address(UncheckedCast<IntPtrT>
(top), this)
;
1300
1301 if (needs_double_alignment) {
1302 Label next(this);
1303 GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);
1304
1305 // Store a filler and increase the address by 4.
1306 StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
1307 OnePointerFillerMapConstant());
1308 address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
1309 Goto(&next);
1310
1311 BIND(&next)Bind(&next);
1312 }
1313
1314 result = BitcastWordToTagged(
1315 IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
1316 Goto(&out);
1317 }
1318
1319 if (!size_in_bytes_is_constant) {
1320 BIND(&if_out_of_memory)Bind(&if_out_of_memory);
1321 CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
1322 NoContextConstant());
1323 Unreachable();
1324 }
1325
1326 BIND(&out)Bind(&out);
1327 return UncheckedCast<HeapObject>(result.value());
1328}
1329
1330TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
1331 TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1332 TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1333 DCHECK_EQ(flags & AllocationFlag::kDoubleAlignment, 0)((void) 0);
1334 return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1
Calling 'CodeStubAssembler::AllocateRaw'
1335}
1336
1337TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
1338 TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1339 TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1340#if defined(V8_HOST_ARCH_32_BIT)
1341 return AllocateRaw(size_in_bytes, flags | AllocationFlag::kDoubleAlignment,
1342 top_address, limit_address);
1343#elif defined(V8_HOST_ARCH_64_BIT1)
1344#ifdef V8_COMPRESS_POINTERS
1345// TODO(ishell, v8:8875): Consider using aligned allocations once the
1346// allocation alignment inconsistency is fixed. For now we keep using
1347// unaligned access since both x64 and arm64 architectures (where pointer
1348// compression is supported) allow unaligned access to doubles and full words.
1349#endif // V8_COMPRESS_POINTERS
1350 // Allocation on 64 bit machine is naturally double aligned
1351 return AllocateRaw(size_in_bytes, flags & ~AllocationFlag::kDoubleAlignment,
1352 top_address, limit_address);
1353#else
1354#error Architecture not supported
1355#endif
1356}
1357
1358TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
1359 TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
1360 DCHECK(flags == AllocationFlag::kNone ||((void) 0)
1361 flags == AllocationFlag::kDoubleAlignment)((void) 0);
1362 CSA_DCHECK(this, IsRegularHeapObjectSize(size_in_bytes))((void)0);
1363 return Allocate(size_in_bytes, flags);
1364}
1365
1366TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
1367 AllocationFlags flags) {
1368 Comment("Allocate");
1369 if (FLAG_single_generation) flags |= AllocationFlag::kPretenured;
1370 bool const new_space = !(flags & AllocationFlag::kPretenured);
1371 bool const allow_large_objects =
1372 flags & AllocationFlag::kAllowLargeObjectAllocation;
1373 if (!allow_large_objects) {
1374 intptr_t size_constant;
1375 if (TryToIntPtrConstant(size_in_bytes, &size_constant)) {
1376 CHECK_LE(size_constant, kMaxRegularHeapObjectSize)do { bool _cmp = ::v8::base::CmpLEImpl< typename ::v8::base
::pass_value_or_ref<decltype(size_constant)>::type, typename
::v8::base::pass_value_or_ref<decltype(kMaxRegularHeapObjectSize
)>::type>((size_constant), (kMaxRegularHeapObjectSize))
; do { if ((__builtin_expect(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s."
, "size_constant" " " "<=" " " "kMaxRegularHeapObjectSize"
); } } while (false); } while (false)
;
1377 } else {
1378 CSA_DCHECK(this, IsRegularHeapObjectSize(size_in_bytes))((void)0);
1379 }
1380 }
1381 if (!(flags & AllocationFlag::kDoubleAlignment)) {
1382 return OptimizedAllocate(
1383 size_in_bytes,
1384 new_space ? AllocationType::kYoung : AllocationType::kOld,
1385 allow_large_objects ? AllowLargeObjects::kTrue
1386 : AllowLargeObjects::kFalse);
1387 }
1388 TNode<ExternalReference> top_address = ExternalConstant(
1389 new_space
1390 ? ExternalReference::new_space_allocation_top_address(isolate())
1391 : ExternalReference::old_space_allocation_top_address(isolate()));
1392
1393#ifdef DEBUG
1394 // New space is optional and if disabled both top and limit return
1395 // kNullAddress.
1396 if (ExternalReference::new_space_allocation_top_address(isolate())
1397 .address() != kNullAddress) {
1398 Address raw_top_address =
1399 ExternalReference::new_space_allocation_top_address(isolate())
1400 .address();
1401 Address raw_limit_address =
1402 ExternalReference::new_space_allocation_limit_address(isolate())
1403 .address();
1404
1405 CHECK_EQ(kSystemPointerSize, raw_limit_address - raw_top_address)do { bool _cmp = ::v8::base::CmpEQImpl< typename ::v8::base
::pass_value_or_ref<decltype(kSystemPointerSize)>::type
, typename ::v8::base::pass_value_or_ref<decltype(raw_limit_address
- raw_top_address)>::type>((kSystemPointerSize), (raw_limit_address
- raw_top_address)); do { if ((__builtin_expect(!!(!(_cmp)),
0))) { V8_Fatal("Check failed: %s.", "kSystemPointerSize" " "
"==" " " "raw_limit_address - raw_top_address"); } } while (
false); } while (false)
;
1406 }
1407
1408 DCHECK_EQ(kSystemPointerSize,((void) 0)
1409 ExternalReference::old_space_allocation_limit_address(isolate())((void) 0)
1410 .address() -((void) 0)
1411 ExternalReference::old_space_allocation_top_address(isolate())((void) 0)
1412 .address())((void) 0);
1413#endif
1414
1415 TNode<IntPtrT> limit_address =
1416 IntPtrAdd(ReinterpretCast<IntPtrT>(top_address),
1417 IntPtrConstant(kSystemPointerSize));
1418
1419 if (flags & AllocationFlag::kDoubleAlignment) {
1420 return AllocateRawDoubleAligned(size_in_bytes, flags,
1421 ReinterpretCast<RawPtrT>(top_address),
1422 ReinterpretCast<RawPtrT>(limit_address));
1423 } else {
1424 return AllocateRawUnaligned(size_in_bytes, flags,
1425 ReinterpretCast<RawPtrT>(top_address),
1426 ReinterpretCast<RawPtrT>(limit_address));
1427 }
1428}
1429
1430TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1431 AllocationFlags flags) {
1432 CHECK(flags == AllocationFlag::kNone ||do { if ((__builtin_expect(!!(!(flags == AllocationFlag::kNone
|| flags == AllocationFlag::kDoubleAlignment)), 0))) { V8_Fatal
("Check failed: %s.", "flags == AllocationFlag::kNone || flags == AllocationFlag::kDoubleAlignment"
); } } while (false)
1433 flags == AllocationFlag::kDoubleAlignment)do { if ((__builtin_expect(!!(!(flags == AllocationFlag::kNone
|| flags == AllocationFlag::kDoubleAlignment)), 0))) { V8_Fatal
("Check failed: %s.", "flags == AllocationFlag::kNone || flags == AllocationFlag::kDoubleAlignment"
); } } while (false)
;
1434 DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize)((void) 0);
1435 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1436}
1437
1438TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
1439 AllocationFlags flags) {
1440 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1441}
1442
1443TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
1444 return UintPtrLessThanOrEqual(size,
1445 IntPtrConstant(kMaxRegularHeapObjectSize));
1446}
1447
1448#if V8_ENABLE_WEBASSEMBLY1
1449TNode<HeapObject> CodeStubAssembler::AllocateWasmArray(
1450 TNode<IntPtrT> size_in_bytes, int initialization) {
1451 TNode<HeapObject> array =
1452 Allocate(size_in_bytes, AllocationFlag::kAllowLargeObjectAllocation);
1453 if (initialization == kUninitialized) return array;
1454
1455 TNode<IntPtrT> array_address = BitcastTaggedToWord(array);
1456 TNode<IntPtrT> start = IntPtrAdd(
1457 array_address, IntPtrConstant(WasmArray::kHeaderSize - kHeapObjectTag));
1458 TNode<IntPtrT> limit = IntPtrAdd(
1459 array_address, IntPtrSub(size_in_bytes, IntPtrConstant(kHeapObjectTag)));
1460
1461 TNode<Object> value;
1462 if (initialization == kInitializeToZero) {
1463 // A pointer-sized zero pattern is just what we need for numeric Wasm
1464 // arrays (their object size is rounded up to a multiple of kPointerSize).
1465 value = SmiConstant(0);
1466 } else if (initialization == kInitializeToNull) {
1467 value = NullConstant();
1468 } else {
1469 UNREACHABLE()V8_Fatal("unreachable code");
1470 }
1471 StoreFieldsNoWriteBarrier(start, limit, value);
1472 return array;
1473}
1474#endif // V8_ENABLE_WEBASSEMBLY
1475
1476void CodeStubAssembler::BranchIfToBooleanIsTrue(TNode<Object> value,
1477 Label* if_true,
1478 Label* if_false) {
1479 Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
1480 if_bigint(this, Label::kDeferred);
1481 // Rule out false {value}.
1482 GotoIf(TaggedEqual(value, FalseConstant()), if_false);
1483
1484 // Check if {value} is a Smi or a HeapObject.
1485 Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
1486
1487 BIND(&if_smi)Bind(&if_smi);
1488 {
1489 // The {value} is a Smi, only need to check against zero.
1490 BranchIfSmiEqual(CAST(value)Cast(value), SmiConstant(0), if_false, if_true);
1491 }
1492
1493 BIND(&if_notsmi)Bind(&if_notsmi);
1494 {
1495 TNode<HeapObject> value_heapobject = CAST(value)Cast(value);
1496
1497 // Check if {value} is the empty string.
1498 GotoIf(IsEmptyString(value_heapobject), if_false);
1499
1500 // The {value} is a HeapObject, load its map.
1501 TNode<Map> value_map = LoadMap(value_heapobject);
1502
1503 // Only null, undefined and document.all have the undetectable bit set,
1504 // so we can return false immediately when that bit is set.
1505 GotoIf(IsUndetectableMap(value_map), if_false);
1506
1507 // We still need to handle numbers specially, but all other {value}s
1508 // that make it here yield true.
1509 GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1510 Branch(IsBigInt(value_heapobject), &if_bigint, if_true);
1511
1512 BIND(&if_heapnumber)Bind(&if_heapnumber);
1513 {
1514 // Load the floating point value of {value}.
1515 TNode<Float64T> value_value =
1516 LoadObjectField<Float64T>(value_heapobject, HeapNumber::kValueOffset);
1517
1518 // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
1519 Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1520 if_true, if_false);
1521 }
1522
1523 BIND(&if_bigint)Bind(&if_bigint);
1524 {
1525 TNode<BigInt> bigint = CAST(value)Cast(value);
1526 TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
1527 TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
1528 Branch(Word32Equal(length, Int32Constant(0)), if_false, if_true);
1529 }
1530 }
1531}
1532
1533TNode<RawPtrT> CodeStubAssembler::LoadSandboxedPointerFromObject(
1534 TNode<HeapObject> object, TNode<IntPtrT> field_offset) {
1535#ifdef V8_SANDBOXED_POINTERS
1536 return ReinterpretCast<RawPtrT>(
1537 LoadObjectField<SandboxedPtrT>(object, field_offset));
1538#else
1539 return LoadObjectField<RawPtrT>(object, field_offset);
1540#endif // V8_SANDBOXED_POINTERS
1541}
1542
1543void CodeStubAssembler::StoreSandboxedPointerToObject(TNode<HeapObject> object,
1544 TNode<IntPtrT> offset,
1545 TNode<RawPtrT> pointer) {
1546#ifdef V8_SANDBOXED_POINTERS
1547 TNode<SandboxedPtrT> sbx_ptr = ReinterpretCast<SandboxedPtrT>(pointer);
1548
1549 // Ensure pointer points into the sandbox.
1550 TNode<ExternalReference> sandbox_base_address =
1551 ExternalConstant(ExternalReference::sandbox_base_address());
1552 TNode<ExternalReference> sandbox_end_address =
1553 ExternalConstant(ExternalReference::sandbox_end_address());
1554 TNode<UintPtrT> sandbox_base = Load<UintPtrT>(sandbox_base_address);
1555 TNode<UintPtrT> sandbox_end = Load<UintPtrT>(sandbox_end_address);
1556 CSA_CHECK(this, UintPtrGreaterThanOrEqual(sbx_ptr, sandbox_base))(this)->FastCheck(UintPtrGreaterThanOrEqual(sbx_ptr, sandbox_base
))
;
1557 CSA_CHECK(this, UintPtrLessThan(sbx_ptr, sandbox_end))(this)->FastCheck(UintPtrLessThan(sbx_ptr, sandbox_end));
1558
1559 StoreObjectFieldNoWriteBarrier<SandboxedPtrT>(object, offset, sbx_ptr);
1560#else
1561 StoreObjectFieldNoWriteBarrier<RawPtrT>(object, offset, pointer);
1562#endif // V8_SANDBOXED_POINTERS
1563}
1564
1565TNode<RawPtrT> CodeStubAssembler::EmptyBackingStoreBufferConstant() {
1566#ifdef V8_SANDBOXED_POINTERS
1567 // TODO(chromium:1218005) consider creating a LoadSandboxedPointerConstant()
1568 // if more of these constants are required later on.
1569 TNode<ExternalReference> empty_backing_store_buffer =
1570 ExternalConstant(ExternalReference::empty_backing_store_buffer());
1571 return Load<RawPtrT>(empty_backing_store_buffer);
1572#else
1573 return ReinterpretCast<RawPtrT>(IntPtrConstant(0));
1574#endif // V8_SANDBOXED_POINTERS
1575}
1576
1577#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
1578TNode<ExternalPointerT> CodeStubAssembler::ChangeIndexToExternalPointer(
1579 TNode<Uint32T> index) {
1580 DCHECK_EQ(kExternalPointerSize, kUInt32Size)((void) 0);
1581 TNode<Uint32T> shifted_index =
1582 Word32Shl(index, Uint32Constant(kExternalPointerIndexShift));
1583 return ReinterpretCast<ExternalPointerT>(shifted_index);
1584}
1585
1586TNode<Uint32T> CodeStubAssembler::ChangeExternalPointerToIndex(
1587 TNode<ExternalPointerT> external_pointer) {
1588 DCHECK_EQ(kExternalPointerSize, kUInt32Size)((void) 0);
1589 TNode<Uint32T> shifted_index = ReinterpretCast<Uint32T>(external_pointer);
1590 return Word32Shr(shifted_index, Uint32Constant(kExternalPointerIndexShift));
1591}
1592#endif // V8_SANDBOXED_EXTERNAL_POINTERS
1593
1594void CodeStubAssembler::InitializeExternalPointerField(TNode<HeapObject> object,
1595 TNode<IntPtrT> offset) {
1596#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
1597 TNode<ExternalReference> external_pointer_table_address = ExternalConstant(
1598 ExternalReference::external_pointer_table_address(isolate()));
1599
1600 // We could implement the fast path for allocating from the freelist here,
1601 // however, this logic needs to be atomic and so requires CSA to expose
1602 // atomic operations.
1603 TNode<ExternalReference> table_allocate_function = ExternalConstant(
1604 ExternalReference::external_pointer_table_allocate_entry());
1605 TNode<Uint32T> index = UncheckedCast<Uint32T>(CallCFunction(
1606 table_allocate_function, MachineType::Uint32(),
1607 std::make_pair(MachineType::Pointer(), external_pointer_table_address)));
1608
1609 // Currently, we assume that the caller will immediately initialize the entry
1610 // through StoreExternalPointerToObject after allocating it. That way, we
1611 // avoid initializing the entry twice (once with nullptr, then again with the
1612 // real value). TODO(saelo) initialize the entry with zero here and switch
1613 // callers to a version that initializes the entry with a given pointer.
1614
1615 TNode<ExternalPointerT> pointer = ChangeIndexToExternalPointer(index);
1616 StoreObjectFieldNoWriteBarrier<ExternalPointerT>(object, offset, pointer);
1617#endif
1618}
1619
1620TNode<RawPtrT> CodeStubAssembler::LoadExternalPointerFromObject(
1621 TNode<HeapObject> object, TNode<IntPtrT> offset,
1622 ExternalPointerTag external_pointer_tag) {
1623#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
1624 TNode<ExternalReference> external_pointer_table_address = ExternalConstant(
1625 ExternalReference::external_pointer_table_address(isolate()));
1626 TNode<RawPtrT> table = UncheckedCast<RawPtrT>(
1627 Load(MachineType::Pointer(), external_pointer_table_address,
1628 UintPtrConstant(Internals::kExternalPointerTableBufferOffset)));
1629
1630 TNode<ExternalPointerT> encoded =
1631 LoadObjectField<ExternalPointerT>(object, offset);
1632 TNode<Uint32T> index = ChangeExternalPointerToIndex(encoded);
1633 // TODO(v8:10391): consider updating ElementOffsetFromIndex to generate code
1634 // that does one shift right instead of two shifts (right and then left).
1635 TNode<IntPtrT> table_offset = ElementOffsetFromIndex(
1636 ChangeUint32ToWord(index), SYSTEM_POINTER_ELEMENTS, 0);
1637
1638 TNode<UintPtrT> entry = Load<UintPtrT>(table, table_offset);
1639 if (external_pointer_tag != 0) {
1640 TNode<UintPtrT> tag = UintPtrConstant(~external_pointer_tag);
1641 entry = UncheckedCast<UintPtrT>(WordAnd(entry, tag));
1642 }
1643 return UncheckedCast<RawPtrT>(UncheckedCast<WordT>(entry));
1644#else
1645 return LoadObjectField<RawPtrT>(object, offset);
1646#endif // V8_SANDBOXED_EXTERNAL_POINTERS
1647}
1648
1649void CodeStubAssembler::StoreExternalPointerToObject(
1650 TNode<HeapObject> object, TNode<IntPtrT> offset, TNode<RawPtrT> pointer,
1651 ExternalPointerTag external_pointer_tag) {
1652#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
1653 TNode<ExternalReference> external_pointer_table_address = ExternalConstant(
1654 ExternalReference::external_pointer_table_address(isolate()));
1655 TNode<RawPtrT> table = UncheckedCast<RawPtrT>(
1656 Load(MachineType::Pointer(), external_pointer_table_address,
1657 UintPtrConstant(Internals::kExternalPointerTableBufferOffset)));
1658
1659 TNode<ExternalPointerT> encoded =
1660 LoadObjectField<ExternalPointerT>(object, offset);
1661 TNode<Uint32T> index = ChangeExternalPointerToIndex(encoded);
1662 // TODO(v8:10391): consider updating ElementOffsetFromIndex to generate code
1663 // that does one shift right instead of two shifts (right and then left).
1664 TNode<IntPtrT> table_offset = ElementOffsetFromIndex(
1665 ChangeUint32ToWord(index), SYSTEM_POINTER_ELEMENTS, 0);
1666
1667 TNode<UintPtrT> value = UncheckedCast<UintPtrT>(pointer);
1668 if (external_pointer_tag != 0) {
1669 TNode<UintPtrT> tag = UintPtrConstant(external_pointer_tag);
1670 value = UncheckedCast<UintPtrT>(WordOr(pointer, tag));
1671 }
1672 StoreNoWriteBarrier(MachineType::PointerRepresentation(), table, table_offset,
1673 value);
1674#else
1675 StoreObjectFieldNoWriteBarrier<RawPtrT>(object, offset, pointer);
1676#endif // V8_SANDBOXED_EXTERNAL_POINTERS
1677}
1678
1679TNode<Object> CodeStubAssembler::LoadFromParentFrame(int offset) {
1680 TNode<RawPtrT> frame_pointer = LoadParentFramePointer();
1681 return LoadFullTagged(frame_pointer, IntPtrConstant(offset));
1682}
1683
1684TNode<Uint8T> CodeStubAssembler::LoadUint8Ptr(TNode<RawPtrT> ptr,
1685 TNode<IntPtrT> offset) {
1686 return Load<Uint8T>(IntPtrAdd(ReinterpretCast<IntPtrT>(ptr), offset));
1687}
1688
1689TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
1690 TNode<HeapObject> object, int offset) {
1691 // Please use LoadMap(object) instead.
1692 DCHECK_NE(offset, HeapObject::kMapOffset)((void) 0);
1693 if (SmiValuesAre32Bits()) {
1694#if V8_TARGET_LITTLE_ENDIAN1
1695 offset += 4;
1696#endif
1697 return ChangeInt32ToIntPtr(LoadObjectField<Int32T>(object, offset));
1698 } else {
1699 return SmiToIntPtr(LoadObjectField<Smi>(object, offset));
1700 }
1701}
1702
1703TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(
1704 TNode<HeapObject> object, int offset) {
1705 // Please use LoadMap(object) instead.
1706 DCHECK_NE(offset, HeapObject::kMapOffset)((void) 0);
1707 if (SmiValuesAre32Bits()) {
1708#if V8_TARGET_LITTLE_ENDIAN1
1709 offset += 4;
1710#endif
1711 return LoadObjectField<Int32T>(object, offset);
1712 } else {
1713 return SmiToInt32(LoadObjectField<Smi>(object, offset));
1714 }
1715}
1716
1717TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1718 TNode<HeapObject> object) {
1719 CSA_DCHECK(this, Word32Or(IsHeapNumber(object), IsOddball(object)))((void)0);
1720 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset)static_assert(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset
, "HeapNumber::kValueOffset == Oddball::kToNumberRawOffset")
;
1721 return LoadObjectField<Float64T>(object, HeapNumber::kValueOffset);
1722}
1723
1724TNode<Map> CodeStubAssembler::GetInstanceTypeMap(InstanceType instance_type) {
1725 Handle<Map> map_handle(
1726 Map::GetInstanceTypeMap(ReadOnlyRoots(isolate()), instance_type),
1727 isolate());
1728 return HeapConstant(map_handle);
1729}
1730
1731TNode<Map> CodeStubAssembler::LoadMap(TNode<HeapObject> object) {
1732 TNode<Map> map = LoadObjectField<Map>(object, HeapObject::kMapOffset);
1733#ifdef V8_MAP_PACKING
1734 // Check the loaded map is unpacked. i.e. the lowest two bits != 0b10
1735 CSA_DCHECK(this,((void)0)
1736 WordNotEqual(WordAnd(BitcastTaggedToWord(map),((void)0)
1737 IntPtrConstant(Internals::kMapWordXorMask)),((void)0)
1738 IntPtrConstant(Internals::kMapWordSignature)))((void)0);
1739#endif
1740 return map;
1741}
1742
1743TNode<Uint16T> CodeStubAssembler::LoadInstanceType(TNode<HeapObject> object) {
1744 return LoadMapInstanceType(LoadMap(object));
1745}
1746
1747TNode<BoolT> CodeStubAssembler::HasInstanceType(TNode<HeapObject> object,
1748 InstanceType instance_type) {
1749 return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1750}
1751
1752TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1753 TNode<HeapObject> object, InstanceType instance_type) {
1754 return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1755}
1756
1757TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
1758 TNode<HeapObject> any_tagged, InstanceType type) {
1759 /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1760 TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
1761 return Select<BoolT>(
1762 tagged_is_smi, [=]() { return tagged_is_smi; },
1763 [=]() { return DoesntHaveInstanceType(any_tagged, type); });
1764}
1765
1766TNode<BoolT> CodeStubAssembler::IsSpecialReceiverMap(TNode<Map> map) {
1767 TNode<BoolT> is_special =
1768 IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
1769 uint32_t mask = Map::Bits1::HasNamedInterceptorBit::kMask |
1770 Map::Bits1::IsAccessCheckNeededBit::kMask;
1771 USE(mask)do { ::v8::base::Use unused_tmp_array_for_use_macro[]{mask}; (
void)unused_tmp_array_for_use_macro; } while (false)
;
1772 // Interceptors or access checks imply special receiver.
1773 CSA_DCHECK(this,((void)0)
1774 SelectConstant<BoolT>(IsSetWord32(LoadMapBitField(map), mask),((void)0)
1775 is_special, Int32TrueConstant()))((void)0);
1776 return is_special;
1777}
1778
1779TNode<Word32T> CodeStubAssembler::IsStringWrapperElementsKind(TNode<Map> map) {
1780 TNode<Int32T> kind = LoadMapElementsKind(map);
1781 return Word32Or(
1782 Word32Equal(kind, Int32Constant(FAST_STRING_WRAPPER_ELEMENTS)),
1783 Word32Equal(kind, Int32Constant(SLOW_STRING_WRAPPER_ELEMENTS)));
1784}
1785
1786void CodeStubAssembler::GotoIfMapHasSlowProperties(TNode<Map> map,
1787 Label* if_slow) {
1788 GotoIf(IsStringWrapperElementsKind(map), if_slow);
1789 GotoIf(IsSpecialReceiverMap(map), if_slow);
1790 GotoIf(IsDictionaryMap(map), if_slow);
1791}
1792
1793TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1794 TNode<JSReceiver> object) {
1795 CSA_SLOW_DCHECK(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))))((void)0);
1796 TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1797 return Select<HeapObject>(
1798 TaggedIsSmi(properties), [=] { return EmptyFixedArrayConstant(); },
1799 [=] { return CAST(properties)Cast(properties); });
1800}
1801
1802TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1803 TNode<JSReceiver> object) {
1804 CSA_SLOW_DCHECK(this, IsDictionaryMap(LoadMap(object)))((void)0);
1805 TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1806 NodeGenerator<HeapObject> make_empty = [=]() -> TNode<HeapObject> {
1807 if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOLfalse) {
1808 return EmptySwissPropertyDictionaryConstant();
1809 } else {
1810 return EmptyPropertyDictionaryConstant();
1811 }
1812 };
1813 NodeGenerator<HeapObject> cast_properties = [=] {
1814 TNode<HeapObject> dict = CAST(properties)Cast(properties);
1815 if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOLfalse) {
1816 CSA_DCHECK(this, Word32Or(IsSwissNameDictionary(dict),((void)0)
1817 IsGlobalDictionary(dict)))((void)0);
1818 } else {
1819 CSA_DCHECK(this,((void)0)
1820 Word32Or(IsNameDictionary(dict), IsGlobalDictionary(dict)))((void)0);
1821 }
1822 return dict;
1823 };
1824 return Select<HeapObject>(TaggedIsSmi(properties), make_empty,
1825 cast_properties);
1826}
1827
1828TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectLength(
1829 TNode<Context> context, TNode<JSArgumentsObject> array) {
1830 CSA_DCHECK(this, IsJSArgumentsObjectWithLength(context, array))((void)0);
1831 constexpr int offset = JSStrictArgumentsObject::kLengthOffset;
1832 STATIC_ASSERT(offset == JSSloppyArgumentsObject::kLengthOffset)static_assert(offset == JSSloppyArgumentsObject::kLengthOffset
, "offset == JSSloppyArgumentsObject::kLengthOffset")
;
1833 return LoadObjectField(array, offset);
1834}
1835
1836TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(TNode<JSArray> array) {
1837 TNode<Number> length = LoadJSArrayLength(array);
1838 CSA_DCHECK(this, Word32Or(IsFastElementsKind(LoadElementsKind(array)),((void)0)
1839 IsElementsKindInRange(((void)0)
1840 LoadElementsKind(array),((void)0)
1841 FIRST_ANY_NONEXTENSIBLE_ELEMENTS_KIND,((void)0)
1842 LAST_ANY_NONEXTENSIBLE_ELEMENTS_KIND)))((void)0);
1843 // JSArray length is always a positive Smi for fast arrays.
1844 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length))((void)0);
1845 return CAST(length)Cast(length);
1846}
1847
1848TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1849 TNode<FixedArrayBase> array) {
1850 CSA_SLOW_DCHECK(this, IsNotWeakFixedArraySubclass(array))((void)0);
1851 return LoadObjectField<Smi>(array, FixedArrayBase::kLengthOffset);
1852}
1853
1854TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1855 TNode<FixedArrayBase> array) {
1856 return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1857}
1858
1859TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1860 TNode<FeedbackVector> vector) {
1861 return ChangeInt32ToIntPtr(
1862 LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1863}
1864
1865TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1866 TNode<WeakFixedArray> array) {
1867 return LoadObjectField<Smi>(array, WeakFixedArray::kLengthOffset);
1868}
1869
1870TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1871 TNode<WeakFixedArray> array) {
1872 return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1873}
1874
1875TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
1876 TNode<DescriptorArray> array) {
1877 return UncheckedCast<Int32T>(LoadObjectField<Int16T>(
1878 array, DescriptorArray::kNumberOfDescriptorsOffset));
1879}
1880
1881TNode<Int32T> CodeStubAssembler::LoadNumberOfOwnDescriptors(TNode<Map> map) {
1882 TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
1883 return UncheckedCast<Int32T>(
1884 DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bit_field3));
1885}
1886
1887TNode<Int32T> CodeStubAssembler::LoadMapBitField(TNode<Map> map) {
1888 return UncheckedCast<Int32T>(
1889 LoadObjectField<Uint8T>(map, Map::kBitFieldOffset));
1890}
1891
1892TNode<Int32T> CodeStubAssembler::LoadMapBitField2(TNode<Map> map) {
1893 return UncheckedCast<Int32T>(
1894 LoadObjectField<Uint8T>(map, Map::kBitField2Offset));
1895}
1896
1897TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(TNode<Map> map) {
1898 return LoadObjectField<Uint32T>(map, Map::kBitField3Offset);
1899}
1900
1901TNode<Uint16T> CodeStubAssembler::LoadMapInstanceType(TNode<Map> map) {
1902 return LoadObjectField<Uint16T>(map, Map::kInstanceTypeOffset);
1903}
1904
1905TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(TNode<Map> map) {
1906 TNode<Int32T> bit_field2 = LoadMapBitField2(map);
1907 return Signed(DecodeWord32<Map::Bits2::ElementsKindBits>(bit_field2));
1908}
1909
1910TNode<Int32T> CodeStubAssembler::LoadElementsKind(TNode<HeapObject> object) {
1911 return LoadMapElementsKind(LoadMap(object));
1912}
1913
1914TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(TNode<Map> map) {
1915 return LoadObjectField<DescriptorArray>(map, Map::kInstanceDescriptorsOffset);
1916}
1917
1918TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(TNode<Map> map) {
1919 return LoadObjectField<HeapObject>(map, Map::kPrototypeOffset);
1920}
1921
1922TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(TNode<Map> map) {
1923 return ChangeInt32ToIntPtr(
1924 LoadObjectField<Uint8T>(map, Map::kInstanceSizeInWordsOffset));
1925}
1926
1927TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1928 TNode<Map> map) {
1929 // See Map::GetInObjectPropertiesStartInWords() for details.
1930 CSA_DCHECK(this, IsJSObjectMap(map))((void)0);
1931 return ChangeInt32ToIntPtr(LoadObjectField<Uint8T>(
1932 map, Map::kInobjectPropertiesStartOrConstructorFunctionIndexOffset));
1933}
1934
1935TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1936 TNode<Map> map) {
1937 // See Map::GetConstructorFunctionIndex() for details.
1938 CSA_DCHECK(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)))((void)0);
1939 return ChangeInt32ToIntPtr(LoadObjectField<Uint8T>(
1940 map, Map::kInobjectPropertiesStartOrConstructorFunctionIndexOffset));
1941}
1942
1943TNode<Object> CodeStubAssembler::LoadMapConstructor(TNode<Map> map) {
1944 TVARIABLE(Object, result,TVariable<Object> result(LoadObjectField( map, Map::kConstructorOrBackPointerOrNativeContextOffset
), this)
1945 LoadObjectField(TVariable<Object> result(LoadObjectField( map, Map::kConstructorOrBackPointerOrNativeContextOffset
), this)
1946 map, Map::kConstructorOrBackPointerOrNativeContextOffset))TVariable<Object> result(LoadObjectField( map, Map::kConstructorOrBackPointerOrNativeContextOffset
), this)
;
1947
1948 Label done(this), loop(this, &result);
1949 Goto(&loop);
1950 BIND(&loop)Bind(&loop);
1951 {
1952 GotoIf(TaggedIsSmi(result.value()), &done);
1953 TNode<BoolT> is_map_type =
1954 InstanceTypeEqual(LoadInstanceType(CAST(result.value())Cast(result.value())), MAP_TYPE);
1955 GotoIfNot(is_map_type, &done);
1956 result =
1957 LoadObjectField(CAST(result.value())Cast(result.value()),
1958 Map::kConstructorOrBackPointerOrNativeContextOffset);
1959 Goto(&loop);
1960 }
1961 BIND(&done)Bind(&done);
1962 return result.value();
1963}
1964
1965TNode<WordT> CodeStubAssembler::LoadMapEnumLength(TNode<Map> map) {
1966 TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
1967 return DecodeWordFromWord32<Map::Bits3::EnumLengthBits>(bit_field3);
1968}
1969
1970TNode<Object> CodeStubAssembler::LoadMapBackPointer(TNode<Map> map) {
1971 TNode<HeapObject> object = CAST(LoadObjectField(Cast(LoadObjectField( map, Map::kConstructorOrBackPointerOrNativeContextOffset
))
1972 map, Map::kConstructorOrBackPointerOrNativeContextOffset))Cast(LoadObjectField( map, Map::kConstructorOrBackPointerOrNativeContextOffset
))
;
1973 return Select<Object>(
1974 IsMap(object), [=] { return object; },
1975 [=] { return UndefinedConstant(); });
1976}
1977
1978TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
1979 TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
1980 // This check can have false positives, since it applies to any
1981 // JSPrimitiveWrapper type.
1982 GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);
1983
1984 TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
1985 GotoIf(IsSetWord32(bit_field3, Map::Bits3::IsDictionaryMapBit::kMask),
1986 bailout);
1987
1988 return bit_field3;
1989}
1990
1991TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
1992 TNode<JSReceiver> receiver, Label* if_no_hash) {
1993 TVARIABLE(IntPtrT, var_hash)TVariable<IntPtrT> var_hash(this);
1994 Label done(this), if_smi(this), if_property_array(this),
1995 if_swiss_property_dictionary(this), if_property_dictionary(this),
1996 if_fixed_array(this);
1997
1998 TNode<Object> properties_or_hash =
1999 LoadObjectField(receiver, JSReceiver::kPropertiesOrHashOffset);
2000 GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
2001
2002 TNode<HeapObject> properties = CAST(properties_or_hash)Cast(properties_or_hash);
2003 TNode<Uint16T> properties_instance_type = LoadInstanceType(properties);
2004
2005 GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
2006 &if_property_array);
2007 if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOLfalse) {
2008 GotoIf(
2009 InstanceTypeEqual(properties_instance_type, SWISS_NAME_DICTIONARY_TYPE),
2010 &if_swiss_property_dictionary);
2011 }
2012 Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
2013 &if_property_dictionary, &if_fixed_array);
2014
2015 BIND(&if_fixed_array)Bind(&if_fixed_array);
2016 {
2017 var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
2018 Goto(&done);
2019 }
2020
2021 BIND(&if_smi)Bind(&if_smi);
2022 {
2023 var_hash = SmiUntag(CAST(properties_or_hash)Cast(properties_or_hash));
2024 Goto(&done);
2025 }
2026
2027 BIND(&if_property_array)Bind(&if_property_array);
2028 {
2029 TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
2030 properties, PropertyArray::kLengthAndHashOffset);
2031 var_hash = Signed(DecodeWord<PropertyArray::HashField>(length_and_hash));
2032 Goto(&done);
2033 }
2034 if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOLfalse) {
2035 BIND(&if_swiss_property_dictionary)Bind(&if_swiss_property_dictionary);
2036 {
2037 var_hash = Signed(
2038 ChangeUint32ToWord(LoadSwissNameDictionaryHash(CAST(properties)Cast(properties))));
2039 Goto(&done);
2040 }
2041 }
2042
2043 BIND(&if_property_dictionary)Bind(&if_property_dictionary);
2044 {
2045 var_hash = SmiUntag(CAST(LoadFixedArrayElement(Cast(LoadFixedArrayElement( Cast(properties), NameDictionary::
kObjectHashIndex))
2046 CAST(properties), NameDictionary::kObjectHashIndex))Cast(LoadFixedArrayElement( Cast(properties), NameDictionary::
kObjectHashIndex))
);
2047 Goto(&done);
2048 }
2049
2050 BIND(&done)Bind(&done);
2051 if (if_no_hash != nullptr) {
2052 GotoIf(IntPtrEqual(var_hash.value(),
2053 IntPtrConstant(PropertyArray::kNoHashSentinel)),
2054 if_no_hash);
2055 }
2056 return var_hash.value();
2057}
2058
2059TNode<Uint32T> CodeStubAssembler::LoadNameHashAssumeComputed(TNode<Name> name) {
2060 TNode<Uint32T> hash_field = LoadNameRawHashField(name);
2061 CSA_DCHECK(this, IsClearWord32(hash_field, Name::kHashNotComputedMask))((void)0);
2062 return DecodeWord32<Name::HashBits>(hash_field);
2063}
2064
2065TNode<Uint32T> CodeStubAssembler::LoadNameHash(TNode<Name> name,
2066 Label* if_hash_not_computed) {
2067 TNode<Uint32T> raw_hash_field = LoadNameRawHashField(name);
2068 if (if_hash_not_computed != nullptr) {
2069 GotoIf(IsSetWord32(raw_hash_field, Name::kHashNotComputedMask),
2070 if_hash_not_computed);
2071 }
2072 return DecodeWord32<Name::HashBits>(raw_hash_field);
2073}
2074
2075TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(TNode<String> string) {
2076 return SmiFromIntPtr(LoadStringLengthAsWord(string));
2077}
2078
2079TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(TNode<String> string) {
2080 return Signed(ChangeUint32ToWord(LoadStringLengthAsWord32(string)));
2081}
2082
2083TNode<Uint32T> CodeStubAssembler::LoadStringLengthAsWord32(
2084 TNode<String> string) {
2085 return LoadObjectField<Uint32T>(string, String::kLengthOffset);
2086}
2087
2088TNode<Object> CodeStubAssembler::LoadJSPrimitiveWrapperValue(
2089 TNode<JSPrimitiveWrapper> object) {
2090 return LoadObjectField(object, JSPrimitiveWrapper::kValueOffset);
2091}
2092
2093void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
2094 Label* if_smi, Label* if_cleared,
2095 Label* if_weak, Label* if_strong,
2096 TVariable<Object>* extracted) {
2097 Label inner_if_smi(this), inner_if_strong(this);
2098
2099 GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
2100
2101 GotoIf(IsCleared(maybe_object), if_cleared);
2102
2103 GotoIf(IsStrong(maybe_object), &inner_if_strong);
2104
2105 *extracted = GetHeapObjectAssumeWeak(maybe_object);
2106 Goto(if_weak);
2107
2108 BIND(&inner_if_smi)Bind(&inner_if_smi);
2109 *extracted = CAST(maybe_object)Cast(maybe_object);
2110 Goto(if_smi);
2111
2112 BIND(&inner_if_strong)Bind(&inner_if_strong);
2113 *extracted = CAST(maybe_object)Cast(maybe_object);
2114 Goto(if_strong);
2115}
2116
2117void CodeStubAssembler::DcheckHasValidMap(TNode<HeapObject> object) {
2118#ifdef V8_MAP_PACKING
2119 // Test if the map is an unpacked and valid map
2120 CSA_DCHECK(this, IsMap(LoadMap(object)))((void)0);
2121#endif
2122}
2123
2124TNode<BoolT> CodeStubAssembler::IsStrong(TNode<MaybeObject> value) {
2125 return Word32Equal(Word32And(TruncateIntPtrToInt32(
2126 BitcastTaggedToWordForTagAndSmiBits(value)),
2127 Int32Constant(kHeapObjectTagMask)),
2128 Int32Constant(kHeapObjectTag));
2129}
2130
2131TNode<HeapObject> CodeStubAssembler::GetHeapObjectIfStrong(
2132 TNode<MaybeObject> value, Label* if_not_strong) {
2133 GotoIfNot(IsStrong(value), if_not_strong);
2134 return CAST(value)Cast(value);
2135}
2136
2137TNode<BoolT> CodeStubAssembler::IsWeakOrCleared(TNode<MaybeObject> value) {
2138 return Word32Equal(Word32And(TruncateIntPtrToInt32(
2139 BitcastTaggedToWordForTagAndSmiBits(value)),
2140 Int32Constant(kHeapObjectTagMask)),
2141 Int32Constant(kWeakHeapObjectTag));
2142}
2143
2144TNode<BoolT> CodeStubAssembler::IsCleared(TNode<MaybeObject> value) {
2145 return Word32Equal(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
2146 Int32Constant(kClearedWeakHeapObjectLower32));
2147}
2148
2149TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
2150 TNode<MaybeObject> value) {
2151 CSA_DCHECK(this, IsWeakOrCleared(value))((void)0);
2152 CSA_DCHECK(this, IsNotCleared(value))((void)0);
2153 return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
2154 BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
2155}
2156
2157TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
2158 TNode<MaybeObject> value, Label* if_cleared) {
2159 GotoIf(IsCleared(value), if_cleared);
2160 return GetHeapObjectAssumeWeak(value);
2161}
2162
2163// This version generates
2164// (maybe_object & ~mask) == value
2165// It works for non-Smi |maybe_object| and for both Smi and HeapObject values
2166// but requires a big constant for ~mask.
2167TNode<BoolT> CodeStubAssembler::IsWeakReferenceToObject(
2168 TNode<MaybeObject> maybe_object, TNode<Object> value) {
2169 CSA_DCHECK(this, TaggedIsNotSmi(maybe_object))((void)0);
2170 if (COMPRESS_POINTERS_BOOLfalse) {
2171 return Word32Equal(
2172 Word32And(TruncateWordToInt32(BitcastMaybeObjectToWord(maybe_object)),
2173 Uint32Constant(~static_cast<uint32_t>(kWeakHeapObjectMask))),
2174 TruncateWordToInt32(BitcastTaggedToWord(value)));
2175 } else {
2176 return WordEqual(WordAnd(BitcastMaybeObjectToWord(maybe_object),
2177 IntPtrConstant(~kWeakHeapObjectMask)),
2178 BitcastTaggedToWord(value));
2179 }
2180}
2181
2182// This version generates
2183// maybe_object == (heap_object | mask)
2184// It works for any |maybe_object| values and generates a better code because it
2185// uses a small constant for mask.
2186TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(
2187 TNode<MaybeObject> maybe_object, TNode<HeapObject> heap_object) {
2188 if (COMPRESS_POINTERS_BOOLfalse) {
2189 return Word32Equal(
2190 TruncateWordToInt32(BitcastMaybeObjectToWord(maybe_object)),
2191 Word32Or(TruncateWordToInt32(BitcastTaggedToWord(heap_object)),
2192 Int32Constant(kWeakHeapObjectMask)));
2193 } else {
2194 return WordEqual(BitcastMaybeObjectToWord(maybe_object),
2195 WordOr(BitcastTaggedToWord(heap_object),
2196 IntPtrConstant(kWeakHeapObjectMask)));
2197 }
2198}
2199
2200TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
2201 return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
2202 WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
2203}
2204
2205template <>
2206TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<FixedArray> array) {
2207 return LoadAndUntagFixedArrayBaseLength(array);
2208}
2209
2210template <>
2211TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<WeakFixedArray> array) {
2212 return LoadAndUntagWeakFixedArrayLength(array);
2213}
2214
2215template <>
2216TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<PropertyArray> array) {
2217 return LoadPropertyArrayLength(array);
2218}
2219
2220template <>
2221TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
2222 TNode<DescriptorArray> array) {
2223 return IntPtrMul(ChangeInt32ToIntPtr(LoadNumberOfDescriptors(array)),
2224 IntPtrConstant(DescriptorArray::kEntrySize));
2225}
2226
2227template <>
2228TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
2229 TNode<TransitionArray> array) {
2230 return LoadAndUntagWeakFixedArrayLength(array);
2231}
2232
2233template <typename Array, typename TIndex, typename TValue>
2234TNode<TValue> CodeStubAssembler::LoadArrayElement(TNode<Array> array,
2235 int array_header_size,
2236 TNode<TIndex> index_node,
2237 int additional_offset) {
2238 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
2239 static_assert(std::is_same<TIndex, Smi>::value ||
2240 std::is_same<TIndex, UintPtrT>::value ||
2241 std::is_same<TIndex, IntPtrT>::value,
2242 "Only Smi, UintPtrT or IntPtrT indices are allowed");
2243 CSA_DCHECK(this, IntPtrGreaterThanOrEqual(ParameterToIntPtr(index_node),((void)0)
2244 IntPtrConstant(0)))((void)0);
2245 DCHECK(IsAligned(additional_offset, kTaggedSize))((void) 0);
2246 int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
2247 TNode<IntPtrT> offset =
2248 ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS, header_size);
2249 CSA_DCHECK(this, IsOffsetInBounds(offset, LoadArrayLength(array),((void)0)
2250 array_header_size))((void)0);
2251 constexpr MachineType machine_type = MachineTypeOf<TValue>::value;
2252 return UncheckedCast<TValue>(LoadFromObject(machine_type, array, offset));
2253}
2254
2255template V8_EXPORT_PRIVATE TNode<MaybeObject>
2256CodeStubAssembler::LoadArrayElement<TransitionArray, IntPtrT>(
2257 TNode<TransitionArray>, int, TNode<IntPtrT>, int);
2258
2259template <typename TIndex>
2260TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
2261 TNode<FixedArray> object, TNode<TIndex> index, int additional_offset,
2262 CheckBounds check_bounds) {
2263 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
2264 static_assert(std::is_same<TIndex, Smi>::value ||
2265 std::is_same<TIndex, UintPtrT>::value ||
2266 std::is_same<TIndex, IntPtrT>::value,
2267 "Only Smi, UintPtrT or IntPtrT indexes are allowed");
2268 CSA_DCHECK(this, IsFixedArraySubclass(object))((void)0);
2269 CSA_DCHECK(this, IsNotWeakFixedArraySubclass(object))((void)0);
2270
2271 if (NeedsBoundsCheck(check_bounds)) {
2272 FixedArrayBoundsCheck(object, index, additional_offset);
2273 }
2274 TNode<MaybeObject> element = LoadArrayElement(object, FixedArray::kHeaderSize,
2275 index, additional_offset);
2276 return CAST(element)Cast(element);
2277}
2278
2279template V8_EXPORT_PRIVATE TNode<Object>
2280CodeStubAssembler::LoadFixedArrayElement<Smi>(TNode<FixedArray>, TNode<Smi>,
2281 int, CheckBounds);
2282template V8_EXPORT_PRIVATE TNode<Object>
2283CodeStubAssembler::LoadFixedArrayElement<UintPtrT>(TNode<FixedArray>,
2284 TNode<UintPtrT>, int,
2285 CheckBounds);
2286template V8_EXPORT_PRIVATE TNode<Object>
2287CodeStubAssembler::LoadFixedArrayElement<IntPtrT>(TNode<FixedArray>,
2288 TNode<IntPtrT>, int,
2289 CheckBounds);
2290
2291void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
2292 TNode<Smi> index,
2293 int additional_offset) {
2294 if (!FLAG_fixed_array_bounds_checks) return;
2295 DCHECK(IsAligned(additional_offset, kTaggedSize))((void) 0);
2296 TNode<Smi> effective_index;
2297 Smi constant_index;
2298 bool index_is_constant = TryToSmiConstant(index, &constant_index);
2299 if (index_is_constant) {
2300 effective_index = SmiConstant(Smi::ToInt(constant_index) +
2301 additional_offset / kTaggedSize);
2302 } else {
2303 effective_index =
2304 SmiAdd(index, SmiConstant(additional_offset / kTaggedSize));
2305 }
2306 CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)))(this)->FastCheck(SmiBelow(effective_index, LoadFixedArrayBaseLength
(array)))
;
2307}
2308
2309void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
2310 TNode<IntPtrT> index,
2311 int additional_offset) {
2312 if (!FLAG_fixed_array_bounds_checks) return;
2313 DCHECK(IsAligned(additional_offset, kTaggedSize))((void) 0);
2314 // IntPtrAdd does constant-folding automatically.
2315 TNode<IntPtrT> effective_index =
2316 IntPtrAdd(index, IntPtrConstant(additional_offset / kTaggedSize));
2317 CSA_CHECK(this, UintPtrLessThan(effective_index,(this)->FastCheck(UintPtrLessThan(effective_index, LoadAndUntagFixedArrayBaseLength
(array)))
2318 LoadAndUntagFixedArrayBaseLength(array)))(this)->FastCheck(UintPtrLessThan(effective_index, LoadAndUntagFixedArrayBaseLength
(array)))
;
2319}
2320
2321TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
2322 TNode<PropertyArray> object, TNode<IntPtrT> index) {
2323 int additional_offset = 0;
2324 return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,Cast(LoadArrayElement(object, PropertyArray::kHeaderSize, index
, additional_offset))
2325 additional_offset))Cast(LoadArrayElement(object, PropertyArray::kHeaderSize, index
, additional_offset))
;
2326}
2327
2328TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
2329 TNode<PropertyArray> object) {
2330 TNode<IntPtrT> value =
2331 LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
2332 return Signed(DecodeWord<PropertyArray::LengthField>(value));
2333}
2334
2335TNode<RawPtrT> CodeStubAssembler::LoadJSTypedArrayDataPtr(
2336 TNode<JSTypedArray> typed_array) {
2337 // Data pointer = external_pointer + static_cast<Tagged_t>(base_pointer).
2338 TNode<RawPtrT> external_pointer =
2339 LoadJSTypedArrayExternalPointerPtr(typed_array);
2340
2341 TNode<IntPtrT> base_pointer;
2342 if (COMPRESS_POINTERS_BOOLfalse) {
2343 TNode<Int32T> compressed_base =
2344 LoadObjectField<Int32T>(typed_array, JSTypedArray::kBasePointerOffset);
2345 // Zero-extend TaggedT to WordT according to current compression scheme
2346 // so that the addition with |external_pointer| (which already contains
2347 // compensated offset value) below will decompress the tagged value.
2348 // See JSTypedArray::ExternalPointerCompensationForOnHeapArray() for
2349 // details.
2350 base_pointer = Signed(ChangeUint32ToWord(compressed_base));
2351 } else {
2352 base_pointer =
2353 LoadObjectField<IntPtrT>(typed_array, JSTypedArray::kBasePointerOffset);
2354 }
2355 return RawPtrAdd(external_pointer, base_pointer);
2356}
2357
2358TNode<BigInt> CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
2359 TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset) {
2360 if (Is64()) {
2361 TNode<IntPtrT> value = Load<IntPtrT>(data_pointer, offset);
2362 return BigIntFromInt64(value);
2363 } else {
2364 DCHECK(!Is64())((void) 0);
2365#if defined(V8_TARGET_BIG_ENDIAN)
2366 TNode<IntPtrT> high = Load<IntPtrT>(data_pointer, offset);
2367 TNode<IntPtrT> low = Load<IntPtrT>(
2368 data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2369#else
2370 TNode<IntPtrT> low = Load<IntPtrT>(data_pointer, offset);
2371 TNode<IntPtrT> high = Load<IntPtrT>(
2372 data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2373#endif
2374 return BigIntFromInt32Pair(low, high);
2375 }
2376}
2377
2378TNode<BigInt> CodeStubAssembler::BigIntFromInt32Pair(TNode<IntPtrT> low,
2379 TNode<IntPtrT> high) {
2380 DCHECK(!Is64())((void) 0);
2381 TVARIABLE(BigInt, var_result)TVariable<BigInt> var_result(this);
2382 TVARIABLE(Word32T, var_sign, Int32Constant(BigInt::SignBits::encode(false)))TVariable<Word32T> var_sign(Int32Constant(BigInt::SignBits
::encode(false)), this)
;
2383 TVARIABLE(IntPtrT, var_high, high)TVariable<IntPtrT> var_high(high, this);
2384 TVARIABLE(IntPtrT, var_low, low)TVariable<IntPtrT> var_low(low, this);
2385 Label high_zero(this), negative(this), allocate_one_digit(this),
2386 allocate_two_digits(this), if_zero(this), done(this);
2387
2388 GotoIf(IntPtrEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
2389 Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
2390 &allocate_two_digits);
2391
2392 BIND(&high_zero)Bind(&high_zero);
2393 Branch(IntPtrEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
2394 &allocate_one_digit);
2395
2396 BIND(&negative)Bind(&negative);
2397 {
2398 var_sign = Int32Constant(BigInt::SignBits::encode(true));
2399 // We must negate the value by computing "0 - (high|low)", performing
2400 // both parts of the subtraction separately and manually taking care
2401 // of the carry bit (which is 1 iff low != 0).
2402 var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
2403 Label carry(this), no_carry(this);
2404 Branch(IntPtrEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
2405 BIND(&carry)Bind(&carry);
2406 var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
2407 Goto(&no_carry);
2408 BIND(&no_carry)Bind(&no_carry);
2409 var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
2410 // var_high was non-zero going into this block, but subtracting the
2411 // carry bit from it could bring us back onto the "one digit" path.
2412 Branch(IntPtrEqual(var_high.value(), IntPtrConstant(0)),
2413 &allocate_one_digit, &allocate_two_digits);
2414 }
2415
2416 BIND(&allocate_one_digit)Bind(&allocate_one_digit);
2417 {
2418 var_result = AllocateRawBigInt(IntPtrConstant(1));
2419 StoreBigIntBitfield(var_result.value(),
2420 Word32Or(var_sign.value(),
2421 Int32Constant(BigInt::LengthBits::encode(1))));
2422 StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2423 Goto(&done);
2424 }
2425
2426 BIND(&allocate_two_digits)Bind(&allocate_two_digits);
2427 {
2428 var_result = AllocateRawBigInt(IntPtrConstant(2));
2429 StoreBigIntBitfield(var_result.value(),
2430 Word32Or(var_sign.value(),
2431 Int32Constant(BigInt::LengthBits::encode(2))));
2432 StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2433 StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
2434 Goto(&done);
2435 }
2436
2437 BIND(&if_zero)Bind(&if_zero);
2438 var_result = AllocateBigInt(IntPtrConstant(0));
2439 Goto(&done);
2440
2441 BIND(&done)Bind(&done);
2442 return var_result.value();
2443}
2444
2445TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
2446 DCHECK(Is64())((void) 0);
2447 TVARIABLE(BigInt, var_result)TVariable<BigInt> var_result(this);
2448 Label done(this), if_positive(this), if_negative(this), if_zero(this);
2449 GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
2450 var_result = AllocateRawBigInt(IntPtrConstant(1));
2451 Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
2452 &if_negative);
2453
2454 BIND(&if_positive)Bind(&if_positive);
2455 {
2456 StoreBigIntBitfield(var_result.value(),
2457 Int32Constant(BigInt::SignBits::encode(false) |
2458 BigInt::LengthBits::encode(1)));
2459 StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
2460 Goto(&done);
2461 }
2462
2463 BIND(&if_negative)Bind(&if_negative);
2464 {
2465 StoreBigIntBitfield(var_result.value(),
2466 Int32Constant(BigInt::SignBits::encode(true) |
2467 BigInt::LengthBits::encode(1)));
2468 StoreBigIntDigit(var_result.value(), 0,
2469 Unsigned(IntPtrSub(IntPtrConstant(0), value)));
2470 Goto(&done);
2471 }
2472
2473 BIND(&if_zero)Bind(&if_zero);
2474 {
2475 var_result = AllocateBigInt(IntPtrConstant(0));
2476 Goto(&done);
2477 }
2478
2479 BIND(&done)Bind(&done);
2480 return var_result.value();
2481}
2482
2483TNode<BigInt> CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2484 TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset) {
2485 Label if_zero(this), done(this);
2486 if (Is64()) {
2487 TNode<UintPtrT> value = Load<UintPtrT>(data_pointer, offset);
2488 return BigIntFromUint64(value);
2489 } else {
2490 DCHECK(!Is64())((void) 0);
2491#if defined(V8_TARGET_BIG_ENDIAN)
2492 TNode<UintPtrT> high = Load<UintPtrT>(data_pointer, offset);
2493 TNode<UintPtrT> low = Load<UintPtrT>(
2494 data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2495#else
2496 TNode<UintPtrT> low = Load<UintPtrT>(data_pointer, offset);
2497 TNode<UintPtrT> high = Load<UintPtrT>(
2498 data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2499#endif
2500 return BigIntFromUint32Pair(low, high);
2501 }
2502}
2503
2504TNode<BigInt> CodeStubAssembler::BigIntFromUint32Pair(TNode<UintPtrT> low,
2505 TNode<UintPtrT> high) {
2506 DCHECK(!Is64())((void) 0);
2507 TVARIABLE(BigInt, var_result)TVariable<BigInt> var_result(this);
2508 Label high_zero(this), if_zero(this), done(this);
2509
2510 GotoIf(IntPtrEqual(high, IntPtrConstant(0)), &high_zero);
2511 var_result = AllocateBigInt(IntPtrConstant(2));
2512 StoreBigIntDigit(var_result.value(), 0, low);
2513 StoreBigIntDigit(var_result.value(), 1, high);
2514 Goto(&done);
2515
2516 BIND(&high_zero)Bind(&high_zero);
2517 GotoIf(IntPtrEqual(low, IntPtrConstant(0)), &if_zero);
2518 var_result = AllocateBigInt(IntPtrConstant(1));
2519 StoreBigIntDigit(var_result.value(), 0, low);
2520 Goto(&done);
2521
2522 BIND(&if_zero)Bind(&if_zero);
2523 var_result = AllocateBigInt(IntPtrConstant(0));
2524 Goto(&done);
2525
2526 BIND(&done)Bind(&done);
2527 return var_result.value();
2528}
2529
2530TNode<BigInt> CodeStubAssembler::BigIntFromUint64(TNode<UintPtrT> value) {
2531 DCHECK(Is64())((void) 0);
2532 TVARIABLE(BigInt, var_result)TVariable<BigInt> var_result(this);
2533 Label done(this), if_zero(this);
2534 GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
2535 var_result = AllocateBigInt(IntPtrConstant(1));
2536 StoreBigIntDigit(var_result.value(), 0, value);
2537 Goto(&done);
2538
2539 BIND(&if_zero)Bind(&if_zero);
2540 var_result = AllocateBigInt(IntPtrConstant(0));
2541 Goto(&done);
2542 BIND(&done)Bind(&done);
2543 return var_result.value();
2544}
2545
2546TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2547 TNode<RawPtrT> data_pointer, TNode<UintPtrT> index,
2548 ElementsKind elements_kind) {
2549 TNode<IntPtrT> offset =
2550 ElementOffsetFromIndex(Signed(index), elements_kind, 0);
2551 switch (elements_kind) {
2552 case UINT8_ELEMENTS: /* fall through */
2553 case UINT8_CLAMPED_ELEMENTS:
2554 return SmiFromInt32(Load<Uint8T>(data_pointer, offset));
2555 case INT8_ELEMENTS:
2556 return SmiFromInt32(Load<Int8T>(data_pointer, offset));
2557 case UINT16_ELEMENTS:
2558 return SmiFromInt32(Load<Uint16T>(data_pointer, offset));
2559 case INT16_ELEMENTS:
2560 return SmiFromInt32(Load<Int16T>(data_pointer, offset));
2561 case UINT32_ELEMENTS:
2562 return ChangeUint32ToTagged(Load<Uint32T>(data_pointer, offset));
2563 case INT32_ELEMENTS:
2564 return ChangeInt32ToTagged(Load<Int32T>(data_pointer, offset));
2565 case FLOAT32_ELEMENTS:
2566 return AllocateHeapNumberWithValue(
2567 ChangeFloat32ToFloat64(Load<Float32T>(data_pointer, offset)));
2568 case FLOAT64_ELEMENTS:
2569 return AllocateHeapNumberWithValue(Load<Float64T>(data_pointer, offset));
2570 case BIGINT64_ELEMENTS:
2571 return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
2572 case BIGUINT64_ELEMENTS:
2573 return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
2574 default:
2575 UNREACHABLE()V8_Fatal("unreachable code");
2576 }
2577}
2578
2579TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2580 TNode<RawPtrT> data_pointer, TNode<UintPtrT> index,
2581 TNode<Int32T> elements_kind) {
2582 TVARIABLE(Numeric, var_result)TVariable<Numeric> var_result(this);
2583 Label done(this), if_unknown_type(this, Label::kDeferred);
2584 int32_t elements_kinds[] = {
2585#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
2586 TYPED_ARRAYS(TYPED_ARRAY_CASE)TYPED_ARRAY_CASE(Uint8, uint8, UINT8, uint8_t) TYPED_ARRAY_CASE
(Int8, int8, INT8, int8_t) TYPED_ARRAY_CASE(Uint16, uint16, UINT16
, uint16_t) TYPED_ARRAY_CASE(Int16, int16, INT16, int16_t) TYPED_ARRAY_CASE
(Uint32, uint32, UINT32, uint32_t) TYPED_ARRAY_CASE(Int32, int32
, INT32, int32_t) TYPED_ARRAY_CASE(Float32, float32, FLOAT32,
float) TYPED_ARRAY_CASE(Float64, float64, FLOAT64, double) TYPED_ARRAY_CASE
(Uint8Clamped, uint8_clamped, UINT8_CLAMPED, uint8_t) TYPED_ARRAY_CASE
(BigUint64, biguint64, BIGUINT64, uint64_t) TYPED_ARRAY_CASE(
BigInt64, bigint64, BIGINT64, int64_t)
RAB_GSAB_TYPED_ARRAYS(TYPED_ARRAY_CASE)TYPED_ARRAY_CASE(RabGsabUint8, rab_gsab_uint8, RAB_GSAB_UINT8
, uint8_t) TYPED_ARRAY_CASE(RabGsabInt8, rab_gsab_int8, RAB_GSAB_INT8
, int8_t) TYPED_ARRAY_CASE(RabGsabUint16, rab_gsab_uint16, RAB_GSAB_UINT16
, uint16_t) TYPED_ARRAY_CASE(RabGsabInt16, rab_gsab_int16, RAB_GSAB_INT16
, int16_t) TYPED_ARRAY_CASE(RabGsabUint32, rab_gsab_uint32, RAB_GSAB_UINT32
, uint32_t) TYPED_ARRAY_CASE(RabGsabInt32, rab_gsab_int32, RAB_GSAB_INT32
, int32_t) TYPED_ARRAY_CASE(RabGsabFloat32, rab_gsab_float32,
RAB_GSAB_FLOAT32, float) TYPED_ARRAY_CASE(RabGsabFloat64, rab_gsab_float64
, RAB_GSAB_FLOAT64, double) TYPED_ARRAY_CASE(RabGsabUint8Clamped
, rab_gsab_uint8_clamped, RAB_GSAB_UINT8_CLAMPED, uint8_t) TYPED_ARRAY_CASE
(RabGsabBigUint64, rab_gsab_biguint64, RAB_GSAB_BIGUINT64, uint64_t
) TYPED_ARRAY_CASE(RabGsabBigInt64, rab_gsab_bigint64, RAB_GSAB_BIGINT64
, int64_t)
2587#undef TYPED_ARRAY_CASE
2588 };
2589
2590#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
2591 TYPED_ARRAYS(TYPED_ARRAY_CASE)TYPED_ARRAY_CASE(Uint8, uint8, UINT8, uint8_t) TYPED_ARRAY_CASE
(Int8, int8, INT8, int8_t) TYPED_ARRAY_CASE(Uint16, uint16, UINT16
, uint16_t) TYPED_ARRAY_CASE(Int16, int16, INT16, int16_t) TYPED_ARRAY_CASE
(Uint32, uint32, UINT32, uint32_t) TYPED_ARRAY_CASE(Int32, int32
, INT32, int32_t) TYPED_ARRAY_CASE(Float32, float32, FLOAT32,
float) TYPED_ARRAY_CASE(Float64, float64, FLOAT64, double) TYPED_ARRAY_CASE
(Uint8Clamped, uint8_clamped, UINT8_CLAMPED, uint8_t) TYPED_ARRAY_CASE
(BigUint64, biguint64, BIGUINT64, uint64_t) TYPED_ARRAY_CASE(
BigInt64, bigint64, BIGINT64, int64_t)
2592#undef TYPED_ARRAY_CASE
2593
2594 Label* elements_kind_labels[] = {
2595#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
2596 TYPED_ARRAYS(TYPED_ARRAY_CASE)TYPED_ARRAY_CASE(Uint8, uint8, UINT8, uint8_t) TYPED_ARRAY_CASE
(Int8, int8, INT8, int8_t) TYPED_ARRAY_CASE(Uint16, uint16, UINT16
, uint16_t) TYPED_ARRAY_CASE(Int16, int16, INT16, int16_t) TYPED_ARRAY_CASE
(Uint32, uint32, UINT32, uint32_t) TYPED_ARRAY_CASE(Int32, int32
, INT32, int32_t) TYPED_ARRAY_CASE(Float32, float32, FLOAT32,
float) TYPED_ARRAY_CASE(Float64, float64, FLOAT64, double) TYPED_ARRAY_CASE
(Uint8Clamped, uint8_clamped, UINT8_CLAMPED, uint8_t) TYPED_ARRAY_CASE
(BigUint64, biguint64, BIGUINT64, uint64_t) TYPED_ARRAY_CASE(
BigInt64, bigint64, BIGINT64, int64_t)
2597 // The same labels again for RAB / GSAB. We dispatch RAB / GSAB elements
2598 // kinds to the corresponding non-RAB / GSAB elements kinds.
2599 TYPED_ARRAYS(TYPED_ARRAY_CASE)TYPED_ARRAY_CASE(Uint8, uint8, UINT8, uint8_t) TYPED_ARRAY_CASE
(Int8, int8, INT8, int8_t) TYPED_ARRAY_CASE(Uint16, uint16, UINT16
, uint16_t) TYPED_ARRAY_CASE(Int16, int16, INT16, int16_t) TYPED_ARRAY_CASE
(Uint32, uint32, UINT32, uint32_t) TYPED_ARRAY_CASE(Int32, int32
, INT32, int32_t) TYPED_ARRAY_CASE(Float32, float32, FLOAT32,
float) TYPED_ARRAY_CASE(Float64, float64, FLOAT64, double) TYPED_ARRAY_CASE
(Uint8Clamped, uint8_clamped, UINT8_CLAMPED, uint8_t) TYPED_ARRAY_CASE
(BigUint64, biguint64, BIGUINT64, uint64_t) TYPED_ARRAY_CASE(
BigInt64, bigint64, BIGINT64, int64_t)
2600#undef TYPED_ARRAY_CASE
2601 };
2602 STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels))static_assert((sizeof(ArraySizeHelper(elements_kinds))) == (sizeof
(ArraySizeHelper(elements_kind_labels))), "arraysize(elements_kinds) == arraysize(elements_kind_labels)"
)
;
2603
2604 Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
2605 arraysize(elements_kinds)(sizeof(ArraySizeHelper(elements_kinds))));
2606
2607 BIND(&if_unknown_type)Bind(&if_unknown_type);
2608 Unreachable();
2609
2610#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
2611 BIND(&if_##type##array)Bind(&if_##type##array); \
2612 { \
2613 var_result = LoadFixedTypedArrayElementAsTagged(data_pointer, index, \
2614 TYPE##_ELEMENTS); \
2615 Goto(&done); \
2616 }
2617 TYPED_ARRAYS(TYPED_ARRAY_CASE)TYPED_ARRAY_CASE(Uint8, uint8, UINT8, uint8_t) TYPED_ARRAY_CASE
(Int8, int8, INT8, int8_t) TYPED_ARRAY_CASE(Uint16, uint16, UINT16
, uint16_t) TYPED_ARRAY_CASE(Int16, int16, INT16, int16_t) TYPED_ARRAY_CASE
(Uint32, uint32, UINT32, uint32_t) TYPED_ARRAY_CASE(Int32, int32
, INT32, int32_t) TYPED_ARRAY_CASE(Float32, float32, FLOAT32,
float) TYPED_ARRAY_CASE(Float64, float64, FLOAT64, double) TYPED_ARRAY_CASE
(Uint8Clamped, uint8_clamped, UINT8_CLAMPED, uint8_t) TYPED_ARRAY_CASE
(BigUint64, biguint64, BIGUINT64, uint64_t) TYPED_ARRAY_CASE(
BigInt64, bigint64, BIGINT64, int64_t)
2618#undef TYPED_ARRAY_CASE
2619
2620 BIND(&done)Bind(&done);
2621 return var_result.value();
2622}
2623
2624template <typename TIndex>
2625TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2626 TNode<FeedbackVector> feedback_vector, TNode<TIndex> slot,
2627 int additional_offset) {
2628 int32_t header_size = FeedbackVector::kRawFeedbackSlotsOffset +
2629 additional_offset - kHeapObjectTag;
2630 TNode<IntPtrT> offset =
2631 ElementOffsetFromIndex(slot, HOLEY_ELEMENTS, header_size);
2632 CSA_SLOW_DCHECK(((void)0)
2633 this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(feedback_vector),((void)0)
2634 FeedbackVector::kHeaderSize))((void)0);
2635 return Load<MaybeObject>(feedback_vector, offset);
2636}
2637
2638template TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2639 TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
2640 int additional_offset);
2641template TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2642 TNode<FeedbackVector> feedback_vector, TNode<IntPtrT> slot,
2643 int additional_offset);
2644template TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2645 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
2646 int additional_offset);
2647
2648template <typename Array>
2649TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
2650 TNode<Array> object, int array_header_size, TNode<IntPtrT> index,
2651 int additional_offset) {
2652 DCHECK(IsAligned(additional_offset, kTaggedSize))((void) 0);
2653 int endian_correction = 0;
2654#if V8_TARGET_LITTLE_ENDIAN1
2655 if (SmiValuesAre32Bits()) endian_correction = 4;
2656#endif
2657 int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
2658 endian_correction;
2659 TNode<IntPtrT> offset =
2660 ElementOffsetFromIndex(index, HOLEY_ELEMENTS, header_size);
2661 CSA_DCHECK(this, IsOffsetInBounds(offset, LoadArrayLength(object),((void)0)
2662 array_header_size + endian_correction))((void)0);
2663 if (SmiValuesAre32Bits()) {
2664 return Load<Int32T>(object, offset);
2665 } else {
2666 return SmiToInt32(Load<Smi>(object, offset));
2667 }
2668}
2669
2670TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
2671 TNode<FixedArray> object, TNode<IntPtrT> index, int additional_offset) {
2672 CSA_SLOW_DCHECK(this, IsFixedArraySubclass(object))((void)0);
2673 return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
2674 index, additional_offset);
2675}
2676
2677TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
2678 TNode<WeakFixedArray> object, TNode<IntPtrT> index, int additional_offset) {
2679 return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
2680 additional_offset);
2681}
2682
2683TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
2684 TNode<FixedDoubleArray> object, TNode<IntPtrT> index, Label* if_hole,
2685 MachineType machine_type) {
2686 int32_t header_size = FixedDoubleArray::kHeaderSize - kHeapObjectTag;
2687 TNode<IntPtrT> offset =
2688 ElementOffsetFromIndex(index, HOLEY_DOUBLE_ELEMENTS, header_size);
2689 CSA_DCHECK(this, IsOffsetInBounds(((void)0)
2690 offset, LoadAndUntagFixedArrayBaseLength(object),((void)0)
2691 FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS))((void)0);
2692 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
2693}
2694
2695TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
2696 TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
2697 TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
2698 TVARIABLE(Object, var_result)TVariable<Object> var_result(this);
2699 Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
2700 if_holey_double(this), if_dictionary(this, Label::kDeferred);
2701
2702 int32_t kinds[] = {
2703 // Handled by if_packed.
2704 PACKED_SMI_ELEMENTS, PACKED_ELEMENTS, PACKED_NONEXTENSIBLE_ELEMENTS,
2705 PACKED_SEALED_ELEMENTS, PACKED_FROZEN_ELEMENTS,
2706 // Handled by if_holey.
2707 HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS, HOLEY_NONEXTENSIBLE_ELEMENTS,
2708 HOLEY_SEALED_ELEMENTS, HOLEY_FROZEN_ELEMENTS,
2709 // Handled by if_packed_double.
2710 PACKED_DOUBLE_ELEMENTS,
2711 // Handled by if_holey_double.
2712 HOLEY_DOUBLE_ELEMENTS};
2713 Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
2714 &if_packed, &if_packed, &if_packed, &if_packed, &if_packed,
2715 // HOLEY_{SMI,}_ELEMENTS
2716 &if_holey, &if_holey, &if_holey, &if_holey, &if_holey,
2717 // PACKED_DOUBLE_ELEMENTS
2718 &if_packed_double,
2719 // HOLEY_DOUBLE_ELEMENTS
2720 &if_holey_double};
2721 Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds)(sizeof(ArraySizeHelper(kinds))));
2722
2723 BIND(&if_packed)Bind(&if_packed);
2724 {
2725 var_result = LoadFixedArrayElement(CAST(elements)Cast(elements), index, 0);
2726 Goto(&done);
2727 }
2728
2729 BIND(&if_holey)Bind(&if_holey);
2730 {
2731 var_result = LoadFixedArrayElement(CAST(elements)Cast(elements), index);
2732 Branch(TaggedEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
2733 }
2734
2735 BIND(&if_packed_double)Bind(&if_packed_double);
2736 {
2737 var_result = AllocateHeapNumberWithValue(
2738 LoadFixedDoubleArrayElement(CAST(elements)Cast(elements), index));
2739 Goto(&done);
2740 }
2741
2742 BIND(&if_holey_double)Bind(&if_holey_double);
2743 {
2744 var_result = AllocateHeapNumberWithValue(
2745 LoadFixedDoubleArrayElement(CAST(elements)Cast(elements), index, if_hole));
2746 Goto(&done);
2747 }
2748
2749 BIND(&if_dictionary)Bind(&if_dictionary);
2750 {
2751 CSA_DCHECK(this, IsDictionaryElementsKind(elements_kind))((void)0);
2752 var_result = BasicLoadNumberDictionaryElement(CAST(elements)Cast(elements), index,
2753 if_accessor, if_hole);
2754 Goto(&done);
2755 }
2756
2757 BIND(&done)Bind(&done);
2758 return var_result.value();
2759}
2760
2761TNode<BoolT> CodeStubAssembler::IsDoubleHole(TNode<Object> base,
2762 TNode<IntPtrT> offset) {
2763 // TODO(ishell): Compare only the upper part for the hole once the
2764 // compiler is able to fold addition of already complex |offset| with
2765 // |kIeeeDoubleExponentWordOffset| into one addressing mode.
2766 if (Is64()) {
2767 TNode<Uint64T> element = Load<Uint64T>(base, offset);
2768 return Word64Equal(element, Int64Constant(kHoleNanInt64));
2769 } else {
2770 TNode<Uint32T> element_upper = Load<Uint32T>(
2771 base, IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
2772 return Word32Equal(element_upper, Int32Constant(kHoleNanUpper32));
2773 }
2774}
2775
2776TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
2777 TNode<Object> base, TNode<IntPtrT> offset, Label* if_hole,
2778 MachineType machine_type) {
2779 if (if_hole) {
2780 GotoIf(IsDoubleHole(base, offset), if_hole);
2781 }
2782 if (machine_type.IsNone()) {
2783 // This means the actual value is not needed.
2784 return TNode<Float64T>();
2785 }
2786 return UncheckedCast<Float64T>(Load(machine_type, base, offset));
2787}
2788
2789TNode<ScopeInfo> CodeStubAssembler::LoadScopeInfo(TNode<Context> context) {
2790 return CAST(LoadContextElement(context, Context::SCOPE_INFO_INDEX))Cast(LoadContextElement(context, Context::SCOPE_INFO_INDEX));
2791}
2792
2793TNode<BoolT> CodeStubAssembler::LoadScopeInfoHasExtensionField(
2794 TNode<ScopeInfo> scope_info) {
2795 TNode<IntPtrT> value =
2796 LoadAndUntagObjectField(scope_info, ScopeInfo::kFlagsOffset);
2797 return IsSetWord<ScopeInfo::HasContextExtensionSlotBit>(value);
2798}
2799
2800void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2801 TNode<Context> context, int slot_index, TNode<Object> value) {
2802 int offset = Context::SlotOffset(slot_index);
2803 StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2804 IntPtrConstant(offset), value);
2805}
2806
2807TNode<NativeContext> CodeStubAssembler::LoadNativeContext(
2808 TNode<Context> context) {
2809 TNode<Map> map = LoadMap(context);
2810 return CAST(LoadObjectField(Cast(LoadObjectField( map, Map::kConstructorOrBackPointerOrNativeContextOffset
))
2811 map, Map::kConstructorOrBackPointerOrNativeContextOffset))Cast(LoadObjectField( map, Map::kConstructorOrBackPointerOrNativeContextOffset
))
;
2812}
2813
2814TNode<Context> CodeStubAssembler::LoadModuleContext(TNode<Context> context) {
2815 TNode<NativeContext> native_context = LoadNativeContext(context);
2816 TNode<Map> module_map = CAST(Cast(LoadContextElement(native_context, Context::MODULE_CONTEXT_MAP_INDEX
))
2817 LoadContextElement(native_context, Context::MODULE_CONTEXT_MAP_INDEX))Cast(LoadContextElement(native_context, Context::MODULE_CONTEXT_MAP_INDEX
))
;
2818 TVariable<Object> cur_context(context, this);
2819
2820 Label context_found(this);
2821
2822 Label context_search(this, &cur_context);
2823
2824 // Loop until cur_context->map() is module_map.
2825 Goto(&context_search);
2826 BIND(&context_search)Bind(&context_search);
2827 {
2828 CSA_DCHECK(this, Word32BinaryNot(((void)0)
2829 TaggedEqual(cur_context.value(), native_context)))((void)0);
2830 GotoIf(TaggedEqual(LoadMap(CAST(cur_context.value())Cast(cur_context.value())), module_map),
2831 &context_found);
2832
2833 cur_context =
2834 LoadContextElement(CAST(cur_context.value())Cast(cur_context.value()), Context::PREVIOUS_INDEX);
2835 Goto(&context_search);
2836 }
2837
2838 BIND(&context_found)Bind(&context_found);
2839 return UncheckedCast<Context>(cur_context.value());
2840}
2841
2842TNode<Object> CodeStubAssembler::GetImportMetaObject(TNode<Context> context) {
2843 const TNode<Context> module_context = LoadModuleContext(context);
2844 const TNode<HeapObject> module =
2845 CAST(LoadContextElement(module_context, Context::EXTENSION_INDEX))Cast(LoadContextElement(module_context, Context::EXTENSION_INDEX
))
;
2846 const TNode<Object> import_meta =
2847 LoadObjectField(module, SourceTextModule::kImportMetaOffset);
2848
2849 TVARIABLE(Object, return_value, import_meta)TVariable<Object> return_value(import_meta, this);
2850
2851 Label end(this);
2852 GotoIfNot(IsTheHole(import_meta), &end);
2853
2854 return_value = CallRuntime(Runtime::kGetImportMetaObject, context);
2855 Goto(&end);
2856
2857 BIND(&end)Bind(&end);
2858 return return_value.value();
2859}
2860
2861TNode<Map> CodeStubAssembler::LoadObjectFunctionInitialMap(
2862 TNode<NativeContext> native_context) {
2863 TNode<JSFunction> object_function =
2864 CAST(LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX))Cast(LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX
))
;
2865 return CAST(LoadJSFunctionPrototypeOrInitialMap(object_function))Cast(LoadJSFunctionPrototypeOrInitialMap(object_function));
2866}
2867
2868TNode<Map> CodeStubAssembler::LoadSlowObjectWithNullPrototypeMap(
2869 TNode<NativeContext> native_context) {
2870 TNode<Map> map = CAST(LoadContextElement(Cast(LoadContextElement( native_context, Context::SLOW_OBJECT_WITH_NULL_PROTOTYPE_MAP
))
2871 native_context, Context::SLOW_OBJECT_WITH_NULL_PROTOTYPE_MAP))Cast(LoadContextElement( native_context, Context::SLOW_OBJECT_WITH_NULL_PROTOTYPE_MAP
))
;
2872 return map;
2873}
2874
2875TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2876 TNode<Int32T> kind, TNode<NativeContext> native_context) {
2877 CSA_DCHECK(this, IsFastElementsKind(kind))((void)0);
2878 TNode<IntPtrT> offset =
2879 IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2880 ChangeInt32ToIntPtr(kind));
2881 return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2882}
2883
2884TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2885 ElementsKind kind, TNode<NativeContext> native_context) {
2886 return UncheckedCast<Map>(
2887 LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2888}
2889
2890TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
2891 TNode<JSFunction> function) {
2892 const TNode<SharedFunctionInfo> shared_function_info =
2893 LoadObjectField<SharedFunctionInfo>(
2894 function, JSFunction::kSharedFunctionInfoOffset);
2895
2896 const TNode<Uint32T> function_kind =
2897 DecodeWord32<SharedFunctionInfo::FunctionKindBits>(
2898 LoadObjectField<Uint32T>(shared_function_info,
2899 SharedFunctionInfo::kFlagsOffset));
2900
2901 // See IsGeneratorFunction(FunctionKind kind).
2902 return IsInRange(
2903 function_kind,
2904 static_cast<uint32_t>(FunctionKind::kAsyncConciseGeneratorMethod),
2905 static_cast<uint32_t>(FunctionKind::kConciseGeneratorMethod));
2906}
2907
2908TNode<BoolT> CodeStubAssembler::IsJSFunctionWithPrototypeSlot(
2909 TNode<HeapObject> object) {
2910 // Only JSFunction maps may have HasPrototypeSlotBit set.
2911 return IsSetWord32<Map::Bits1::HasPrototypeSlotBit>(
2912 LoadMapBitField(LoadMap(object)));
2913}
2914
2915void CodeStubAssembler::BranchIfHasPrototypeProperty(
2916 TNode<JSFunction> function, TNode<Int32T> function_map_bit_field,
2917 Label* if_true, Label* if_false) {
2918 // (has_prototype_slot() && IsConstructor()) ||
2919 // IsGeneratorFunction(shared()->kind())
2920 uint32_t mask = Map::Bits1::HasPrototypeSlotBit::kMask |
2921 Map::Bits1::IsConstructorBit::kMask;
2922
2923 GotoIf(IsAllSetWord32(function_map_bit_field, mask), if_true);
2924 Branch(IsGeneratorFunction(function), if_true, if_false);
2925}
2926
2927void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
2928 TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
2929 // !has_prototype_property() || has_non_instance_prototype()
2930 TNode<Int32T> map_bit_field = LoadMapBitField(map);
2931 Label next_check(this);
2932 BranchIfHasPrototypeProperty(function, map_bit_field, &next_check, runtime);
2933 BIND(&next_check)Bind(&next_check);
2934 GotoIf(IsSetWord32<Map::Bits1::HasNonInstancePrototypeBit>(map_bit_field),
2935 runtime);
2936}
2937
2938TNode<HeapObject> CodeStubAssembler::LoadJSFunctionPrototype(
2939 TNode<JSFunction> function, Label* if_bailout) {
2940 CSA_DCHECK(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)))((void)0);
2941 CSA_DCHECK(this, IsClearWord32<Map::Bits1::HasNonInstancePrototypeBit>(((void)0)
2942 LoadMapBitField(LoadMap(function))))((void)0);
2943 TNode<HeapObject> proto_or_map = LoadObjectField<HeapObject>(
2944 function, JSFunction::kPrototypeOrInitialMapOffset);
2945 GotoIf(IsTheHole(proto_or_map), if_bailout);
2946
2947 TVARIABLE(HeapObject, var_result, proto_or_map)TVariable<HeapObject> var_result(proto_or_map, this);
2948 Label done(this, &var_result);
2949 GotoIfNot(IsMap(proto_or_map), &done);
2950
2951 var_result = LoadMapPrototype(CAST(proto_or_map)Cast(proto_or_map));
2952 Goto(&done);
2953
2954 BIND(&done)Bind(&done);
2955 return var_result.value();
2956}
2957
2958TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(
2959 TNode<SharedFunctionInfo> shared) {
2960 TNode<HeapObject> function_data = LoadObjectField<HeapObject>(
2961 shared, SharedFunctionInfo::kFunctionDataOffset);
2962
2963 TVARIABLE(HeapObject, var_result, function_data)TVariable<HeapObject> var_result(function_data, this);
2964
2965 Label check_for_interpreter_data(this, &var_result);
2966 Label done(this, &var_result);
2967
2968 GotoIfNot(HasInstanceType(var_result.value(), CODET_TYPE),
2969 &check_for_interpreter_data);
2970 {
2971 TNode<CodeT> code = CAST(var_result.value())Cast(var_result.value());
2972#ifdef DEBUG
2973 TNode<Int32T> code_flags =
2974 LoadObjectField<Int32T>(code, CodeT::kFlagsOffset);
2975 CSA_DCHECK(((void)0)
2976 this, Word32Equal(DecodeWord32<CodeT::KindField>(code_flags),((void)0)
2977 Int32Constant(static_cast<int>(CodeKind::BASELINE))))((void)0);
2978#endif // DEBUG
2979 TNode<HeapObject> baseline_data = LoadObjectField<HeapObject>(
2980 FromCodeT(code), Code::kDeoptimizationDataOrInterpreterDataOffset);
2981 var_result = baseline_data;
2982 }
2983 Goto(&check_for_interpreter_data);
2984
2985 BIND(&check_for_interpreter_data)Bind(&check_for_interpreter_data);
2986
2987 GotoIfNot(HasInstanceType(var_result.value(), INTERPRETER_DATA_TYPE), &done);
2988 TNode<BytecodeArray> bytecode_array = LoadObjectField<BytecodeArray>(
2989 var_result.value(), InterpreterData::kBytecodeArrayOffset);
2990 var_result = bytecode_array;
2991 Goto(&done);
2992
2993 BIND(&done)Bind(&done);
2994 return CAST(var_result.value())Cast(var_result.value());
2995}
2996
2997void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2998 int offset,
2999 TNode<Word32T> value) {
3000 StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
3001 IntPtrConstant(offset - kHeapObjectTag), value);
3002}
3003
3004void CodeStubAssembler::StoreHeapNumberValue(TNode<HeapNumber> object,
3005 TNode<Float64T> value) {
3006 StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value);
3007}
3008
3009void CodeStubAssembler::StoreObjectField(TNode<HeapObject> object, int offset,
3010 TNode<Smi> value) {
3011 StoreObjectFieldNoWriteBarrier(object, offset, value);
3012}
3013
3014void CodeStubAssembler::StoreObjectField(TNode<HeapObject> object,
3015 TNode<IntPtrT> offset,
3016 TNode<Smi> value) {
3017 StoreObjectFieldNoWriteBarrier(object, offset, value);
3018}
3019
3020void CodeStubAssembler::StoreObjectField(TNode<HeapObject> object, int offset,
3021 TNode<Object> value) {
3022 DCHECK_NE(HeapObject::kMapOffset, offset)((void) 0); // Use StoreMap instead.
3023 OptimizedStoreField(MachineRepresentation::kTagged,
3024 UncheckedCast<HeapObject>(object), offset, value);
3025}
3026
3027void CodeStubAssembler::StoreObjectField(TNode<HeapObject> object,
3028 TNode<IntPtrT> offset,
3029 TNode<Object> value) {
3030 int const_offset;
3031 if (TryToInt32Constant(offset, &const_offset)) {
3032 StoreObjectField(object, const_offset, value);
3033 } else {
3034 Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
3035 }
3036}
3037
3038void CodeStubAssembler::UnsafeStoreObjectFieldNoWriteBarrier(
3039 TNode<HeapObject> object, int offset, TNode<Object> value) {
3040 DCHECK_NE(HeapObject::kMapOffset, offset)((void) 0); // Use StoreMap instead.
3041 OptimizedStoreFieldUnsafeNoWriteBarrier(MachineRepresentation::kTagged,
3042 object, offset, value);
3043}
3044
3045void CodeStubAssembler::StoreJSSharedStructInObjectField(
3046 TNode<HeapObject> object, TNode<IntPtrT> offset, TNode<Object> value) {
3047 CSA_DCHECK(this, IsJSSharedStruct(object))((void)0);
3048 // JSSharedStructs are allocated in the shared old space, which is currently
3049 // collected by stopping the world, so the incremental write barrier is not
3050 // needed. They can only store Smis and other HeapObjects in the shared old
3051 // space, so the generational write barrier is also not needed.
3052 // TODO(v8:12547): Add a safer, shared variant of NoWriteBarrier instead of
3053 // using Unsafe.
3054 int const_offset;
3055 if (TryToInt32Constant(offset, &const_offset)) {
3056 UnsafeStoreObjectFieldNoWriteBarrier(object, const_offset, value);
3057 } else {
3058 UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged, object,
3059 IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
3060 value);
3061 }
3062}
3063
3064void CodeStubAssembler::StoreMap(TNode<HeapObject> object, TNode<Map> map) {
3065 OptimizedStoreMap(object, map);
3066 DcheckHasValidMap(object);
3067}
3068
3069void CodeStubAssembler::StoreMapNoWriteBarrier(TNode<HeapObject> object,
3070 RootIndex map_root_index) {
3071 StoreMapNoWriteBarrier(object, CAST(LoadRoot(map_root_index))Cast(LoadRoot(map_root_index)));
3072}
3073
3074void CodeStubAssembler::StoreMapNoWriteBarrier(TNode<HeapObject> object,
3075 TNode<Map> map) {
3076 OptimizedStoreMap(object, map);
3077 DcheckHasValidMap(object);
3078}
3079
3080void CodeStubAssembler::StoreObjectFieldRoot(TNode<HeapObject> object,
3081 int offset, RootIndex root_index) {
3082 TNode<Object> root = LoadRoot(root_index);
3083 if (offset == HeapObject::kMapOffset) {
3084 StoreMap(object, CAST(root)Cast(root));
3085 } else if (RootsTable::IsImmortalImmovable(root_index)) {
3086 StoreObjectFieldNoWriteBarrier(object, offset, root);
3087 } else {
3088 StoreObjectField(object, offset, root);
3089 }
3090}
3091
3092template <typename TIndex>
3093void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
3094 TNode<UnionT<FixedArray, PropertyArray>> object, TNode<TIndex> index_node,
3095 TNode<Object> value, WriteBarrierMode barrier_mode, int additional_offset) {
3096 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
3097 static_assert(std::is_same<TIndex, Smi>::value ||
3098 std::is_same<TIndex, UintPtrT>::value ||
3099 std::is_same<TIndex, IntPtrT>::value,
3100 "Only Smi, UintPtrT or IntPtrT index is allowed");
3101 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||((void) 0)
3102 barrier_mode == UNSAFE_SKIP_WRITE_BARRIER ||((void) 0)
3103 barrier_mode == UPDATE_WRITE_BARRIER ||((void) 0)
3104 barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER)((void) 0);
3105 DCHECK(IsAligned(additional_offset, kTaggedSize))((void) 0);
3106 STATIC_ASSERT(static_cast<int>(FixedArray::kHeaderSize) ==static_assert(static_cast<int>(FixedArray::kHeaderSize)
== static_cast<int>(PropertyArray::kHeaderSize), "static_cast<int>(FixedArray::kHeaderSize) == static_cast<int>(PropertyArray::kHeaderSize)"
)
3107 static_cast<int>(PropertyArray::kHeaderSize))static_assert(static_cast<int>(FixedArray::kHeaderSize)
== static_cast<int>(PropertyArray::kHeaderSize), "static_cast<int>(FixedArray::kHeaderSize) == static_cast<int>(PropertyArray::kHeaderSize)"
)
;
3108 int header_size =
3109 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
3110 TNode<IntPtrT> offset =
3111 ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS, header_size);
3112 STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==static_assert(static_cast<int>(FixedArrayBase::kLengthOffset
) == static_cast<int>(WeakFixedArray::kLengthOffset), "static_cast<int>(FixedArrayBase::kLengthOffset) == static_cast<int>(WeakFixedArray::kLengthOffset)"
)
3113 static_cast<int>(WeakFixedArray::kLengthOffset))static_assert(static_cast<int>(FixedArrayBase::kLengthOffset
) == static_cast<int>(WeakFixedArray::kLengthOffset), "static_cast<int>(FixedArrayBase::kLengthOffset) == static_cast<int>(WeakFixedArray::kLengthOffset)"
)
;
3114 STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==static_assert(static_cast<int>(FixedArrayBase::kLengthOffset
) == static_cast<int>(PropertyArray::kLengthAndHashOffset
), "static_cast<int>(FixedArrayBase::kLengthOffset) == static_cast<int>(PropertyArray::kLengthAndHashOffset)"
)
3115 static_cast<int>(PropertyArray::kLengthAndHashOffset))static_assert(static_cast<int>(FixedArrayBase::kLengthOffset
) == static_cast<int>(PropertyArray::kLengthAndHashOffset
), "static_cast<int>(FixedArrayBase::kLengthOffset) == static_cast<int>(PropertyArray::kLengthAndHashOffset)"
)
;
3116 // Check that index_node + additional_offset <= object.length.
3117 // TODO(cbruni): Use proper LoadXXLength helpers
3118 CSA_DCHECK(((void)0)
3119 this,((void)0)
3120 IsOffsetInBounds(((void)0)
3121 offset,((void)0)
3122 Select<IntPtrT>(((void)0)
3123 IsPropertyArray(object),((void)0)
3124 [=] {((void)0)
3125 TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(((void)0)
3126 object, PropertyArray::kLengthAndHashOffset);((void)0)
3127 return Signed(((void)0)
3128 DecodeWord<PropertyArray::LengthField>(length_and_hash));((void)0)
3129 },((void)0)
3130 [=] {((void)0)
3131 return LoadAndUntagObjectField(object,((void)0)
3132 FixedArrayBase::kLengthOffset);((void)0)
3133 }),((void)0)
3134 FixedArray::kHeaderSize))((void)0);
3135 if (barrier_mode == SKIP_WRITE_BARRIER) {
3136 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
3137 } else if (barrier_mode == UNSAFE_SKIP_WRITE_BARRIER) {
3138 UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
3139 value);
3140 } else if (barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER) {
3141 StoreEphemeronKey(object, offset, value);
3142 } else {
3143 Store(object, offset, value);
3144 }
3145}
3146
3147template V8_EXPORT_PRIVATE void
3148CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement<Smi>(
3149 TNode<UnionT<FixedArray, PropertyArray>>, TNode<Smi>, TNode<Object>,
3150 WriteBarrierMode, int);
3151
3152template V8_EXPORT_PRIVATE void
3153CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement<IntPtrT>(
3154 TNode<UnionT<FixedArray, PropertyArray>>, TNode<IntPtrT>, TNode<Object>,
3155 WriteBarrierMode, int);
3156
3157template V8_EXPORT_PRIVATE void
3158CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement<UintPtrT>(
3159 TNode<UnionT<FixedArray, PropertyArray>>, TNode<UintPtrT>, TNode<Object>,
3160 WriteBarrierMode, int);
3161
3162template <typename TIndex>
3163void CodeStubAssembler::StoreFixedDoubleArrayElement(
3164 TNode<FixedDoubleArray> object, TNode<TIndex> index, TNode<Float64T> value,
3165 CheckBounds check_bounds) {
3166 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
3167 static_assert(std::is_same<TIndex, Smi>::value ||
3168 std::is_same<TIndex, UintPtrT>::value ||
3169 std::is_same<TIndex, IntPtrT>::value,
3170 "Only Smi, UintPtrT or IntPtrT index is allowed");
3171 if (NeedsBoundsCheck(check_bounds)) {
3172 FixedArrayBoundsCheck(object, index, 0);
3173 }
3174 TNode<IntPtrT> offset = ElementOffsetFromIndex(
3175 index, PACKED_DOUBLE_ELEMENTS, FixedArray::kHeaderSize - kHeapObjectTag);
3176 MachineRepresentation rep = MachineRepresentation::kFloat64;
3177 // Make sure we do not store signalling NaNs into double arrays.
3178 TNode<Float64T> value_silenced = Float64SilenceNaN(value);
3179 StoreNoWriteBarrier(rep, object, offset, value_silenced);
3180}
3181
3182// Export the Smi version which is used outside of code-stub-assembler.
3183template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreFixedDoubleArrayElement<
3184 Smi>(TNode<FixedDoubleArray>, TNode<Smi>, TNode<Float64T>, CheckBounds);
3185
3186void CodeStubAssembler::StoreFeedbackVectorSlot(
3187 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
3188 TNode<AnyTaggedT> value, WriteBarrierMode barrier_mode,
3189 int additional_offset) {
3190 DCHECK(IsAligned(additional_offset, kTaggedSize))((void) 0);
3191 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||((void) 0)
3192 barrier_mode == UNSAFE_SKIP_WRITE_BARRIER ||((void) 0)
3193 barrier_mode == UPDATE_WRITE_BARRIER)((void) 0);
3194 int header_size = FeedbackVector::kRawFeedbackSlotsOffset +
3195 additional_offset - kHeapObjectTag;
3196 TNode<IntPtrT> offset =
3197 ElementOffsetFromIndex(Signed(slot), HOLEY_ELEMENTS, header_size);
3198 // Check that slot <= feedback_vector.length.
3199 CSA_DCHECK(this,((void)0)
3200 IsOffsetInBounds(offset, LoadFeedbackVectorLength(feedback_vector),((void)0)
3201 FeedbackVector::kHeaderSize),((void)0)
3202 SmiFromIntPtr(offset), feedback_vector)((void)0);
3203 if (barrier_mode == SKIP_WRITE_BARRIER) {
3204 StoreNoWriteBarrier(MachineRepresentation::kTagged, feedback_vector, offset,
3205 value);
3206 } else if (barrier_mode == UNSAFE_SKIP_WRITE_BARRIER) {
3207 UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged, feedback_vector,
3208 offset, value);
3209 } else {
3210 Store(feedback_vector, offset, value);
3211 }
3212}
3213
3214TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Context> context,
3215 TNode<Map> map,
3216 Label* bailout) {
3217 // Disallow pushing onto prototypes. It might be the JSArray prototype.
3218 // Disallow pushing onto non-extensible objects.
3219 Comment("Disallow pushing onto prototypes");
3220 GotoIfNot(IsExtensibleNonPrototypeMap(map), bailout);
3221
3222 EnsureArrayLengthWritable(context, map, bailout);
3223
3224 TNode<Uint32T> kind =
3225 DecodeWord32<Map::Bits2::ElementsKindBits>(LoadMapBitField2(map));
3226 return Signed(kind);
3227}
3228
3229void CodeStubAssembler::PossiblyGrowElementsCapacity(
3230 ElementsKind kind, TNode<HeapObject> array, TNode<BInt> length,
3231 TVariable<FixedArrayBase>* var_elements, TNode<BInt> growth,
3232 Label* bailout) {
3233 Label fits(this, var_elements);
3234 TNode<BInt> capacity =
3235 TaggedToParameter<BInt>(LoadFixedArrayBaseLength(var_elements->value()));
3236
3237 TNode<BInt> new_length = IntPtrOrSmiAdd(growth, length);
3238 GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity), &fits);
3239 TNode<BInt> new_capacity = CalculateNewElementsCapacity(new_length);
3240 *var_elements = GrowElementsCapacity(array, var_elements->value(), kind, kind,
3241 capacity, new_capacity, bailout);
3242 Goto(&fits);
3243 BIND(&fits)Bind(&fits);
3244}
3245
3246TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
3247 TNode<JSArray> array,
3248 CodeStubArguments* args,
3249 TVariable<IntPtrT>* arg_index,
3250 Label* bailout) {
3251 Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
3252 Label pre_bailout(this);
3253 Label success(this);
3254 TVARIABLE(Smi, var_tagged_length)TVariable<Smi> var_tagged_length(this);
3255 TVARIABLE(BInt, var_length, SmiToBInt(LoadFastJSArrayLength(array)))TVariable<BInt> var_length(SmiToBInt(LoadFastJSArrayLength
(array)), this)
;
3256 TVARIABLE(FixedArrayBase, var_elements, LoadElements(array))TVariable<FixedArrayBase> var_elements(LoadElements(array
), this)
;
3257
3258 // Resize the capacity of the fixed array if it doesn't fit.
3259 TNode<IntPtrT> first = arg_index->value();
3260 TNode<BInt> growth =
3261 IntPtrToBInt(IntPtrSub(args->GetLengthWithoutReceiver(), first));
3262 PossiblyGrowElementsCapacity(kind, array, var_length.value(), &var_elements,
3263 growth, &pre_bailout);
3264
3265 // Push each argument onto the end of the array now that there is enough
3266 // capacity.
3267 CodeStubAssembler::VariableList push_vars({&var_length}, zone());
3268 TNode<FixedArrayBase> elements = var_elements.value();
3269 args->ForEach(
3270 push_vars,
3271 [&](TNode<Object> arg) {
3272 TryStoreArrayElement(kind, &pre_bailout, elements, var_length.value(),
3273 arg);
3274 Increment(&var_length);
3275 },
3276 first);
3277 {
3278 TNode<Smi> length = BIntToSmi(var_length.value());
3279 var_tagged_length = length;
3280 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3281 Goto(&success);
3282 }
3283
3284 BIND(&pre_bailout)Bind(&pre_bailout);
3285 {
3286 TNode<Smi> length = ParameterToTagged(var_length.value());
3287 var_tagged_length = length;
3288 TNode<Smi> diff = SmiSub(length, LoadFastJSArrayLength(array));
3289 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3290 *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
3291 Goto(bailout);
3292 }
3293
3294 BIND(&success)Bind(&success);
3295 return var_tagged_length.value();
3296}
3297
3298void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind, Label* bailout,
3299 TNode<FixedArrayBase> elements,
3300 TNode<BInt> index,
3301 TNode<Object> value) {
3302 if (IsSmiElementsKind(kind)) {
3303 GotoIf(TaggedIsNotSmi(value), bailout);
3304 } else if (IsDoubleElementsKind(kind)) {
3305 GotoIfNotNumber(value, bailout);
3306 }
3307
3308 if (IsDoubleElementsKind(kind)) {
3309 StoreElement(elements, kind, index, ChangeNumberToFloat64(CAST(value)Cast(value)));
3310 } else {
3311 StoreElement(elements, kind, index, value);
3312 }
3313}
3314
3315void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
3316 TNode<JSArray> array,
3317 TNode<Object> value,
3318 Label* bailout) {
3319 Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
3320 TVARIABLE(BInt, var_length, SmiToBInt(LoadFastJSArrayLength(array)))TVariable<BInt> var_length(SmiToBInt(LoadFastJSArrayLength
(array)), this)
;
3321 TVARIABLE(FixedArrayBase, var_elements, LoadElements(array))TVariable<FixedArrayBase> var_elements(LoadElements(array
), this)
;
3322
3323 // Resize the capacity of the fixed array if it doesn't fit.
3324 TNode<BInt> growth = IntPtrOrSmiConstant<BInt>(1);
3325 PossiblyGrowElementsCapacity(kind, array, var_length.value(), &var_elements,
3326 growth, bailout);
3327
3328 // Push each argument onto the end of the array now that there is enough
3329 // capacity.
3330 TryStoreArrayElement(kind, bailout, var_elements.value(), var_length.value(),
3331 value);
3332 Increment(&var_length);
3333
3334 TNode<Smi> length = BIntToSmi(var_length.value());
3335 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3336}
3337
3338TNode<Cell> CodeStubAssembler::AllocateCellWithValue(TNode<Object> value,
3339 WriteBarrierMode mode) {
3340 TNode<HeapObject> result = Allocate(Cell::kSize, AllocationFlag::kNone);
3341 StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
3342 TNode<Cell> cell = CAST(result)Cast(result);
3343 StoreCellValue(cell, value, mode);
3344 return cell;
3345}
3346
3347TNode<Object> CodeStubAssembler::LoadCellValue(TNode<Cell> cell) {
3348 return LoadObjectField(cell, Cell::kValueOffset);
3349}
3350
3351void CodeStubAssembler::StoreCellValue(TNode<Cell> cell, TNode<Object> value,
3352 WriteBarrierMode mode) {
3353 DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER)((void) 0);
3354
3355 if (mode == UPDATE_WRITE_BARRIER) {
3356 StoreObjectField(cell, Cell::kValueOffset, value);
3357 } else {
3358 StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
3359 }
3360}
3361
3362TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3363 TNode<HeapObject> result = Allocate(HeapNumber::kSize, AllocationFlag::kNone);
3364 RootIndex heap_map_index = RootIndex::kHeapNumberMap;
3365 StoreMapNoWriteBarrier(result, heap_map_index);
3366 return UncheckedCast<HeapNumber>(result);
3367}
3368
3369TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3370 TNode<Float64T> value) {
3371 TNode<HeapNumber> result = AllocateHeapNumber();
3372 StoreHeapNumberValue(result, value);
3373 return result;
3374}
3375
3376TNode<Object> CodeStubAssembler::CloneIfMutablePrimitive(TNode<Object> object) {
3377 TVARIABLE(Object, result, object)TVariable<Object> result(object, this);
3378 Label done(this);
3379
3380 GotoIf(TaggedIsSmi(object), &done);
3381 // TODO(leszeks): Read the field descriptor to decide if this heap number is
3382 // mutable or not.
3383 GotoIfNot(IsHeapNumber(UncheckedCast<HeapObject>(object)), &done);
3384 {
3385 // Mutable heap number found --- allocate a clone.
3386 TNode<Float64T> value =
3387 LoadHeapNumberValue(UncheckedCast<HeapNumber>(object));
3388 result = AllocateHeapNumberWithValue(value);
3389 Goto(&done);
3390 }
3391
3392 BIND(&done)Bind(&done);
3393 return result.value();
3394}
3395
3396TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
3397 TNode<BigInt> result = AllocateRawBigInt(length);
3398 StoreBigIntBitfield(result,
3399 Word32Shl(TruncateIntPtrToInt32(length),
3400 Int32Constant(BigInt::LengthBits::kShift)));
3401 return result;
3402}
3403
3404TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
3405 TNode<IntPtrT> size =
3406 IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
3407 Signed(WordShl(length, kSystemPointerSizeLog2)));
3408 TNode<HeapObject> raw_result =
3409 Allocate(size, AllocationFlag::kAllowLargeObjectAllocation);
3410 StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
3411 if (FIELD_SIZE(BigInt::kOptionalPaddingOffset)(BigInt::kOptionalPaddingOffsetEnd + 1 - BigInt::kOptionalPaddingOffset
)
!= 0) {
3412 DCHECK_EQ(4, FIELD_SIZE(BigInt::kOptionalPaddingOffset))((void) 0);
3413 StoreObjectFieldNoWriteBarrier(raw_result, BigInt::kOptionalPaddingOffset,
3414 Int32Constant(0));
3415 }
3416 return UncheckedCast<BigInt>(raw_result);
3417}
3418
3419void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
3420 TNode<Word32T> bitfield) {
3421 StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield);
3422}
3423
3424void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint,
3425 intptr_t digit_index,
3426 TNode<UintPtrT> digit) {
3427 CHECK_LE(0, digit_index)do { bool _cmp = ::v8::base::CmpLEImpl< typename ::v8::base
::pass_value_or_ref<decltype(0)>::type, typename ::v8::
base::pass_value_or_ref<decltype(digit_index)>::type>
((0), (digit_index)); do { if ((__builtin_expect(!!(!(_cmp)),
0))) { V8_Fatal("Check failed: %s.", "0" " " "<=" " " "digit_index"
); } } while (false); } while (false)
;
3428 CHECK_LT(digit_index, BigInt::kMaxLength)do { bool _cmp = ::v8::base::CmpLTImpl< typename ::v8::base
::pass_value_or_ref<decltype(digit_index)>::type, typename
::v8::base::pass_value_or_ref<decltype(BigInt::kMaxLength
)>::type>((digit_index), (BigInt::kMaxLength)); do { if
((__builtin_expect(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s."
, "digit_index" " " "<" " " "BigInt::kMaxLength"); } } while
(false); } while (false)
;
3429 StoreObjectFieldNoWriteBarrier(
3430 bigint,
3431 BigInt::kDigitsOffset +
3432 static_cast<int>(digit_index) * kSystemPointerSize,
3433 digit);
3434}
3435
3436void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint,
3437 TNode<IntPtrT> digit_index,
3438 TNode<UintPtrT> digit) {
3439 TNode<IntPtrT> offset =
3440 IntPtrAdd(IntPtrConstant(BigInt::kDigitsOffset),
3441 IntPtrMul(digit_index, IntPtrConstant(kSystemPointerSize)));
3442 StoreObjectFieldNoWriteBarrier(bigint, offset, digit);
3443}
3444
3445TNode<Word32T> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
3446 return UncheckedCast<Word32T>(
3447 LoadObjectField<Uint32T>(bigint, BigInt::kBitfieldOffset));
3448}
3449
3450TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3451 intptr_t digit_index) {
3452 CHECK_LE(0, digit_index)do { bool _cmp = ::v8::base::CmpLEImpl< typename ::v8::base
::pass_value_or_ref<decltype(0)>::type, typename ::v8::
base::pass_value_or_ref<decltype(digit_index)>::type>
((0), (digit_index)); do { if ((__builtin_expect(!!(!(_cmp)),
0))) { V8_Fatal("Check failed: %s.", "0" " " "<=" " " "digit_index"
); } } while (false); } while (false)
;
3453 CHECK_LT(digit_index, BigInt::kMaxLength)do { bool _cmp = ::v8::base::CmpLTImpl< typename ::v8::base
::pass_value_or_ref<decltype(digit_index)>::type, typename
::v8::base::pass_value_or_ref<decltype(BigInt::kMaxLength
)>::type>((digit_index), (BigInt::kMaxLength)); do { if
((__builtin_expect(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s."
, "digit_index" " " "<" " " "BigInt::kMaxLength"); } } while
(false); } while (false)
;
3454 return LoadObjectField<UintPtrT>(
3455 bigint, BigInt::kDigitsOffset +
3456 static_cast<int>(digit_index) * kSystemPointerSize);
3457}
3458
3459TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3460 TNode<IntPtrT> digit_index) {
3461 TNode<IntPtrT> offset =
3462 IntPtrAdd(IntPtrConstant(BigInt::kDigitsOffset),
3463 IntPtrMul(digit_index, IntPtrConstant(kSystemPointerSize)));
3464 return LoadObjectField<UintPtrT>(bigint, offset);
3465}
3466
3467TNode<ByteArray> CodeStubAssembler::AllocateNonEmptyByteArray(
3468 TNode<UintPtrT> length, AllocationFlags flags) {
3469 CSA_DCHECK(this, WordNotEqual(length, IntPtrConstant(0)))((void)0);
3470
3471 Comment("AllocateNonEmptyByteArray");
3472 TVARIABLE(Object, var_result)TVariable<Object> var_result(this);
3473
3474 TNode<IntPtrT> raw_size =
3475 GetArrayAllocationSize(Signed(length), UINT8_ELEMENTS,
3476 ByteArray::kHeaderSize + kObjectAlignmentMask);
3477 TNode<IntPtrT> size =
3478 WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3479
3480 TNode<HeapObject> result = Allocate(size, flags);
3481
3482 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kByteArrayMap))((void) 0);
3483 StoreMapNoWriteBarrier(result, RootIndex::kByteArrayMap);
3484 StoreObjectFieldNoWriteBarrier(result, ByteArray::kLengthOffset,
3485 SmiTag(Signed(length)));
3486
3487 return CAST(result)Cast(result);
3488}
3489
3490TNode<ByteArray> CodeStubAssembler::AllocateByteArray(TNode<UintPtrT> length,
3491 AllocationFlags flags) {
3492 // TODO(ishell): unify with AllocateNonEmptyByteArray().
3493
3494 Comment("AllocateByteArray");
3495 TVARIABLE(Object, var_result)TVariable<Object> var_result(this);
3496
3497 // Compute the ByteArray size and check if it fits into new space.
3498 Label if_lengthiszero(this), if_sizeissmall(this),
3499 if_notsizeissmall(this, Label::kDeferred), if_join(this);
3500 GotoIf(WordEqual(length, UintPtrConstant(0)), &if_lengthiszero);
3501
3502 TNode<IntPtrT> raw_size =
3503 GetArrayAllocationSize(Signed(length), UINT8_ELEMENTS,
3504 ByteArray::kHeaderSize + kObjectAlignmentMask);
3505 TNode<IntPtrT> size =
3506 WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3507 Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3508 &if_sizeissmall, &if_notsizeissmall);
3509
3510 BIND(&if_sizeissmall)Bind(&if_sizeissmall);
3511 {
3512 // Just allocate the ByteArray in new space.
3513 TNode<HeapObject> result =
3514 AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3515 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kByteArrayMap))((void) 0);
3516 StoreMapNoWriteBarrier(result, RootIndex::kByteArrayMap);
3517 StoreObjectFieldNoWriteBarrier(result, ByteArray::kLengthOffset,
3518 SmiTag(Signed(length)));
3519 var_result = result;
3520 Goto(&if_join);
3521 }
3522
3523 BIND(&if_notsizeissmall)Bind(&if_notsizeissmall);
3524 {
3525 // We might need to allocate in large object space, go to the runtime.
3526 TNode<Object> result =
3527 CallRuntime(Runtime::kAllocateByteArray, NoContextConstant(),
3528 ChangeUintPtrToTagged(length));
3529 var_result = result;
3530 Goto(&if_join);
3531 }
3532
3533 BIND(&if_lengthiszero)Bind(&if_lengthiszero);
3534 {
3535 var_result = EmptyByteArrayConstant();
3536 Goto(&if_join);
3537 }
3538
3539 BIND(&if_join)Bind(&if_join);
3540 return CAST(var_result.value())Cast(var_result.value());
3541}
3542
3543TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3544 uint32_t length, AllocationFlags flags) {
3545 Comment("AllocateSeqOneByteString");
3546 if (length == 0) {
3547 return EmptyStringConstant();
3548 }
3549 TNode<HeapObject> result = Allocate(SeqOneByteString::SizeFor(length), flags);
3550 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap))((void) 0);
3551 StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3552 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3553 Uint32Constant(length));
3554 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kRawHashFieldOffset,
3555 Int32Constant(String::kEmptyHashField));
3556 return CAST(result)Cast(result);
3557}
3558
3559TNode<BoolT> CodeStubAssembler::IsZeroOrContext(TNode<Object> object) {
3560 return Select<BoolT>(
3561 TaggedEqual(object, SmiConstant(0)), [=] { return Int32TrueConstant(); },
3562 [=] { return IsContext(CAST(object)Cast(object)); });
3563}
3564
3565TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3566 uint32_t length, AllocationFlags flags) {
3567 Comment("AllocateSeqTwoByteString");
3568 if (length == 0) {
3569 return EmptyStringConstant();
3570 }
3571 TNode<HeapObject> result = Allocate(SeqTwoByteString::SizeFor(length), flags);
3572 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap))((void) 0);
3573 StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3574 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3575 Uint32Constant(length));
3576 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kRawHashFieldOffset,
3577 Int32Constant(String::kEmptyHashField));
3578 return CAST(result)Cast(result);
3579}
3580
3581TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
3582 TNode<Uint32T> length,
3583 TNode<String> parent,
3584 TNode<Smi> offset) {
3585 DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||((void) 0)
3586 map_root_index == RootIndex::kSlicedStringMap)((void) 0);
3587 TNode<HeapObject> result = Allocate(SlicedString::kSize);
3588 DCHECK(RootsTable::IsImmortalImmovable(map_root_index))((void) 0);
3589 StoreMapNoWriteBarrier(result, map_root_index);
3590 StoreObjectFieldNoWriteBarrier(result, SlicedString::kRawHashFieldOffset,
3591 Int32Constant(String::kEmptyHashField));
3592 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length);
3593 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent);
3594 StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset);
3595 return CAST(result)Cast(result);
3596}
3597
3598TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
3599 TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3600 return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
3601 parent, offset);
3602}
3603
3604TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
3605 TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3606 return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
3607 offset);
3608}
3609
3610TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3611 int at_least_space_for) {
3612 return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
3613}
3614
3615TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3616 TNode<IntPtrT> at_least_space_for, AllocationFlags flags) {
3617 CSA_DCHECK(this, UintPtrLessThanOrEqual(((void)0)
3618 at_least_space_for,((void)0)
3619 IntPtrConstant(NameDictionary::kMaxCapacity)))((void)0);
3620 TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
3621 return AllocateNameDictionaryWithCapacity(capacity, flags);
3622}
3623
3624TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
3625 TNode<IntPtrT> capacity, AllocationFlags flags) {
3626 CSA_DCHECK(this, WordIsPowerOfTwo(capacity))((void)0);
3627 CSA_DCHECK(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)))((void)0);
3628 TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
3629 TNode<IntPtrT> store_size = IntPtrAdd(
3630 TimesTaggedSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
3631
3632 TNode<NameDictionary> result =
3633 UncheckedCast<NameDictionary>(Allocate(store_size, flags));
3634
3635 // Initialize FixedArray fields.
3636 {
3637 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap))((void) 0);
3638 StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
3639 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
3640 SmiFromIntPtr(length));
3641 }
3642
3643 // Initialized HashTable fields.
3644 {
3645 TNode<Smi> zero = SmiConstant(0);
3646 StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
3647 SKIP_WRITE_BARRIER);
3648 StoreFixedArrayElement(result,
3649 NameDictionary::kNumberOfDeletedElementsIndex, zero,
3650 SKIP_WRITE_BARRIER);
3651 StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
3652 SmiTag(capacity), SKIP_WRITE_BARRIER);
3653 // Initialize Dictionary fields.
3654 StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
3655 SmiConstant(PropertyDetails::kInitialIndex),
3656 SKIP_WRITE_BARRIER);
3657 StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
3658 SmiConstant(PropertyArray::kNoHashSentinel),
3659 SKIP_WRITE_BARRIER);
3660 }
3661
3662 // Initialize NameDictionary elements.
3663 {
3664 TNode<IntPtrT> result_word = BitcastTaggedToWord(result);
3665 TNode<IntPtrT> start_address = IntPtrAdd(
3666 result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
3667 NameDictionary::kElementsStartIndex) -
3668 kHeapObjectTag));
3669 TNode<IntPtrT> end_address = IntPtrAdd(
3670 result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
3671
3672 TNode<Oddball> filler = UndefinedConstant();
3673 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kUndefinedValue))((void) 0);
3674
3675 StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3676 }
3677
3678 return result;
3679}
3680
3681TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
3682 TNode<NameDictionary> dictionary, Label* large_object_fallback) {
3683 Comment("Copy boilerplate property dict");
3684 TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
3685 CSA_DCHECK(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)))((void)0);
3686 GotoIf(UintPtrGreaterThan(
3687 capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
3688 large_object_fallback);
3689 TNode<NameDictionary> properties =
3690 AllocateNameDictionaryWithCapacity(capacity);
3691 TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
3692 CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
3693 SKIP_WRITE_BARRIER);
3694 return properties;
3695}
3696
3697template <typename CollectionType>
3698TNode<CollectionType> CodeStubAssembler::AllocateOrderedHashTable(
3699 TNode<IntPtrT> capacity) {
3700 capacity = IntPtrRoundUpToPowerOfTwo32(capacity);
3701 capacity =
3702 IntPtrMax(capacity, IntPtrConstant(CollectionType::kInitialCapacity));
3703 return AllocateOrderedHashTableWithCapacity<CollectionType>(capacity);
3704}
3705
3706template <typename CollectionType>
3707TNode<CollectionType> CodeStubAssembler::AllocateOrderedHashTableWithCapacity(
3708 TNode<IntPtrT> capacity) {
3709 CSA_DCHECK(this, WordIsPowerOfTwo(capacity))((void)0);
3710 CSA_DCHECK(this,((void)0)
3711 IntPtrGreaterThanOrEqual(((void)0)
3712 capacity, IntPtrConstant(CollectionType::kInitialCapacity)))((void)0);
3713 CSA_DCHECK(this,((void)0)
3714 IntPtrLessThanOrEqual(((void)0)
3715 capacity, IntPtrConstant(CollectionType::MaxCapacity())))((void)0);
3716
3717 STATIC_ASSERT(CollectionType::kLoadFactor == 2)static_assert(CollectionType::kLoadFactor == 2, "CollectionType::kLoadFactor == 2"
)
;
3718 TNode<IntPtrT> bucket_count = Signed(WordShr(capacity, IntPtrConstant(1)));
3719 TNode<IntPtrT> data_table_length =
3720 IntPtrMul(capacity, IntPtrConstant(CollectionType::kEntrySize));
3721
3722 TNode<IntPtrT> data_table_start_index = IntPtrAdd(
3723 IntPtrConstant(CollectionType::HashTableStartIndex()), bucket_count);
3724 TNode<IntPtrT> fixed_array_length =
3725 IntPtrAdd(data_table_start_index, data_table_length);
3726
3727 // Allocate the table and add the proper map.
3728 const ElementsKind elements_kind = HOLEY_ELEMENTS;
3729 TNode<Map> fixed_array_map =
3730 HeapConstant(CollectionType::GetMap(ReadOnlyRoots(isolate())));
3731 TNode<CollectionType> table = CAST(AllocateFixedArray(Cast(AllocateFixedArray( elements_kind, fixed_array_length, AllocationFlag
::kAllowLargeObjectAllocation, fixed_array_map))
3732 elements_kind, fixed_array_length,Cast(AllocateFixedArray( elements_kind, fixed_array_length, AllocationFlag
::kAllowLargeObjectAllocation, fixed_array_map))
3733 AllocationFlag::kAllowLargeObjectAllocation, fixed_array_map))Cast(AllocateFixedArray( elements_kind, fixed_array_length, AllocationFlag
::kAllowLargeObjectAllocation, fixed_array_map))
;
3734
3735 Comment("Initialize the OrderedHashTable fields.");
3736 const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
3737 UnsafeStoreFixedArrayElement(table, CollectionType::NumberOfElementsIndex(),
3738 SmiConstant(0), barrier_mode);
3739 UnsafeStoreFixedArrayElement(table,
3740 CollectionType::NumberOfDeletedElementsIndex(),
3741 SmiConstant(0), barrier_mode);
3742 UnsafeStoreFixedArrayElement(table, CollectionType::NumberOfBucketsIndex(),
3743 SmiFromIntPtr(bucket_count), barrier_mode);
3744
3745 TNode<IntPtrT> object_address = BitcastTaggedToWord(table);
3746
3747 STATIC_ASSERT(CollectionType::HashTableStartIndex() ==static_assert(CollectionType::HashTableStartIndex() == CollectionType
::NumberOfBucketsIndex() + 1, "CollectionType::HashTableStartIndex() == CollectionType::NumberOfBucketsIndex() + 1"
)
3748 CollectionType::NumberOfBucketsIndex() + 1)static_assert(CollectionType::HashTableStartIndex() == CollectionType
::NumberOfBucketsIndex() + 1, "CollectionType::HashTableStartIndex() == CollectionType::NumberOfBucketsIndex() + 1"
)
;
3749
3750 TNode<Smi> not_found_sentinel = SmiConstant(CollectionType::kNotFound);
3751
3752 intptr_t const_capacity;
3753 if (TryToIntPtrConstant(capacity, &const_capacity) &&
3754 const_capacity == CollectionType::kInitialCapacity) {
3755 int const_bucket_count =
3756 static_cast<int>(const_capacity / CollectionType::kLoadFactor);
3757 int const_data_table_length =
3758 static_cast<int>(const_capacity * CollectionType::kEntrySize);
3759 int const_data_table_start_index = static_cast<int>(
3760 CollectionType::HashTableStartIndex() + const_bucket_count);
3761
3762 Comment("Fill the buckets with kNotFound (constant capacity).");
3763 for (int i = 0; i < const_bucket_count; i++) {
3764 UnsafeStoreFixedArrayElement(table,
3765 CollectionType::HashTableStartIndex() + i,
3766 not_found_sentinel, barrier_mode);
3767 }
3768
3769 Comment("Fill the data table with undefined (constant capacity).");
3770 for (int i = 0; i < const_data_table_length; i++) {
3771 UnsafeStoreFixedArrayElement(table, const_data_table_start_index + i,
3772 UndefinedConstant(), barrier_mode);
3773 }
3774 } else {
3775 Comment("Fill the buckets with kNotFound.");
3776 TNode<IntPtrT> buckets_start_address =
3777 IntPtrAdd(object_address,
3778 IntPtrConstant(FixedArray::OffsetOfElementAt(
3779 CollectionType::HashTableStartIndex()) -
3780 kHeapObjectTag));
3781 TNode<IntPtrT> buckets_end_address =
3782 IntPtrAdd(buckets_start_address, TimesTaggedSize(bucket_count));
3783
3784 StoreFieldsNoWriteBarrier(buckets_start_address, buckets_end_address,
3785 not_found_sentinel);
3786
3787 Comment("Fill the data table with undefined.");
3788 TNode<IntPtrT> data_start_address = buckets_end_address;
3789 TNode<IntPtrT> data_end_address = IntPtrAdd(
3790 object_address,
3791 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
3792 TimesTaggedSize(fixed_array_length)));
3793
3794 StoreFieldsNoWriteBarrier(data_start_address, data_end_address,
3795 UndefinedConstant());
3796
3797#ifdef DEBUG
3798 TNode<IntPtrT> ptr_diff =
3799 IntPtrSub(data_end_address, buckets_start_address);
3800 TNode<IntPtrT> array_length = LoadAndUntagFixedArrayBaseLength(table);
3801 TNode<IntPtrT> array_data_fields = IntPtrSub(
3802 array_length, IntPtrConstant(CollectionType::HashTableStartIndex()));
3803 TNode<IntPtrT> expected_end =
3804 IntPtrAdd(data_start_address,
3805 TimesTaggedSize(IntPtrMul(
3806 capacity, IntPtrConstant(CollectionType::kEntrySize))));
3807
3808 CSA_DCHECK(this, IntPtrEqual(ptr_diff, TimesTaggedSize(array_data_fields)))((void)0);
3809 CSA_DCHECK(this, IntPtrEqual(expected_end, data_end_address))((void)0);
3810#endif
3811 }
3812
3813 return table;
3814}
3815
3816TNode<OrderedNameDictionary> CodeStubAssembler::AllocateOrderedNameDictionary(
3817 TNode<IntPtrT> capacity) {
3818 TNode<OrderedNameDictionary> table =
3819 AllocateOrderedHashTable<OrderedNameDictionary>(capacity);
3820 StoreFixedArrayElement(table, OrderedNameDictionary::PrefixIndex(),
3821 SmiConstant(PropertyArray::kNoHashSentinel),
3822 SKIP_WRITE_BARRIER);
3823 return table;
3824}
3825
3826TNode<OrderedNameDictionary> CodeStubAssembler::AllocateOrderedNameDictionary(
3827 int capacity) {
3828 return AllocateOrderedNameDictionary(IntPtrConstant(capacity));
3829}
3830
3831TNode<OrderedHashSet> CodeStubAssembler::AllocateOrderedHashSet() {
3832 return AllocateOrderedHashTableWithCapacity<OrderedHashSet>(
3833 IntPtrConstant(OrderedHashSet::kInitialCapacity));
3834}
3835
3836TNode<OrderedHashMap> CodeStubAssembler::AllocateOrderedHashMap() {
3837 return AllocateOrderedHashTableWithCapacity<OrderedHashMap>(
3838 IntPtrConstant(OrderedHashMap::kInitialCapacity));
3839}
3840
3841TNode<JSObject> CodeStubAssembler::AllocateJSObjectFromMap(
3842 TNode<Map> map, base::Optional<TNode<HeapObject>> properties,
3843 base::Optional<TNode<FixedArray>> elements, AllocationFlags flags,
3844 SlackTrackingMode slack_tracking_mode) {
3845 CSA_DCHECK(this, Word32BinaryNot(IsJSFunctionMap(map)))((void)0);
3846 CSA_DCHECK(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),((void)0)
3847 JS_GLOBAL_OBJECT_TYPE)))((void)0);
3848 TNode<IntPtrT> instance_size =
3849 TimesTaggedSize(LoadMapInstanceSizeInWords(map));
3850 TNode<HeapObject> object = AllocateInNewSpace(instance_size, flags);
3851 StoreMapNoWriteBarrier(object, map);
3852 InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
3853 slack_tracking_mode);
3854 return CAST(object)Cast(object);
3855}
3856
3857void CodeStubAssembler::InitializeJSObjectFromMap(
3858 TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size,
3859 base::Optional<TNode<HeapObject>> properties,
3860 base::Optional<TNode<FixedArray>> elements,
3861 SlackTrackingMode slack_tracking_mode) {
3862 // This helper assumes that the object is in new-space, as guarded by the
3863 // check in AllocatedJSObjectFromMap.
3864 if (!properties) {
3865 CSA_DCHECK(this, Word32BinaryNot(IsDictionaryMap((map))))((void)0);
3866 StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
3867 RootIndex::kEmptyFixedArray);
3868 } else {
3869 CSA_DCHECK(this, Word32Or(Word32Or(Word32Or(IsPropertyArray(*properties),((void)0)
3870 IsNameDictionary(*properties)),((void)0)
3871 IsSwissNameDictionary(*properties)),((void)0)
3872 IsEmptyFixedArray(*properties)))((void)0);
3873 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
3874 *properties);
3875 }
3876 if (!elements) {
3877 StoreObjectFieldRoot(object, JSObject::kElementsOffset,
3878 RootIndex::kEmptyFixedArray);
3879 } else {
3880 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset,
3881 *elements);
3882 }
3883 if (slack_tracking_mode == kNoSlackTracking) {
3884 InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3885 } else {
3886 DCHECK_EQ(slack_tracking_mode, kWithSlackTracking)((void) 0);
3887 InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
3888 }
3889}
3890
3891void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
3892 TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size,
3893 int start_offset) {
3894 STATIC_ASSERT(Map::kNoSlackTracking == 0)static_assert(Map::kNoSlackTracking == 0, "Map::kNoSlackTracking == 0"
)
;
3895 CSA_DCHECK(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(((void)0)
3896 LoadMapBitField3(map)))((void)0);
3897 InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
3898 RootIndex::kUndefinedValue);
3899}
3900
3901void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3902 TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size) {
3903 Comment("InitializeJSObjectBodyNoSlackTracking");
3904
3905 // Perform in-object slack tracking if requested.
3906 int start_offset = JSObject::kHeaderSize;
3907 TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
3908 Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3909 STATIC_ASSERT(Map::kNoSlackTracking == 0)static_assert(Map::kNoSlackTracking == 0, "Map::kNoSlackTracking == 0"
)
;
3910 GotoIf(IsSetWord32<Map::Bits3::ConstructionCounterBits>(bit_field3),
3911 &slack_tracking);
3912 Comment("No slack tracking");
3913 InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3914 Goto(&end);
3915
3916 BIND(&slack_tracking)Bind(&slack_tracking);
3917 {
3918 Comment("Decrease construction counter");
3919 // Slack tracking is only done on initial maps.
3920 CSA_DCHECK(this, IsUndefined(LoadMapBackPointer(map)))((void)0);
3921 STATIC_ASSERT(Map::Bits3::ConstructionCounterBits::kLastUsedBit == 31)static_assert(Map::Bits3::ConstructionCounterBits::kLastUsedBit
== 31, "Map::Bits3::ConstructionCounterBits::kLastUsedBit == 31"
)
;
3922 TNode<Word32T> new_bit_field3 = Int32Sub(
3923 bit_field3,
3924 Int32Constant(1 << Map::Bits3::ConstructionCounterBits::kShift));
3925 StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3);
3926 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1)static_assert(Map::kSlackTrackingCounterEnd == 1, "Map::kSlackTrackingCounterEnd == 1"
)
;
3927
3928 // The object still has in-object slack therefore the |unsed_or_unused|
3929 // field contain the "used" value.
3930 TNode<IntPtrT> used_size =
3931 Signed(TimesTaggedSize(ChangeUint32ToWord(LoadObjectField<Uint8T>(
3932 map, Map::kUsedOrUnusedInstanceSizeInWordsOffset))));
3933
3934 Comment("iInitialize filler fields");
3935 InitializeFieldsWithRoot(object, used_size, instance_size,
3936 RootIndex::kOnePointerFillerMap);
3937
3938 Comment("Initialize undefined fields");
3939 InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3940 RootIndex::kUndefinedValue);
3941
3942 STATIC_ASSERT(Map::kNoSlackTracking == 0)static_assert(Map::kNoSlackTracking == 0, "Map::kNoSlackTracking == 0"
)
;
3943 GotoIf(IsClearWord32<Map::Bits3::ConstructionCounterBits>(new_bit_field3),
3944 &complete);
3945 Goto(&end);
3946 }
3947
3948 // Finalize the instance size.
3949 BIND(&complete)Bind(&complete);
3950 {
3951 // ComplextInobjectSlackTracking doesn't allocate and thus doesn't need a
3952 // context.
3953 CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3954 NoContextConstant(), map);
3955 Goto(&end);
3956 }
3957
3958 BIND(&end)Bind(&end);
3959}
3960
3961void CodeStubAssembler::StoreFieldsNoWriteBarrier(TNode<IntPtrT> start_address,
3962 TNode<IntPtrT> end_address,
3963 TNode<Object> value) {
3964 Comment("StoreFieldsNoWriteBarrier");
3965 CSA_DCHECK(this, WordIsAligned(start_address, kTaggedSize))((void)0);
3966 CSA_DCHECK(this, WordIsAligned(end_address, kTaggedSize))((void)0);
3967 BuildFastLoop<IntPtrT>(
3968 start_address, end_address,
3969 [=](TNode<IntPtrT> current) {
3970 UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged, current,
3971 value);
3972 },
3973 kTaggedSize, IndexAdvanceMode::kPost);
3974}
3975
3976void CodeStubAssembler::MakeFixedArrayCOW(TNode<FixedArray> array) {
3977 CSA_DCHECK(this, IsFixedArrayMap(LoadMap(array)))((void)0);
3978 Label done(this);
3979 // The empty fixed array is not modifiable anyway. And we shouldn't change its
3980 // Map.
3981 GotoIf(TaggedEqual(array, EmptyFixedArrayConstant()), &done);
3982 StoreMap(array, FixedCOWArrayMapConstant());
3983 Goto(&done);
3984 BIND(&done)Bind(&done);
3985}
3986
3987TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
3988 TNode<IntPtrT> capacity) {
3989 return UintPtrLessThanOrEqual(capacity,
3990 UintPtrConstant(JSArray::kMaxFastArrayLength));
3991}
3992
3993TNode<JSArray> CodeStubAssembler::AllocateJSArray(
3994 TNode<Map> array_map, TNode<FixedArrayBase> elements, TNode<Smi> length,
3995 base::Optional<TNode<AllocationSite>> allocation_site,
3996 int array_header_size) {
3997 Comment("begin allocation of JSArray passing in elements");
3998 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length))((void)0);
3999
4000 int base_size = array_header_size;
4001 if (allocation_site) {
4002 DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL)((void) 0);
4003 base_size += AllocationMemento::kSize;
4004 }
4005
4006 TNode<IntPtrT> size = IntPtrConstant(base_size);
4007 TNode<JSArray> result =
4008 AllocateUninitializedJSArray(array_map, length, allocation_site, size);
4009 StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, elements);
4010 return result;
4011}
4012
4013namespace {
4014
4015// To prevent GC between the array and elements allocation, the elements
4016// object allocation is folded together with the js-array allocation.
4017TNode<FixedArrayBase> InnerAllocateElements(CodeStubAssembler* csa,
4018 TNode<JSArray> js_array,
4019 int offset) {
4020 return csa->UncheckedCast<FixedArrayBase>(
4021 csa->BitcastWordToTagged(csa->IntPtrAdd(
4022 csa->BitcastTaggedToWord(js_array), csa->IntPtrConstant(offset))));
4023}
4024
4025} // namespace
4026
4027std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
4028CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
4029 ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
4030 base::Optional<TNode<AllocationSite>> allocation_site,
4031 TNode<IntPtrT> capacity, AllocationFlags allocation_flags,
4032 int array_header_size) {
4033 Comment("begin allocation of JSArray with elements");
4034 CHECK_EQ(allocation_flags & ~AllocationFlag::kAllowLargeObjectAllocation, 0)do { bool _cmp = ::v8::base::CmpEQImpl< typename ::v8::base
::pass_value_or_ref<decltype(allocation_flags & ~AllocationFlag
::kAllowLargeObjectAllocation)>::type, typename ::v8::base
::pass_value_or_ref<decltype(0)>::type>((allocation_flags
& ~AllocationFlag::kAllowLargeObjectAllocation), (0)); do
{ if ((__builtin_expect(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s."
, "allocation_flags & ~AllocationFlag::kAllowLargeObjectAllocation"
" " "==" " " "0"); } } while (false); } while (false)
;
4035 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length))((void)0);
4036
4037 TVARIABLE(JSArray, array)TVariable<JSArray> array(this);
4038 TVARIABLE(FixedArrayBase, elements)TVariable<FixedArrayBase> elements(this);
4039
4040 Label out(this), empty(this), nonempty(this);
4041
4042 int capacity_int;
4043 if (TryToInt32Constant(capacity, &capacity_int)) {
4044 if (capacity_int == 0) {
4045 TNode<FixedArray> empty_array = EmptyFixedArrayConstant();
4046 array = AllocateJSArray(array_map, empty_array, length, allocation_site,
4047 array_header_size);
4048 return {array.value(), empty_array};
4049 } else {
4050 Goto(&nonempty);
4051 }
4052 } else {
4053 Branch(WordEqual(capacity, IntPtrConstant(0)), &empty, &nonempty);
4054
4055 BIND(&empty)Bind(&empty);
4056 {
4057 TNode<FixedArray> empty_array = EmptyFixedArrayConstant();
4058 array = AllocateJSArray(array_map, empty_array, length, allocation_site,
4059 array_header_size);
4060 elements = empty_array;
4061 Goto(&out);
4062 }
4063 }
4064
4065 BIND(&nonempty)Bind(&nonempty);
4066 {
4067 int base_size = array_header_size;
4068 if (allocation_site) {
4069 DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL)((void) 0);
4070 base_size += AllocationMemento::kSize;
4071 }
4072
4073 const int elements_offset = base_size;
4074
4075 // Compute space for elements
4076 base_size += FixedArray::kHeaderSize;
4077 TNode<IntPtrT> size = ElementOffsetFromIndex(capacity, kind, base_size);
4078
4079 // For very large arrays in which the requested allocation exceeds the
4080 // maximal size of a regular heap object, we cannot use the allocation
4081 // folding trick. Instead, we first allocate the elements in large object
4082 // space, and then allocate the JSArray (and possibly the allocation
4083 // memento) in new space.
4084 if (allocation_flags & AllocationFlag::kAllowLargeObjectAllocation) {
4085 Label next(this);
4086 GotoIf(IsRegularHeapObjectSize(size), &next);
4087
4088 CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity))(this)->FastCheck(IsValidFastJSArrayCapacity(capacity));
4089
4090 // Allocate and initialize the elements first. Full initialization is
4091 // needed because the upcoming JSArray allocation could trigger GC.
4092 elements = AllocateFixedArray(kind, capacity, allocation_flags);
4093
4094 if (IsDoubleElementsKind(kind)) {
4095 FillFixedDoubleArrayWithZero(CAST(elements.value())Cast(elements.value()), capacity);
4096 } else {
4097 FillFixedArrayWithSmiZero(CAST(elements.value())Cast(elements.value()), capacity);
4098 }
4099
4100 // The JSArray and possibly allocation memento next. Note that
4101 // allocation_flags are *not* passed on here and the resulting JSArray
4102 // will always be in new space.
4103 array = AllocateJSArray(array_map, elements.value(), length,
4104 allocation_site, array_header_size);
4105
4106 Goto(&out);
4107
4108 BIND(&next)Bind(&next);
4109 }
4110 // Fold all objects into a single new space allocation.
4111 array =
4112 AllocateUninitializedJSArray(array_map, length, allocation_site, size);
4113 elements = InnerAllocateElements(this, array.value(), elements_offset);
4114
4115 StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
4116 elements.value());
4117
4118 // Setup elements object.
4119 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize)static_assert(FixedArrayBase::kHeaderSize == 2 * kTaggedSize,
"FixedArrayBase::kHeaderSize == 2 * kTaggedSize")
;
4120 RootIndex elements_map_index = IsDoubleElementsKind(kind)
4121 ? RootIndex::kFixedDoubleArrayMap
4122 : RootIndex::kFixedArrayMap;
4123 DCHECK(RootsTable::IsImmortalImmovable(elements_map_index))((void) 0);
4124 StoreMapNoWriteBarrier(elements.value(), elements_map_index);
4125
4126 CSA_DCHECK(this, WordNotEqual(capacity, IntPtrConstant(0)))((void)0);
4127 TNode<Smi> capacity_smi = SmiTag(capacity);
4128 StoreObjectFieldNoWriteBarrier(elements.value(), FixedArray::kLengthOffset,
4129 capacity_smi);
4130 Goto(&out);
4131 }
4132
4133 BIND(&out)Bind(&out);
4134 return {array.value(), elements.value()};
4135}
4136
4137TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray(
4138 TNode<Map> array_map, TNode<Smi> length,
4139 base::Optional<TNode<AllocationSite>> allocation_site,
4140 TNode<IntPtrT> size_in_bytes) {
4141 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length))((void)0);
4142
4143 // Allocate space for the JSArray and the elements FixedArray in one go.
4144 TNode<HeapObject> array = AllocateInNewSpace(size_in_bytes);
4145
4146 StoreMapNoWriteBarrier(array, array_map);
4147 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
4148 StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
4149 RootIndex::kEmptyFixedArray);
4150
4151 if (allocation_site) {
4152 DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL)((void) 0);
4153 InitializeAllocationMemento(array, IntPtrConstant(JSArray::kHeaderSize),
4154 *allocation_site);
4155 }
4156
4157 return CAST(array)Cast(array);
4158}
4159
4160TNode<JSArray> CodeStubAssembler::AllocateJSArray(
4161 ElementsKind kind, TNode<Map> array_map, TNode<IntPtrT> capacity,
4162 TNode<Smi> length, base::Optional<TNode<AllocationSite>> allocation_site,
4163 AllocationFlags allocation_flags) {
4164 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length))((void)0);
4165
4166 TNode<JSArray> array;
4167 TNode<FixedArrayBase> elements;
4168
4169 std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
4170 kind, array_map, length, allocation_site, capacity, allocation_flags);
4171
4172 Label out(this), nonempty(this);
4173
4174 Branch(WordEqual(capacity, IntPtrConstant(0)), &out, &nonempty);
4175
4176 BIND(&nonempty)Bind(&nonempty);
4177 {
4178 FillFixedArrayWithValue(kind, elements, IntPtrConstant(0), capacity,
4179 RootIndex::kTheHoleValue);
4180 Goto(&out);
4181 }
4182
4183 BIND(&out)Bind(&out);
4184 return array;
4185}
4186
4187TNode<JSArray> CodeStubAssembler::ExtractFastJSArray(TNode<Context> context,
4188 TNode<JSArray> array,
4189 TNode<BInt> begin,
4190 TNode<BInt> count) {
4191 TNode<Map> original_array_map = LoadMap(array);
4192 TNode<Int32T> elements_kind = LoadMapElementsKind(original_array_map);
4193
4194 // Use the canonical map for the Array's ElementsKind
4195 TNode<NativeContext> native_context = LoadNativeContext(context);
4196 TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);
4197
4198 TNode<FixedArrayBase> new_elements = ExtractFixedArray(
4199 LoadElements(array), base::Optional<TNode<BInt>>(begin),
4200 base::Optional<TNode<BInt>>(count),
4201 base::Optional<TNode<BInt>>(base::nullopt),
4202 ExtractFixedArrayFlag::kAllFixedArrays, nullptr, elements_kind);
4203
4204 TNode<JSArray> result = AllocateJSArray(
4205 array_map, new_elements, ParameterToTagged(count), base::nullopt);
4206 return result;
4207}
4208
4209TNode<JSArray> CodeStubAssembler::CloneFastJSArray(
4210 TNode<Context> context, TNode<JSArray> array,
4211 base::Optional<TNode<AllocationSite>> allocation_site,
4212 HoleConversionMode convert_holes) {
4213 // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
4214 // function is also used to copy boilerplates even when the no-elements
4215 // protector is invalid. This function should be renamed to reflect its uses.
4216
4217 TNode<Number> length = LoadJSArrayLength(array);
4218 TNode<FixedArrayBase> new_elements;
4219 TVARIABLE(FixedArrayBase, var_new_elements)TVariable<FixedArrayBase> var_new_elements(this);
4220 TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)))TVariable<Int32T> var_elements_kind(LoadMapElementsKind
(LoadMap(array)), this)
;
4221
4222 Label allocate_jsarray(this), holey_extract(this),
4223 allocate_jsarray_main(this);
4224
4225 bool need_conversion =
4226 convert_holes == HoleConversionMode::kConvertToUndefined;
4227 if (need_conversion) {
4228 // We need to take care of holes, if the array is of holey elements kind.
4229 GotoIf(IsHoleyFastElementsKindForRead(var_elements_kind.value()),
4230 &holey_extract);
4231 }
4232
4233 // Simple extraction that preserves holes.
4234 new_elements = ExtractFixedArray(
4235 LoadElements(array),
4236 base::Optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
4237 base::Optional<TNode<BInt>>(TaggedToParameter<BInt>(CAST(length)Cast(length))),
4238 base::Optional<TNode<BInt>>(base::nullopt),
4239 ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, nullptr,
4240 var_elements_kind.value());
4241 var_new_elements = new_elements;
4242 Goto(&allocate_jsarray);
4243
4244 if (need_conversion) {
4245 BIND(&holey_extract)Bind(&holey_extract);
4246 // Convert holes to undefined.
4247 TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant())TVariable<BoolT> var_holes_converted(Int32FalseConstant
(), this)
;
4248 // Copy |array|'s elements store. The copy will be compatible with the
4249 // original elements kind unless there are holes in the source. Any holes
4250 // get converted to undefined, hence in that case the copy is compatible
4251 // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
4252 // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
4253 // ExtractFixedArrayFlag::kDontCopyCOW.
4254 new_elements = ExtractFixedArray(
4255 LoadElements(array),
4256 base::Optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
4257 base::Optional<TNode<BInt>>(TaggedToParameter<BInt>(CAST(length)Cast(length))),
4258 base::Optional<TNode<BInt>>(base::nullopt),
4259 ExtractFixedArrayFlag::kAllFixedArrays, &var_holes_converted);
4260 var_new_elements = new_elements;
4261 // If the array type didn't change, use the original elements kind.
4262 GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
4263 // Otherwise use PACKED_ELEMENTS for the target's elements kind.
4264 var_elements_kind = Int32Constant(PACKED_ELEMENTS);
4265 Goto(&allocate_jsarray);
4266 }
4267
4268 BIND(&allocate_jsarray)Bind(&allocate_jsarray);
4269
4270 // Handle any nonextensible elements kinds
4271 CSA_DCHECK(this, IsElementsKindLessThanOrEqual(((void)0)
4272 var_elements_kind.value(),((void)0)
4273 LAST_ANY_NONEXTENSIBLE_ELEMENTS_KIND))((void)0);
4274 GotoIf(IsElementsKindLessThanOrEqual(var_elements_kind.value(),
4275 LAST_FAST_ELEMENTS_KIND),
4276 &allocate_jsarray_main);
4277 var_elements_kind = Int32Constant(PACKED_ELEMENTS);
4278 Goto(&allocate_jsarray_main);
4279
4280 BIND(&allocate_jsarray_main)Bind(&allocate_jsarray_main);
4281 // Use the cannonical map for the chosen elements kind.
4282 TNode<NativeContext> native_context = LoadNativeContext(context);
4283 TNode<Map> array_map =
4284 LoadJSArrayElementsMap(var_elements_kind.value(), native_context);
4285
4286 TNode<JSArray> result = AllocateJSArray(array_map, var_new_elements.value(),
4287 CAST(length)Cast(length), allocation_site);
4288 return result;
4289}
4290
4291template <typename TIndex>
4292TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
4293 ElementsKind kind, TNode<TIndex> capacity, AllocationFlags flags,
4294 base::Optional<TNode<Map>> fixed_array_map) {
4295 static_assert(
4296 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4297 "Only Smi or IntPtrT capacity is allowed");
4298 Comment("AllocateFixedArray");
4299 CSA_DCHECK(this,((void)0)
4300 IntPtrOrSmiGreaterThan(capacity, IntPtrOrSmiConstant<TIndex>(0)))((void)0);
4301
4302 const intptr_t kMaxLength = IsDoubleElementsKind(kind)
4303 ? FixedDoubleArray::kMaxLength
4304 : FixedArray::kMaxLength;
4305 intptr_t capacity_constant;
4306 if (ToParameterConstant(capacity, &capacity_constant)) {
4307 CHECK_LE(capacity_constant, kMaxLength)do { bool _cmp = ::v8::base::CmpLEImpl< typename ::v8::base
::pass_value_or_ref<decltype(capacity_constant)>::type,
typename ::v8::base::pass_value_or_ref<decltype(kMaxLength
)>::type>((capacity_constant), (kMaxLength)); do { if (
(__builtin_expect(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s."
, "capacity_constant" " " "<=" " " "kMaxLength"); } } while
(false); } while (false)
;
4308 } else {
4309 Label if_out_of_memory(this, Label::kDeferred), next(this);
4310 Branch(IntPtrOrSmiGreaterThan(capacity, IntPtrOrSmiConstant<TIndex>(
4311 static_cast<int>(kMaxLength))),
4312 &if_out_of_memory, &next);
4313
4314 BIND(&if_out_of_memory)Bind(&if_out_of_memory);
4315 CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
4316 NoContextConstant());
4317 Unreachable();
4318
4319 BIND(&next)Bind(&next);
4320 }
4321
4322 TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind);
4323
4324 if (IsDoubleElementsKind(kind)) flags |= AllocationFlag::kDoubleAlignment;
4325 // Allocate both array and elements object, and initialize the JSArray.
4326 TNode<HeapObject> array = Allocate(total_size, flags);
4327 if (fixed_array_map) {
4328 // Conservatively only skip the write barrier if there are no allocation
4329 // flags, this ensures that the object hasn't ended up in LOS. Note that the
4330 // fixed array map is currently always immortal and technically wouldn't
4331 // need the write barrier even in LOS, but it's better to not take chances
4332 // in case this invariant changes later, since it's difficult to enforce
4333 // locally here.
4334 if (flags == AllocationFlag::kNone) {
4335 StoreMapNoWriteBarrier(array, *fixed_array_map);
4336 } else {
4337 StoreMap(array, *fixed_array_map);
4338 }
4339 } else {
4340 RootIndex map_index = IsDoubleElementsKind(kind)
4341 ? RootIndex::kFixedDoubleArrayMap
4342 : RootIndex::kFixedArrayMap;
4343 DCHECK(RootsTable::IsImmortalImmovable(map_index))((void) 0);
4344 StoreMapNoWriteBarrier(array, map_index);
4345 }
4346 StoreObjectFieldNoWriteBarrier(array, FixedArrayBase::kLengthOffset,
4347 ParameterToTagged(capacity));
4348 return UncheckedCast<FixedArrayBase>(array);
4349}
4350
4351// There is no need to export the Smi version since it is only used inside
4352// code-stub-assembler.
4353template V8_EXPORT_PRIVATE TNode<FixedArrayBase>
4354 CodeStubAssembler::AllocateFixedArray<IntPtrT>(ElementsKind, TNode<IntPtrT>,
4355 AllocationFlags,
4356 base::Optional<TNode<Map>>);
4357
4358template <typename TIndex>
4359TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
4360 TNode<FixedArrayBase> source, TNode<TIndex> first, TNode<TIndex> count,
4361 TNode<TIndex> capacity, TNode<Map> source_map, ElementsKind from_kind,
4362 AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
4363 HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
4364 base::Optional<TNode<Int32T>> source_elements_kind) {
4365 static_assert(
4366 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4367 "Only Smi or IntPtrT first, count, and capacity are allowed");
4368
4369 DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays)((void) 0);
4370 CSA_DCHECK(this,((void)0)
4371 IntPtrOrSmiNotEqual(IntPtrOrSmiConstant<TIndex>(0), capacity))((void)0);
4372 CSA_DCHECK(this, TaggedEqual(source_map, LoadMap(source)))((void)0);
4373
4374 TVARIABLE(FixedArrayBase, var_result)TVariable<FixedArrayBase> var_result(this);
4375 TVARIABLE(Map, var_target_map, source_map)TVariable<Map> var_target_map(source_map, this);
4376
4377 Label done(this, {&var_result}), is_cow(this),
4378 new_space_handler(this, {&var_target_map});
4379
4380 // If source_map is either FixedDoubleArrayMap, or FixedCOWArrayMap but
4381 // we can't just use COW, use FixedArrayMap as the target map. Otherwise, use
4382 // source_map as the target map.
4383 if (IsDoubleElementsKind(from_kind)) {
4384 CSA_DCHECK(this, IsFixedDoubleArrayMap(source_map))((void)0);
4385 var_target_map = FixedArrayMapConstant();
4386 Goto(&new_space_handler);
4387 } else {
4388 CSA_DCHECK(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)))((void)0);
4389 Branch(TaggedEqual(var_target_map.value(), FixedCOWArrayMapConstant()),
4390 &is_cow, &new_space_handler);
4391
4392 BIND(&is_cow)Bind(&is_cow);
4393 {
4394 // |source| is a COW array, so we don't actually need to allocate a new
4395 // array unless:
4396 // 1) |extract_flags| forces us to, or
4397 // 2) we're asked to extract only part of the |source| (|first| != 0).
4398 if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
4399 Branch(IntPtrOrSmiNotEqual(IntPtrOrSmiConstant<TIndex>(0), first),
4400 &new_space_handler, [&] {
4401 var_result = source;
4402 Goto(&done);
4403 });
4404 } else {
4405 var_target_map = FixedArrayMapConstant();
4406 Goto(&new_space_handler);
4407 }
4408 }
4409 }
4410
4411 BIND(&new_space_handler)Bind(&new_space_handler);
4412 {
4413 Comment("Copy FixedArray in young generation");
4414 // We use PACKED_ELEMENTS to tell AllocateFixedArray and
4415 // CopyFixedArrayElements that we want a FixedArray.
4416 const ElementsKind to_kind = PACKED_ELEMENTS;
4417 TNode<FixedArrayBase> to_elements = AllocateFixedArray(
4418 to_kind, capacity, allocation_flags, var_target_map.value());
4419 var_result = to_elements;
4420
4421#ifndef V8_ENABLE_SINGLE_GENERATION
4422#ifdef DEBUG
4423 TNode<IntPtrT> object_word = BitcastTaggedToWord(to_elements);
4424 TNode<IntPtrT> object_page = PageFromAddress(object_word);
4425 TNode<IntPtrT> page_flags =
4426 Load<IntPtrT>(object_page, IntPtrConstant(Page::kFlagsOffset));
4427 CSA_DCHECK(((void)0)
4428 this,((void)0)
4429 WordNotEqual(((void)0)
4430 WordAnd(page_flags,((void)0)
4431 IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),((void)0)
4432 IntPtrConstant(0)))((void)0);
4433#endif
4434#endif
4435
4436 if (convert_holes == HoleConversionMode::kDontConvert &&
4437 !IsDoubleElementsKind(from_kind)) {
4438 // We can use CopyElements (memcpy) because we don't need to replace or
4439 // convert any values. Since {to_elements} is in new-space, CopyElements
4440 // will efficiently use memcpy.
4441 FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
4442 RootIndex::kTheHoleValue);
4443 CopyElements(to_kind, to_elements, IntPtrConstant(0), source,
4444 ParameterToIntPtr(first), ParameterToIntPtr(count),
4445 SKIP_WRITE_BARRIER);
4446 } else {
4447 CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
4448 count, capacity, SKIP_WRITE_BARRIER, convert_holes,
4449 var_holes_converted);
4450 }
4451 Goto(&done);
4452 }
4453
4454 BIND(&done)Bind(&done);
4455 return UncheckedCast<FixedArray>(var_result.value());
4456}
4457
4458template <typename TIndex>
4459TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
4460 TNode<FixedArrayBase> from_array, TNode<TIndex> first, TNode<TIndex> count,
4461 TNode<TIndex> capacity, TNode<Map> fixed_array_map,
4462 TVariable<BoolT>* var_holes_converted, AllocationFlags allocation_flags,
4463 ExtractFixedArrayFlags extract_flags) {
4464 static_assert(
4465 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4466 "Only Smi or IntPtrT first, count, and capacity are allowed");
4467
4468 DCHECK_NE(var_holes_converted, nullptr)((void) 0);
4469 CSA_DCHECK(this, IsFixedDoubleArrayMap(fixed_array_map))((void)0);
4470
4471 TVARIABLE(FixedArrayBase, var_result)TVariable<FixedArrayBase> var_result(this);
4472 const ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
4473 TNode<FixedArrayBase> to_elements =
4474 AllocateFixedArray(kind, capacity, allocation_flags, fixed_array_map);
4475 var_result = to_elements;
4476 // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
4477 // |var_holes_converted| is set to False preliminarily.
4478 *var_holes_converted = Int32FalseConstant();
4479
4480 // The construction of the loop and the offsets for double elements is
4481 // extracted from CopyFixedArrayElements.
4482 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(from_array, kind))((void)0);
4483 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize)static_assert(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize
, "FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize")
;
4484
4485 Comment("[ ExtractFixedDoubleArrayFillingHoles");
4486
4487 // This copy can trigger GC, so we pre-initialize the array with holes.
4488 FillFixedArrayWithValue(kind, to_elements, IntPtrOrSmiConstant<TIndex>(0),
4489 capacity, RootIndex::kTheHoleValue);
4490
4491 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
4492 TNode<IntPtrT> first_from_element_offset =
4493 ElementOffsetFromIndex(first, kind, 0);
4494 TNode<IntPtrT> limit_offset = IntPtrAdd(first_from_element_offset,
4495 IntPtrConstant(first_element_offset));
4496 TVARIABLE(IntPtrT, var_from_offset,TVariable<IntPtrT> var_from_offset(ElementOffsetFromIndex
(IntPtrOrSmiAdd(first, count), kind, first_element_offset), this
)
4497 ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count), kind,TVariable<IntPtrT> var_from_offset(ElementOffsetFromIndex
(IntPtrOrSmiAdd(first, count), kind, first_element_offset), this
)
4498 first_element_offset))TVariable<IntPtrT> var_from_offset(ElementOffsetFromIndex
(IntPtrOrSmiAdd(first, count), kind, first_element_offset), this
)
;
4499
4500 Label decrement(this, {&var_from_offset}), done(this);
4501 TNode<IntPtrT> to_array_adjusted =
4502 IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);
4503
4504 Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
4505
4506 BIND(&decrement)Bind(&decrement);
4507 {
4508 TNode<IntPtrT> from_offset =
4509 IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
4510 var_from_offset = from_offset;
4511
4512 TNode<IntPtrT> to_offset = from_offset;
4513
4514 Label if_hole(this);
4515
4516 TNode<Float64T> value = LoadDoubleWithHoleCheck(
4517 from_array, var_from_offset.value(), &if_hole, MachineType::Float64());
4518
4519 StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
4520 to_offset, value);
4521
4522 TNode<BoolT> compare = WordNotEqual(from_offset, limit_offset);
4523 Branch(compare, &decrement, &done);
4524
4525 BIND(&if_hole)Bind(&if_hole);
4526 // We are unlucky: there are holes! We need to restart the copy, this time
4527 // we will copy the FixedDoubleArray to a new FixedArray with undefined
4528 // replacing holes. We signal this to the caller through
4529 // |var_holes_converted|.
4530 *var_holes_converted = Int32TrueConstant();
4531 to_elements =
4532 ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
4533 kind, allocation_flags, extract_flags,
4534 HoleConversionMode::kConvertToUndefined);
4535 var_result = to_elements;
4536 Goto(&done);
4537 }
4538
4539 BIND(&done)Bind(&done);
4540 Comment("] ExtractFixedDoubleArrayFillingHoles");
4541 return var_result.value();
4542}
4543
4544template <typename TIndex>
4545TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
4546 TNode<FixedArrayBase> source, base::Optional<TNode<TIndex>> first,
4547 base::Optional<TNode<TIndex>> count, base::Optional<TNode<TIndex>> capacity,
4548 ExtractFixedArrayFlags extract_flags, TVariable<BoolT>* var_holes_converted,
4549 base::Optional<TNode<Int32T>> source_elements_kind) {
4550 static_assert(
4551 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4552 "Only Smi or IntPtrT first, count, and capacity are allowed");
4553 DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||((void) 0)
4554 extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays)((void) 0);
4555 // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should
4556 // not be used, because that disables the iteration which detects holes.
4557 DCHECK_IMPLIES(var_holes_converted != nullptr,((void) 0)
4558 !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW))((void) 0);
4559 HoleConversionMode convert_holes =
4560 var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
4561 : HoleConversionMode::kDontConvert;
4562 TVARIABLE(FixedArrayBase, var_result)TVariable<FixedArrayBase> var_result(this);
4563 auto allocation_flags = AllocationFlag::kAllowLargeObjectAllocation;
4564 if (!first) {
4565 first = IntPtrOrSmiConstant<TIndex>(0);
4566 }
4567 if (!count) {
4568 count = IntPtrOrSmiSub(
4569 TaggedToParameter<TIndex>(LoadFixedArrayBaseLength(source)), *first);
4570
4571 CSA_DCHECK(this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant<TIndex>(0),((void)0)
4572 *count))((void)0);
4573 }
4574 if (!capacity) {
4575 capacity = *count;
4576 } else {
4577 CSA_DCHECK(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(((void)0)
4578 IntPtrOrSmiAdd(*first, *count), *capacity)))((void)0);
4579 }
4580
4581 Label if_fixed_double_array(this), empty(this), done(this, &var_result);
4582 TNode<Map> source_map = LoadMap(source);
4583 GotoIf(IntPtrOrSmiEqual(IntPtrOrSmiConstant<TIndex>(0), *capacity), &empty);
4584
4585 if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4586 if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4587 GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
4588 } else {
4589 CSA_DCHECK(this, IsFixedDoubleArrayMap(source_map))((void)0);
4590 }
4591 }
4592
4593 if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4594 // Here we can only get |source| as FixedArray, never FixedDoubleArray.
4595 // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
4596 TNode<FixedArray> to_elements = ExtractToFixedArray(
4597 source, *first, *count, *capacity, source_map, PACKED_ELEMENTS,
4598 allocation_flags, extract_flags, convert_holes, var_holes_converted,
4599 source_elements_kind);
4600 var_result = to_elements;
4601 Goto(&done);
4602 }
4603
4604 if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4605 BIND(&if_fixed_double_array)Bind(&if_fixed_double_array);
4606 Comment("Copy FixedDoubleArray");
4607
4608 if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4609 TNode<FixedArrayBase> to_elements = ExtractFixedDoubleArrayFillingHoles(
4610 source, *first, *count, *capacity, source_map, var_holes_converted,
4611 allocation_flags, extract_flags);
4612 var_result = to_elements;
4613 } else {
4614 // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
4615 // the target are FixedDoubleArray. That it is PACKED or HOLEY does not
4616 // matter.
4617 ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
4618 TNode<FixedArrayBase> to_elements =
4619 AllocateFixedArray(kind, *capacity, allocation_flags, source_map);
4620 FillFixedArrayWithValue(kind, to_elements, *count, *capacity,
4621 RootIndex::kTheHoleValue);
4622 CopyElements(kind, to_elements, IntPtrConstant(0), source,
4623 ParameterToIntPtr(*first), ParameterToIntPtr(*count));
4624 var_result = to_elements;
4625 }
4626
4627 Goto(&done);
4628 }
4629
4630 BIND(&empty)Bind(&empty);
4631 {
4632 Comment("Copy empty array");
4633
4634 var_result = EmptyFixedArrayConstant();
4635 Goto(&done);
4636 }
4637
4638 BIND(&done)Bind(&done);
4639 return var_result.value();
4640}
4641
4642template V8_EXPORT_PRIVATE TNode<FixedArrayBase>
4643CodeStubAssembler::ExtractFixedArray<Smi>(
4644 TNode<FixedArrayBase>, base::Optional<TNode<Smi>>,
4645 base::Optional<TNode<Smi>>, base::Optional<TNode<Smi>>,
4646 ExtractFixedArrayFlags, TVariable<BoolT>*, base::Optional<TNode<Int32T>>);
4647
4648template V8_EXPORT_PRIVATE TNode<FixedArrayBase>
4649CodeStubAssembler::ExtractFixedArray<IntPtrT>(
4650 TNode<FixedArrayBase>, base::Optional<TNode<IntPtrT>>,
4651 base::Optional<TNode<IntPtrT>>, base::Optional<TNode<IntPtrT>>,
4652 ExtractFixedArrayFlags, TVariable<BoolT>*, base::Optional<TNode<Int32T>>);
4653
4654void CodeStubAssembler::InitializePropertyArrayLength(
4655 TNode<PropertyArray> property_array, TNode<IntPtrT> length) {
4656 CSA_DCHECK(this, IntPtrGreaterThan(length, IntPtrConstant(0)))((void)0);
4657 CSA_DCHECK(this,((void)0)
4658 IntPtrLessThanOrEqual(((void)0)
4659 length, IntPtrConstant(PropertyArray::LengthField::kMax)))((void)0);
4660 StoreObjectFieldNoWriteBarrier(
4661 property_array, PropertyArray::kLengthAndHashOffset, SmiTag(length));
4662}
4663
4664TNode<PropertyArray> CodeStubAssembler::AllocatePropertyArray(
4665 TNode<IntPtrT> capacity) {
4666 CSA_DCHECK(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)))((void)0);
4667 TNode<IntPtrT> total_size = GetPropertyArrayAllocationSize(capacity);
4668
4669 TNode<HeapObject> array = Allocate(total_size, AllocationFlag::kNone);
4670 RootIndex map_index = RootIndex::kPropertyArrayMap;
4671 DCHECK(RootsTable::IsImmortalImmovable(map_index))((void) 0);
4672 StoreMapNoWriteBarrier(array, map_index);
4673 TNode<PropertyArray> property_array = CAST(array)Cast(array);
4674 InitializePropertyArrayLength(property_array, capacity);
4675 return property_array;
4676}
4677
4678void CodeStubAssembler::FillPropertyArrayWithUndefined(
4679 TNode<PropertyArray> array, TNode<IntPtrT> from_index,
4680 TNode<IntPtrT> to_index) {
4681 ElementsKind kind = PACKED_ELEMENTS;
4682 TNode<Oddball> value = UndefinedConstant();
4683 BuildFastArrayForEach(
4684 array, kind, from_index, to_index,
4685 [this, value](TNode<HeapObject> array, TNode<IntPtrT> offset) {
4686 StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
4687 value);
4688 });
4689}
4690
4691template <typename TIndex>
4692void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind,
4693 TNode<FixedArrayBase> array,
4694 TNode<TIndex> from_index,
4695 TNode<TIndex> to_index,
4696 RootIndex value_root_index) {
4697 static_assert(
4698 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4699 "Only Smi or IntPtrT from and to are allowed");
4700 CSA_SLOW_DCHECK(this, IsFixedArrayWithKind(array, kind))((void)0);
4701 DCHECK(value_root_index == RootIndex::kTheHoleValue ||((void) 0)
4702 value_root_index == RootIndex::kUndefinedValue)((void) 0);
4703
4704 // Determine the value to initialize the {array} based
4705 // on the {value_root_index} and the elements {kind}.
4706 TNode<Object> value = LoadRoot(value_root_index);
4707 TNode<Float64T> float_value;
4708 if (IsDoubleElementsKind(kind)) {
4709 float_value = LoadHeapNumberValue(CAST(value)Cast(value));
4710 }
4711
4712 BuildFastArrayForEach(
4713 array, kind, from_index, to_index,
4714 [this, value, float_value, kind](TNode<HeapObject> array,
4715 TNode<IntPtrT> offset) {
4716 if (IsDoubleElementsKind(kind)) {
4717 StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
4718 float_value);
4719 } else {
4720 StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
4721 value);
4722 }
4723 });
4724}
4725
4726template V8_EXPORT_PRIVATE void
4727 CodeStubAssembler::FillFixedArrayWithValue<IntPtrT>(ElementsKind,
4728 TNode<FixedArrayBase>,
4729 TNode<IntPtrT>,
4730 TNode<IntPtrT>,
4731 RootIndex);
4732template V8_EXPORT_PRIVATE void CodeStubAssembler::FillFixedArrayWithValue<Smi>(
4733 ElementsKind, TNode<FixedArrayBase>, TNode<Smi>, TNode<Smi>, RootIndex);
4734
4735void CodeStubAssembler::StoreDoubleHole(TNode<HeapObject> object,
4736 TNode<IntPtrT> offset) {
4737 TNode<UintPtrT> double_hole =
4738 Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4739 : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4740 // TODO(danno): When we have a Float32/Float64 wrapper class that
4741 // preserves double bits during manipulation, remove this code/change
4742 // this to an indexed Float64 store.
4743 if (Is64()) {
4744 StoreNoWriteBarrier(MachineRepresentation::kWord64, object, offset,
4745 double_hole);
4746 } else {
4747 StoreNoWriteBarrier(MachineRepresentation::kWord32, object, offset,
4748 double_hole);
4749 StoreNoWriteBarrier(MachineRepresentation::kWord32, object,
4750 IntPtrAdd(offset, IntPtrConstant(kInt32Size)),
4751 double_hole);
4752 }
4753}
4754
4755void CodeStubAssembler::StoreFixedDoubleArrayHole(TNode<FixedDoubleArray> array,
4756 TNode<IntPtrT> index) {
4757 TNode<IntPtrT> offset = ElementOffsetFromIndex(
4758 index, PACKED_DOUBLE_ELEMENTS, FixedArray::kHeaderSize - kHeapObjectTag);
4759 CSA_DCHECK(this, IsOffsetInBounds(((void)0)
4760 offset, LoadAndUntagFixedArrayBaseLength(array),((void)0)
4761 FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS))((void)0);
4762 StoreDoubleHole(array, offset);
4763}
4764
4765void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
4766 TNode<IntPtrT> length) {
4767 CSA_DCHECK(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)))((void)0);
4768
4769 TNode<IntPtrT> byte_length = TimesTaggedSize(length);
4770 CSA_DCHECK(this, UintPtrLessThan(length, byte_length))((void)0);
4771
4772 static const int32_t fa_base_data_offset =
4773 FixedArray::kHeaderSize - kHeapObjectTag;
4774 TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4775 IntPtrConstant(fa_base_data_offset));
4776
4777 // Call out to memset to perform initialization.
4778 TNode<ExternalReference> memset =
4779 ExternalConstant(ExternalReference::libc_memset_function());
4780 STATIC_ASSERT(kSizetSize == kIntptrSize)static_assert(kSizetSize == kIntptrSize, "kSizetSize == kIntptrSize"
)
;
4781 CallCFunction(memset, MachineType::Pointer(),
4782 std::make_pair(MachineType::Pointer(), backing_store),
4783 std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
4784 std::make_pair(MachineType::UintPtr(), byte_length));
4785}
4786
4787void CodeStubAssembler::FillFixedDoubleArrayWithZero(
4788 TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
4789 CSA_DCHECK(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)))((void)0);
4790
4791 TNode<IntPtrT> byte_length = TimesDoubleSize(length);
4792 CSA_DCHECK(this, UintPtrLessThan(length, byte_length))((void)0);
4793
4794 static const int32_t fa_base_data_offset =
4795 FixedDoubleArray::kHeaderSize - kHeapObjectTag;
4796 TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4797 IntPtrConstant(fa_base_data_offset));
4798
4799 // Call out to memset to perform initialization.
4800 TNode<ExternalReference> memset =
4801 ExternalConstant(ExternalReference::libc_memset_function());
4802 STATIC_ASSERT(kSizetSize == kIntptrSize)static_assert(kSizetSize == kIntptrSize, "kSizetSize == kIntptrSize"
)
;
4803 CallCFunction(memset, MachineType::Pointer(),
4804 std::make_pair(MachineType::Pointer(), backing_store),
4805 std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
4806 std::make_pair(MachineType::UintPtr(), byte_length));
4807}
4808
4809void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
4810 TNode<Object> object, Label* interesting) {
4811 Label finished(this);
4812 TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
4813 TNode<IntPtrT> object_page = PageFromAddress(object_word);
4814 TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(Load(
4815 MachineType::IntPtr(), object_page, IntPtrConstant(Page::kFlagsOffset)));
4816 Branch(
4817 WordEqual(WordAnd(page_flags,
4818 IntPtrConstant(
4819 MemoryChunk::kPointersFromHereAreInterestingMask)),
4820 IntPtrConstant(0)),
4821 &finished, interesting);
4822 BIND(&finished)Bind(&finished);
4823}
4824
4825void CodeStubAssembler::MoveElements(ElementsKind kind,
4826 TNode<FixedArrayBase> elements,
4827 TNode<IntPtrT> dst_index,
4828 TNode<IntPtrT> src_index,
4829 TNode<IntPtrT> length) {
4830 Label finished(this);
4831 Label needs_barrier(this);
4832#ifdef V8_DISABLE_WRITE_BARRIERS
4833 const bool needs_barrier_check = false;
4834#else
4835 const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4836#endif // V8_DISABLE_WRITE_BARRIERS
4837
4838 DCHECK(IsFastElementsKind(kind))((void) 0);
4839 CSA_DCHECK(this, IsFixedArrayWithKind(elements, kind))((void)0);
4840 CSA_DCHECK(this,((void)0)
4841 IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),((void)0)
4842 LoadAndUntagFixedArrayBaseLength(elements)))((void)0);
4843 CSA_DCHECK(this,((void)0)
4844 IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),((void)0)
4845 LoadAndUntagFixedArrayBaseLength(elements)))((void)0);
4846
4847 // The write barrier can be ignored if {dst_elements} is in new space, or if
4848 // the elements pointer is FixedDoubleArray.
4849 if (needs_barrier_check) {
4850 JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
4851 }
4852
4853 const TNode<IntPtrT> source_byte_length =
4854 IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4855 static const int32_t fa_base_data_offset =
4856 FixedArrayBase::kHeaderSize - kHeapObjectTag;
4857 TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
4858 TNode<IntPtrT> target_data_ptr =
4859 IntPtrAdd(elements_intptr,
4860 ElementOffsetFromIndex(dst_index, kind, fa_base_data_offset));
4861 TNode<IntPtrT> source_data_ptr =
4862 IntPtrAdd(elements_intptr,
4863 ElementOffsetFromIndex(src_index, kind, fa_base_data_offset));
4864 TNode<ExternalReference> memmove =
4865 ExternalConstant(ExternalReference::libc_memmove_function());
4866 CallCFunction(memmove, MachineType::Pointer(),
4867 std::make_pair(MachineType::Pointer(), target_data_ptr),
4868 std::make_pair(MachineType::Pointer(), source_data_ptr),
4869 std::make_pair(MachineType::UintPtr(), source_byte_length));
4870
4871 if (needs_barrier_check) {
4872 Goto(&finished);
4873
4874 BIND(&needs_barrier)Bind(&needs_barrier);
4875 {
4876 const TNode<IntPtrT> begin = src_index;
4877 const TNode<IntPtrT> end = IntPtrAdd(begin, length);
4878
4879 // If dst_index is less than src_index, then walk forward.
4880 const TNode<IntPtrT> delta =
4881 IntPtrMul(IntPtrSub(dst_index, begin),
4882 IntPtrConstant(ElementsKindToByteSize(kind)));
4883 auto loop_body = [&](TNode<HeapObject> array, TNode<IntPtrT> offset) {
4884 const TNode<AnyTaggedT> element = Load<AnyTaggedT>(array, offset);
4885 const TNode<WordT> delta_offset = IntPtrAdd(offset, delta);
4886 Store(array, delta_offset, element);
4887 };
4888
4889 Label iterate_forward(this);
4890 Label iterate_backward(this);
4891 Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
4892 &iterate_backward);
4893 BIND(&iterate_forward)Bind(&iterate_forward);
4894 {
4895 // Make a loop for the stores.
4896 BuildFastArrayForEach(elements, kind, begin, end, loop_body,
4897 ForEachDirection::kForward);
4898 Goto(&finished);
4899 }
4900
4901 BIND(&iterate_backward)Bind(&iterate_backward);
4902 {
4903 BuildFastArrayForEach(elements, kind, begin, end, loop_body,
4904 ForEachDirection::kReverse);
4905 Goto(&finished);
4906 }
4907 }
4908 BIND(&finished)Bind(&finished);
4909 }
4910}
4911
4912void CodeStubAssembler::CopyElements(ElementsKind kind,
4913 TNode<FixedArrayBase> dst_elements,
4914 TNode<IntPtrT> dst_index,
4915 TNode<FixedArrayBase> src_elements,
4916 TNode<IntPtrT> src_index,
4917 TNode<IntPtrT> length,
4918 WriteBarrierMode write_barrier) {
4919 Label finished(this);
4920 Label needs_barrier(this);
4921#ifdef V8_DISABLE_WRITE_BARRIERS
4922 const bool needs_barrier_check = false;
4923#else
4924 const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4925#endif // V8_DISABLE_WRITE_BARRIERS
4926
4927 DCHECK(IsFastElementsKind(kind))((void) 0);
4928 CSA_DCHECK(this, IsFixedArrayWithKind(dst_elements, kind))((void)0);
4929 CSA_DCHECK(this, IsFixedArrayWithKind(src_elements, kind))((void)0);
4930 CSA_DCHECK(this, IntPtrLessThanOrEqual(((void)0)
4931 IntPtrAdd(dst_index, length),((void)0)
4932 LoadAndUntagFixedArrayBaseLength(dst_elements)))((void)0);
4933 CSA_DCHECK(this, IntPtrLessThanOrEqual(((void)0)
4934 IntPtrAdd(src_index, length),((void)0)
4935 LoadAndUntagFixedArrayBaseLength(src_elements)))((void)0);
4936 CSA_DCHECK(this, Word32Or(TaggedNotEqual(dst_elements, src_elements),((void)0)
4937 IntPtrEqual(length, IntPtrConstant(0))))((void)0);
4938
4939 // The write barrier can be ignored if {dst_elements} is in new space, or if
4940 // the elements pointer is FixedDoubleArray.
4941 if (needs_barrier_check) {
4942 JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
4943 }
4944
4945 TNode<IntPtrT> source_byte_length =
4946 IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4947 static const int32_t fa_base_data_offset =
4948 FixedArrayBase::kHeaderSize - kHeapObjectTag;
4949 TNode<IntPtrT> src_offset_start =
4950 ElementOffsetFromIndex(src_index, kind, fa_base_data_offset);
4951 TNode<IntPtrT> dst_offset_start =
4952 ElementOffsetFromIndex(dst_index, kind, fa_base_data_offset);
4953 TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
4954 TNode<IntPtrT> source_data_ptr =
4955 IntPtrAdd(src_elements_intptr, src_offset_start);
4956 TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
4957 TNode<IntPtrT> dst_data_ptr =
4958 IntPtrAdd(dst_elements_intptr, dst_offset_start);
4959 TNode<ExternalReference> memcpy =
4960 ExternalConstant(ExternalReference::libc_memcpy_function());
4961 CallCFunction(memcpy, MachineType::Pointer(),
4962 std::make_pair(MachineType::Pointer(), dst_data_ptr),
4963 std::make_pair(MachineType::Pointer(), source_data_ptr),
4964 std::make_pair(MachineType::UintPtr(), source_byte_length));
4965
4966 if (needs_barrier_check) {
4967 Goto(&finished);
4968
4969 BIND(&needs_barrier)Bind(&needs_barrier);
4970 {
4971 const TNode<IntPtrT> begin = src_index;
4972 const TNode<IntPtrT> end = IntPtrAdd(begin, length);
4973 const TNode<IntPtrT> delta =
4974 IntPtrMul(IntPtrSub(dst_index, src_index),
4975 IntPtrConstant(ElementsKindToByteSize(kind)));
4976 BuildFastArrayForEach(
4977 src_elements, kind, begin, end,
4978 [&](TNode<HeapObject> array, TNode<IntPtrT> offset) {
4979 const TNode<AnyTaggedT> element = Load<AnyTaggedT>(array, offset);
4980 const TNode<WordT> delta_offset = IntPtrAdd(offset, delta);
4981 if (write_barrier == SKIP_WRITE_BARRIER) {
4982 StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
4983 delta_offset, element);
4984 } else {
4985 Store(dst_elements, delta_offset, element);
4986 }
4987 },
4988 ForEachDirection::kForward);
4989 Goto(&finished);
4990 }
4991 BIND(&finished)Bind(&finished);
4992 }
4993}
4994
4995template <typename TIndex>
4996void CodeStubAssembler::CopyFixedArrayElements(
4997 ElementsKind from_kind, TNode<FixedArrayBase> from_array,
4998 ElementsKind to_kind, TNode<FixedArrayBase> to_array,
4999 TNode<TIndex> first_element, TNode<TIndex> element_count,
5000 TNode<TIndex> capacity, WriteBarrierMode barrier_mode,
5001 HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
5002 DCHECK_IMPLIES(var_holes_converted != nullptr,((void) 0)
5003 convert_holes == HoleConversionMode::kConvertToUndefined)((void) 0);
5004 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind))((void)0);
5005 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind))((void)0);
5006 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize)static_assert(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize
, "FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize")
;
5007 static_assert(
5008 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
5009 "Only Smi or IntPtrT indices are allowed");
5010
5011 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
5012 Comment("[ CopyFixedArrayElements");
5013
5014 // Typed array elements are not supported.
5015 DCHECK(!IsTypedArrayElementsKind(from_kind))((void) 0);
5016 DCHECK(!IsTypedArrayElementsKind(to_kind))((void) 0);
5017
5018 Label done(this);
5019 bool from_double_elements = IsDoubleElementsKind(from_kind);
5020 bool to_double_elements = IsDoubleElementsKind(to_kind);
5021 bool doubles_to_objects_conversion =
5022 IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
5023 bool needs_write_barrier =
5024 doubles_to_objects_conversion ||
5025 (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
5026 bool element_offset_matches =
5027 !needs_write_barrier &&
5028 (kTaggedSize == kDoubleSize ||
5029 IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind));
5030 TNode<UintPtrT> double_hole =
5031 Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
5032 : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
5033
5034 // If copying might trigger a GC, we pre-initialize the FixedArray such that
5035 // it's always in a consistent state.
5036 if (convert_holes == HoleConversionMode::kConvertToUndefined) {
5037 DCHECK(IsObjectElementsKind(to_kind))((void) 0);
5038 // Use undefined for the part that we copy and holes for the rest.
5039 // Later if we run into a hole in the source we can just skip the writing
5040 // to the target and are still guaranteed that we get an undefined.
5041 FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant<TIndex>(0),
5042 element_count, RootIndex::kUndefinedValue);
5043 FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
5044 RootIndex::kTheHoleValue);
5045 } else if (doubles_to_objects_conversion) {
5046 // Pre-initialized the target with holes so later if we run into a hole in
5047 // the source we can just skip the writing to the target.
5048 FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant<TIndex>(0),
5049 capacity, RootIndex::kTheHoleValue);
5050 } else if (element_count != capacity) {
5051 FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
5052 RootIndex::kTheHoleValue);
5053 }
5054
5055 TNode<IntPtrT> first_from_element_offset =
5056 ElementOffsetFromIndex(first_element, from_kind, 0);
5057 TNode<IntPtrT> limit_offset = Signed(IntPtrAdd(
5058 first_from_element_offset, IntPtrConstant(first_element_offset)));
5059 TVARIABLE(IntPtrT, var_from_offset,TVariable<IntPtrT> var_from_offset(ElementOffsetFromIndex
(IntPtrOrSmiAdd(first_element, element_count), from_kind, first_element_offset
), this)
5060 ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count),TVariable<IntPtrT> var_from_offset(ElementOffsetFromIndex
(IntPtrOrSmiAdd(first_element, element_count), from_kind, first_element_offset
), this)
5061 from_kind, first_element_offset))TVariable<IntPtrT> var_from_offset(ElementOffsetFromIndex
(IntPtrOrSmiAdd(first_element, element_count), from_kind, first_element_offset
), this)
;
5062 // This second variable is used only when the element sizes of source and
5063 // destination arrays do not match.
5064 TVARIABLE(IntPtrT, var_to_offset)TVariable<IntPtrT> var_to_offset(this);
5065 if (element_offset_matches) {
5066 var_to_offset = var_from_offset.value();
5067 } else {
5068 var_to_offset =
5069 ElementOffsetFromIndex(element_count, to_kind, first_element_offset);
5070 }
5071
5072 VariableList vars({&var_from_offset, &var_to_offset}, zone());
5073 if (var_holes_converted != nullptr) vars.push_back(var_holes_converted);
5074 Label decrement(this, vars);
5075
5076 TNode<IntPtrT> to_array_adjusted =
5077 element_offset_matches
5078 ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
5079 : ReinterpretCast<IntPtrT>(to_array);
5080
5081 Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
5082
5083 BIND(&decrement)Bind(&decrement);
5084 {
5085 TNode<IntPtrT> from_offset = Signed(IntPtrSub(
5086 var_from_offset.value(),
5087 IntPtrConstant(from_double_elements ? kDoubleSize : kTaggedSize)));
5088 var_from_offset = from_offset;
5089
5090 TNode<IntPtrT> to_offset;
5091 if (element_offset_matches) {
5092 to_offset = from_offset;
5093 } else {
5094 to_offset = IntPtrSub(
5095 var_to_offset.value(),
5096 IntPtrConstant(to_double_elements ? kDoubleSize : kTaggedSize));
5097 var_to_offset = to_offset;
5098 }
5099
5100 Label next_iter(this), store_double_hole(this), signal_hole(this);
5101 Label* if_hole;
5102 if (convert_holes == HoleConversionMode::kConvertToUndefined) {
5103 // The target elements array is already preinitialized with undefined
5104 // so we only need to signal that a hole was found and continue the loop.
5105 if_hole = &signal_hole;
5106 } else if (doubles_to_objects_conversion) {
5107 // The target elements array is already preinitialized with holes, so we
5108 // can just proceed with the next iteration.
5109 if_hole = &next_iter;
5110 } else if (IsDoubleElementsKind(to_kind)) {
5111 if_hole = &store_double_hole;
5112 } else {
5113 // In all the other cases don't check for holes and copy the data as is.
5114 if_hole = nullptr;
5115 }
5116
5117 if (to_double_elements) {
5118 DCHECK(!needs_write_barrier)((void) 0);
5119 TNode<Float64T> value = LoadElementAndPrepareForStore<Float64T>(
5120 from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
5121 StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
5122 to_offset, value);
5123 } else {
5124 TNode<Object> value = LoadElementAndPrepareForStore<Object>(
5125 from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
5126 if (needs_write_barrier) {
5127 CHECK_EQ(to_array, to_array_adjusted)do { bool _cmp = ::v8::base::CmpEQImpl< typename ::v8::base
::pass_value_or_ref<decltype(to_array)>::type, typename
::v8::base::pass_value_or_ref<decltype(to_array_adjusted)
>::type>((to_array), (to_array_adjusted)); do { if ((__builtin_expect
(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s.", "to_array"
" " "==" " " "to_array_adjusted"); } } while (false); } while
(false)
;
5128 Store(to_array_adjusted, to_offset, value);
5129 } else {
5130 UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged,
5131 to_array_adjusted, to_offset, value);
5132 }
5133 }
5134
5135 Goto(&next_iter);
5136
5137 if (if_hole == &store_double_hole) {
5138 BIND(&store_double_hole)Bind(&store_double_hole);
5139 // Don't use doubles to store the hole double, since manipulating the
5140 // signaling NaN used for the hole in C++, e.g. with bit_cast, will
5141 // change its value on ia32 (the x87 stack is used to return values
5142 // and stores to the stack silently clear the signalling bit).
5143 //
5144 // TODO(danno): When we have a Float32/Float64 wrapper class that
5145 // preserves double bits during manipulation, remove this code/change
5146 // this to an indexed Float64 store.
5147 if (Is64()) {
5148 StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
5149 to_offset, double_hole);
5150 } else {
5151 StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
5152 to_offset, double_hole);
5153 StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
5154 IntPtrAdd(to_offset, IntPtrConstant(kInt32Size)),
5155 double_hole);
5156 }
5157 Goto(&next_iter);
5158 } else if (if_hole == &signal_hole) {
5159 // This case happens only when IsObjectElementsKind(to_kind).
5160 BIND(&signal_hole)Bind(&signal_hole);
5161 if (var_holes_converted != nullptr) {
5162 *var_holes_converted = Int32TrueConstant();
5163 }
5164 Goto(&next_iter);
5165 }
5166
5167 BIND(&next_iter)Bind(&next_iter);
5168 TNode<BoolT> compare = WordNotEqual(from_offset, limit_offset);
5169 Branch(compare, &decrement, &done);
5170 }
5171
5172 BIND(&done)Bind(&done);
5173 Comment("] CopyFixedArrayElements");
5174}
5175
5176TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
5177 TNode<HeapObject> base, Label* cast_fail) {
5178 Label fixed_array(this);
5179 TNode<Map> map = LoadMap(base);
5180 GotoIf(TaggedEqual(map, FixedArrayMapConstant()), &fixed_array);
5181 GotoIf(TaggedNotEqual(map, FixedCOWArrayMapConstant()), cast_fail);
5182 Goto(&fixed_array);
5183 BIND(&fixed_array)Bind(&fixed_array);
5184 return UncheckedCast<FixedArray>(base);
5185}
5186
5187void CodeStubAssembler::CopyPropertyArrayValues(TNode<HeapObject> from_array,
5188 TNode<PropertyArray> to_array,
5189 TNode<IntPtrT> property_count,
5190 WriteBarrierMode barrier_mode,
5191 DestroySource destroy_source) {
5192 CSA_SLOW_DCHECK(this, Word32Or(IsPropertyArray(from_array),((void)0)
5193 IsEmptyFixedArray(from_array)))((void)0);
5194 Comment("[ CopyPropertyArrayValues");
5195
5196 bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
5197
5198 if (destroy_source == DestroySource::kNo) {
5199 // PropertyArray may contain mutable HeapNumbers, which will be cloned on
5200 // the heap, requiring a write barrier.
5201 needs_write_barrier = true;
5202 }
5203
5204 TNode<IntPtrT> start = IntPtrConstant(0);
5205 ElementsKind kind = PACKED_ELEMENTS;
5206 BuildFastArrayForEach(
5207 from_array, kind, start, property_count,
5208 [this, to_array, needs_write_barrier, destroy_source](
5209 TNode<HeapObject> array, TNode<IntPtrT> offset) {
5210 TNode<AnyTaggedT> value = Load<AnyTaggedT>(array, offset);
5211
5212 if (destroy_source == DestroySource::kNo) {
5213 value = CloneIfMutablePrimitive(CAST(value)Cast(value));
5214 }
5215
5216 if (needs_write_barrier) {
5217 Store(to_array, offset, value);
5218 } else {
5219 StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
5220 value);
5221 }
5222 });
5223
5224#ifdef DEBUG
5225 // Zap {from_array} if the copying above has made it invalid.
5226 if (destroy_source == DestroySource::kYes) {
5227 Label did_zap(this);
5228 GotoIf(IsEmptyFixedArray(from_array), &did_zap);
5229 FillPropertyArrayWithUndefined(CAST(from_array)Cast(from_array), start, property_count);
5230
5231 Goto(&did_zap);
5232 BIND(&did_zap)Bind(&did_zap);
5233 }
5234#endif
5235 Comment("] CopyPropertyArrayValues");
5236}
5237
5238TNode<FixedArrayBase> CodeStubAssembler::CloneFixedArray(
5239 TNode<FixedArrayBase> source, ExtractFixedArrayFlags flags) {
5240 return ExtractFixedArray(
5241 source, base::Optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
5242 base::Optional<TNode<BInt>>(base::nullopt),
5243 base::Optional<TNode<BInt>>(base::nullopt), flags);
5244}
5245
5246template <>
5247TNode<Object> CodeStubAssembler::LoadElementAndPrepareForStore(
5248 TNode<FixedArrayBase> array, TNode<IntPtrT> offset, ElementsKind from_kind,
5249 ElementsKind to_kind, Label* if_hole) {
5250 CSA_DCHECK(this, IsFixedArrayWithKind(array, from_kind))((void)0);
5251 DCHECK(!IsDoubleElementsKind(to_kind))((void) 0);
5252 if (IsDoubleElementsKind(from_kind)) {
5253 TNode<Float64T> value =
5254 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
5255 return AllocateHeapNumberWithValue(value);
5256 } else {
5257 TNode<Object> value = Load<Object>(array, offset);
5258 if (if_hole) {
5259 GotoIf(TaggedEqual(value, TheHoleConstant()), if_hole);
5260 }
5261 return value;
5262 }
5263}
5264
5265template <>
5266TNode<Float64T> CodeStubAssembler::LoadElementAndPrepareForStore(
5267 TNode<FixedArrayBase> array, TNode<IntPtrT> offset, ElementsKind from_kind,
5268 ElementsKind to_kind, Label* if_hole) {
5269 CSA_DCHECK(this, IsFixedArrayWithKind(array, from_kind))((void)0);
5270 DCHECK(IsDoubleElementsKind(to_kind))((void) 0);
5271 if (IsDoubleElementsKind(from_kind)) {
5272 return LoadDoubleWithHoleCheck(array, offset, if_hole,
5273 MachineType::Float64());
5274 } else {
5275 TNode<Object> value = Load<Object>(array, offset);
5276 if (if_hole) {
5277 GotoIf(TaggedEqual(value, TheHoleConstant()), if_hole);
5278 }
5279 if (IsSmiElementsKind(from_kind)) {
5280 return SmiToFloat64(CAST(value)Cast(value));
5281 }
5282 return LoadHeapNumberValue(CAST(value)Cast(value));
5283 }
5284}
5285
5286template <typename TIndex>
5287TNode<TIndex> CodeStubAssembler::CalculateNewElementsCapacity(
5288 TNode<TIndex> old_capacity) {
5289 static_assert(
5290 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
5291 "Only Smi or IntPtrT old_capacity is allowed");
5292 Comment("TryGrowElementsCapacity");
5293 TNode<TIndex> half_old_capacity = WordOrSmiShr(old_capacity, 1);
5294 TNode<TIndex> new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity);
5295 TNode<TIndex> padding =
5296 IntPtrOrSmiConstant<TIndex>(JSObject::kMinAddedElementsCapacity);
5297 return IntPtrOrSmiAdd(new_capacity, padding);
5298}
5299
5300template V8_EXPORT_PRIVATE TNode<IntPtrT>
5301 CodeStubAssembler::CalculateNewElementsCapacity<IntPtrT>(TNode<IntPtrT>);
5302template V8_EXPORT_PRIVATE TNode<Smi>
5303 CodeStubAssembler::CalculateNewElementsCapacity<Smi>(TNode<Smi>);
5304
5305TNode<FixedArrayBase> CodeStubAssembler::TryGrowElementsCapacity(
5306 TNode<HeapObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
5307 TNode<Smi> key, Label* bailout) {
5308 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(elements, kind))((void)0);
5309 TNode<Smi> capacity = LoadFixedArrayBaseLength(elements);
5310
5311 return TryGrowElementsCapacity(object, elements, kind,
5312 TaggedToParameter<BInt>(key),
5313 TaggedToParameter<BInt>(capacity), bailout);
5314}
5315
5316template <typename TIndex>
5317TNode<FixedArrayBase> CodeStubAssembler::TryGrowElementsCapacity(
5318 TNode<HeapObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
5319 TNode<TIndex> key, TNode<TIndex> capacity, Label* bailout) {
5320 static_assert(
5321 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
5322 "Only Smi or IntPtrT key and capacity nodes are allowed");
5323 Comment("TryGrowElementsCapacity");
5324 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(elements, kind))((void)0);
5325
5326 // If the gap growth is too big, fall back to the runtime.
5327 TNode<TIndex> max_gap = IntPtrOrSmiConstant<TIndex>(JSObject::kMaxGap);
5328 TNode<TIndex> max_capacity = IntPtrOrSmiAdd(capacity, max_gap);
5329 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity), bailout);
5330
5331 // Calculate the capacity of the new backing store.
5332 TNode<TIndex> new_capacity = CalculateNewElementsCapacity(
5333 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant<TIndex>(1)));
5334
5335 return GrowElementsCapacity(object, elements, kind, kind, capacity,
5336 new_capacity, bailout);
5337}
5338
5339template <typename TIndex>
5340TNode<FixedArrayBase> CodeStubAssembler::GrowElementsCapacity(
5341 TNode<HeapObject> object, TNode<FixedArrayBase> elements,
5342 ElementsKind from_kind, ElementsKind to_kind, TNode<TIndex> capacity,
5343 TNode<TIndex> new_capacity, Label* bailout) {
5344 static_assert(
5345 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
5346 "Only Smi or IntPtrT capacities are allowed");
5347 Comment("[ GrowElementsCapacity");
5348 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(elements, from_kind))((void)0);
5349
5350 // If size of the allocation for the new capacity doesn't fit in a page
5351 // that we can bump-pointer allocate from, fall back to the runtime.
5352 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
5353 GotoIf(UintPtrOrSmiGreaterThanOrEqual(new_capacity,
5354 IntPtrOrSmiConstant<TIndex>(max_size)),
5355 bailout);
5356
5357 // Allocate the new backing store.
5358 TNode<FixedArrayBase> new_elements =
5359 AllocateFixedArray(to_kind, new_capacity);
5360
5361 // Copy the elements from the old elements store to the new.
5362 // The size-check above guarantees that the |new_elements| is allocated
5363 // in new space so we can skip the write barrier.
5364 CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
5365 new_capacity, SKIP_WRITE_BARRIER);
5366
5367 StoreObjectField(object, JSObject::kElementsOffset, new_elements);
5368 Comment("] GrowElementsCapacity");
5369 return new_elements;
5370}
5371
5372template TNode<FixedArrayBase> CodeStubAssembler::GrowElementsCapacity<IntPtrT>(
5373 TNode<HeapObject>, TNode<FixedArrayBase>, ElementsKind, ElementsKind,
5374 TNode<IntPtrT>, TNode<IntPtrT>, compiler::CodeAssemblerLabel*);
5375
5376namespace {
5377
5378// Helper function for folded memento allocation.
5379// Memento objects are designed to be put right after the objects they are
5380// tracking on. So memento allocations have to be folded together with previous
5381// object allocations.
5382TNode<HeapObject> InnerAllocateMemento(CodeStubAssembler* csa,
5383 TNode<HeapObject> previous,
5384 TNode<IntPtrT> offset) {
5385 return csa->UncheckedCast<HeapObject>(csa->BitcastWordToTagged(
5386 csa->IntPtrAdd(csa->BitcastTaggedToWord(previous), offset)));
5387}
5388
5389} // namespace
5390
5391void CodeStubAssembler::InitializeAllocationMemento(
5392 TNode<HeapObject> base, TNode<IntPtrT> base_allocation_size,
5393 TNode<AllocationSite> allocation_site) {
5394 DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL)((void) 0);
5395 Comment("[Initialize AllocationMemento");
5396 TNode<HeapObject> memento =
5397 InnerAllocateMemento(this, base, base_allocation_size);
5398 StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
5399 StoreObjectFieldNoWriteBarrier(
5400 memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
5401 if (FLAG_allocation_site_pretenuring) {
5402 TNode<Int32T> count = LoadObjectField<Int32T>(
5403 allocation_site, AllocationSite::kPretenureCreateCountOffset);
5404
5405 TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
5406 StoreObjectFieldNoWriteBarrier(allocation_site,
5407 AllocationSite::kPretenureCreateCountOffset,
5408 incremented_count);
5409 }
5410 Comment("]");
5411}
5412
5413TNode<IntPtrT> CodeStubAssembler::TryTaggedToInt32AsIntPtr(
5414 TNode<Object> acc, Label* if_not_possible) {
5415 TVARIABLE(IntPtrT, acc_intptr)TVariable<IntPtrT> acc_intptr(this);
5416 Label is_not_smi(this), have_int32(this);
5417
5418 GotoIfNot(TaggedIsSmi(acc), &is_not_smi);
5419 acc_intptr = SmiUntag(CAST(acc)Cast(acc));
5420 Goto(&have_int32);
5421
5422 BIND(&is_not_smi)Bind(&is_not_smi);
5423 GotoIfNot(IsHeapNumber(CAST(acc)Cast(acc)), if_not_possible);
5424 TNode<Float64T> value = LoadHeapNumberValue(CAST(acc)Cast(acc));
5425 TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5426 TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5427 GotoIfNot(Float64Equal(value, value64), if_not_possible);
5428 acc_intptr = ChangeInt32ToIntPtr(value32);
5429 Goto(&have_int32);
5430
5431 BIND(&have_int32)Bind(&have_int32);
5432 return acc_intptr.value();
5433}
5434
5435TNode<Float64T> CodeStubAssembler::TryTaggedToFloat64(
5436 TNode<Object> value, Label* if_valueisnotnumber) {
5437 return Select<Float64T>(
5438 TaggedIsSmi(value), [&]() { return SmiToFloat64(CAST(value)Cast(value)); },
5439 [&]() {
5440 GotoIfNot(IsHeapNumber(CAST(value)Cast(value)), if_valueisnotnumber);
5441 return LoadHeapNumberValue(CAST(value)Cast(value));
5442 });
5443}
5444
5445TNode<Float64T> CodeStubAssembler::TruncateTaggedToFloat64(
5446 TNode<Context> context, TNode<Object> value) {
5447 // We might need to loop once due to ToNumber conversion.
5448 TVARIABLE(Object, var_value, value)TVariable<Object> var_value(value, this);
5449 TVARIABLE(Float64T, var_result)TVariable<Float64T> var_result(this);
5450 Label loop(this, &var_value), done_loop(this, &var_result);
5451 Goto(&loop);
5452 BIND(&loop)Bind(&loop);
5453 {
5454 Label if_valueisnotnumber(this, Label::kDeferred);
5455
5456 // Load the current {value}.
5457 value = var_value.value();
5458
5459 // Convert {value} to Float64 if it is a number and convert it to a number
5460 // otherwise.
5461 var_result = TryTaggedToFloat64(value, &if_valueisnotnumber);
5462 Goto(&done_loop);
5463
5464 BIND(&if_valueisnotnumber)Bind(&if_valueisnotnumber);
5465 {
5466 // Convert the {value} to a Number first.
5467 var_value = CallBuiltin(Builtin::kNonNumberToNumber, context, value);
5468 Goto(&loop);
5469 }
5470 }
5471 BIND(&done_loop)Bind(&done_loop);
5472 return var_result.value();
5473}
5474
5475TNode<Word32T> CodeStubAssembler::TruncateTaggedToWord32(TNode<Context> context,
5476 TNode<Object> value) {
5477 TVARIABLE(Word32T, var_result)TVariable<Word32T> var_result(this);
5478 Label done(this);
5479 TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(
5480 context, value, &done, &var_result, IsKnownTaggedPointer::kNo);
5481 BIND(&done)Bind(&done);
5482 return var_result.value();
5483}
5484
5485// Truncate {value} to word32 and jump to {if_number} if it is a Number,
5486// or find that it is a BigInt and jump to {if_bigint}.
5487void CodeStubAssembler::TaggedToWord32OrBigInt(
5488 TNode<Context> context, TNode<Object> value, Label* if_number,
5489 TVariable<Word32T>* var_word32, Label* if_bigint,
5490 TVariable<BigInt>* var_maybe_bigint) {
5491 TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5492 context, value, if_number, var_word32, IsKnownTaggedPointer::kNo,
5493 if_bigint, var_maybe_bigint);
5494}
5495
5496// Truncate {value} to word32 and jump to {if_number} if it is a Number,
5497// or find that it is a BigInt and jump to {if_bigint}. In either case,
5498// store the type feedback in {var_feedback}.
5499void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
5500 TNode<Context> context, TNode<Object> value, Label* if_number,
5501 TVariable<Word32T>* var_word32, Label* if_bigint,
5502 TVariable<BigInt>* var_maybe_bigint, TVariable<Smi>* var_feedback) {
5503 TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5504 context, value, if_number, var_word32, IsKnownTaggedPointer::kNo,
5505 if_bigint, var_maybe_bigint, var_feedback);
5506}
5507
5508// Truncate {pointer} to word32 and jump to {if_number} if it is a Number,
5509// or find that it is a BigInt and jump to {if_bigint}. In either case,
5510// store the type feedback in {var_feedback}.
5511void CodeStubAssembler::TaggedPointerToWord32OrBigIntWithFeedback(
5512 TNode<Context> context, TNode<HeapObject> pointer, Label* if_number,
5513 TVariable<Word32T>* var_word32, Label* if_bigint,
5514 TVariable<BigInt>* var_maybe_bigint, TVariable<Smi>* var_feedback) {
5515 TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5516 context, pointer, if_number, var_word32, IsKnownTaggedPointer::kYes,
5517 if_bigint, var_maybe_bigint, var_feedback);
5518}
5519
5520template <Object::Conversion conversion>
5521void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
5522 TNode<Context> context, TNode<Object> value, Label* if_number,
5523 TVariable<Word32T>* var_word32,
5524 IsKnownTaggedPointer is_known_tagged_pointer, Label* if_bigint,
5525 TVariable<BigInt>* var_maybe_bigint, TVariable<Smi>* var_feedback) {
5526 // We might need to loop after conversion.
5527 TVARIABLE(Object, var_value, value)TVariable<Object> var_value(value, this);
5528 OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
5529 VariableList loop_vars({&var_value}, zone());
5530 if (var_feedback != nullptr) loop_vars.push_back(var_feedback);
5531 Label loop(this, loop_vars);
5532 if (is_known_tagged_pointer == IsKnownTaggedPointer::kNo) {
5533 GotoIf(TaggedIsNotSmi(value), &loop);
5534
5535 // {value} is a Smi.
5536 *var_word32 = SmiToInt32(CAST(value)Cast(value));
5537 CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
5538 Goto(if_number);
5539 } else {
5540 Goto(&loop);
5541 }
5542 BIND(&loop)Bind(&loop);
5543 {
5544 value = var_value.value();
5545 Label not_smi(this), is_heap_number(this), is_oddball(this),
5546 is_bigint(this), check_if_smi(this);
5547
5548 TNode<HeapObject> value_heap_object = CAST(value)Cast(value);
5549 TNode<Map> map = LoadMap(value_heap_object);
5550 GotoIf(IsHeapNumberMap(map), &is_heap_number);
5551 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
5552 if (conversion == Object::Conversion::kToNumeric) {
5553 GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
5554 }
5555
5556 // Not HeapNumber (or BigInt if conversion == kToNumeric).
5557 {
5558 if (var_feedback != nullptr) {
5559 // We do not require an Or with earlier feedback here because once we
5560 // convert the value to a Numeric, we cannot reach this path. We can
5561 // only reach this path on the first pass when the feedback is kNone.
5562 CSA_DCHECK(this, SmiEqual(var_feedback->value(),((void)0)
5563 SmiConstant(BinaryOperationFeedback::kNone)))((void)0);
5564 }
5565 GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
5566 // Not an oddball either -> convert.
5567 auto builtin = conversion == Object::Conversion::kToNumeric
5568 ? Builtin::kNonNumberToNumeric
5569 : Builtin::kNonNumberToNumber;
5570 var_value = CallBuiltin(builtin, context, value);
5571 OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
5572 Goto(&check_if_smi);
5573
5574 BIND(&is_oddball)Bind(&is_oddball);
5575 var_value = LoadObjectField(value_heap_object, Oddball::kToNumberOffset);
5576 OverwriteFeedback(var_feedback,
5577 BinaryOperationFeedback::kNumberOrOddball);
5578 Goto(&check_if_smi);
5579 }
5580
5581 BIND(&is_heap_number)Bind(&is_heap_number);
5582 *var_word32 = TruncateHeapNumberValueToWord32(CAST(value)Cast(value));
5583 CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
5584 Goto(if_number);
5585
5586 if (conversion == Object::Conversion::kToNumeric) {
5587 BIND(&is_bigint)Bind(&is_bigint);
5588 *var_maybe_bigint = CAST(value)Cast(value);
5589 CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
5590 Goto(if_bigint);
5591 }
5592
5593 BIND(&check_if_smi)Bind(&check_if_smi);
5594 value = var_value.value();
5595 GotoIf(TaggedIsNotSmi(value), &loop);
5596
5597 // {value} is a Smi.
5598 *var_word32 = SmiToInt32(CAST(value)Cast(value));
5599 CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
5600 Goto(if_number);
5601 }
5602}
5603
5604TNode<Int32T> CodeStubAssembler::TruncateNumberToWord32(TNode<Number> number) {
5605 TVARIABLE(Int32T, var_result)TVariable<Int32T> var_result(this);
5606 Label done(this), if_heapnumber(this);
5607 GotoIfNot(TaggedIsSmi(number), &if_heapnumber);
5608 var_result = SmiToInt32(CAST(number)Cast(number));
5609 Goto(&done);
5610
5611 BIND(&if_heapnumber)Bind(&if_heapnumber);
5612 TNode<Float64T> value = LoadHeapNumberValue(CAST(number)Cast(number));
5613 var_result = Signed(TruncateFloat64ToWord32(value));
5614 Goto(&done);
5615
5616 BIND(&done)Bind(&done);
5617 return var_result.value();
5618}
5619
5620TNode<Int32T> CodeStubAssembler::TruncateHeapNumberValueToWord32(
5621 TNode<HeapNumber> object) {
5622 TNode<Float64T> value = LoadHeapNumberValue(object);
5623 return Signed(TruncateFloat64ToWord32(value));
5624}
5625
5626void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
5627 TVariable<Smi>* var_result_smi,
5628 Label* if_smi) {
5629 TNode<Float64T> value = LoadHeapNumberValue(number);
5630 TryFloat64ToSmi(value, var_result_smi, if_smi);
5631}
5632
5633void CodeStubAssembler::TryFloat32ToSmi(TNode<Float32T> value,
5634 TVariable<Smi>* var_result_smi,
5635 Label* if_smi) {
5636 TNode<Int32T> ivalue = TruncateFloat32ToInt32(value);
5637 TNode<Float32T> fvalue = RoundInt32ToFloat32(ivalue);
5638
5639 Label if_int32(this), if_heap_number(this);
5640
5641 GotoIfNot(Float32Equal(value, fvalue), &if_heap_number);
5642 GotoIfNot(Word32Equal(ivalue, Int32Constant(0)), &if_int32);
5643 Branch(Int32LessThan(UncheckedCast<Int32T>(BitcastFloat32ToInt32(value)),
5644 Int32Constant(0)),
5645 &if_heap_number, &if_int32);
5646
5647 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
5648 BIND(&if_int32)Bind(&if_int32);
5649 {
5650 if (SmiValuesAre32Bits()) {
5651 *var_result_smi = SmiTag(ChangeInt32ToIntPtr(ivalue));
5652 } else {
5653 DCHECK(SmiValuesAre31Bits())((void) 0);
5654 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(ivalue, ivalue);
5655 TNode<BoolT> overflow = Projection<1>(pair);
5656 GotoIf(overflow, &if_heap_number);
5657 *var_result_smi =
5658 BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5659 }
5660 Goto(if_smi);
5661 }
5662 BIND(&if_heap_number)Bind(&if_heap_number);
5663}
5664
5665void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
5666 TVariable<Smi>* var_result_smi,
5667 Label* if_smi) {
5668 TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5669 TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5670
5671 Label if_int32(this), if_heap_number(this, Label::kDeferred);
5672
5673 GotoIfNot(Float64Equal(value, value64), &if_heap_number);
5674 GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
5675 Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
5676 Int32Constant(0)),
5677 &if_heap_number, &if_int32);
5678
5679 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
5680 BIND(&if_int32)Bind(&if_int32);
5681 {
5682 if (SmiValuesAre32Bits()) {
5683 *var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
5684 } else {
5685 DCHECK(SmiValuesAre31Bits())((void) 0);
5686 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
5687 TNode<BoolT> overflow = Projection<1>(pair);
5688 GotoIf(overflow, &if_heap_number);
5689 *var_result_smi =
5690 BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5691 }
5692 Goto(if_smi);
5693 }
5694 BIND(&if_heap_number)Bind(&if_heap_number);
5695}
5696
5697TNode<Number> CodeStubAssembler::ChangeFloat32ToTagged(TNode<Float32T> value) {
5698 Label if_smi(this), done(this);
5699 TVARIABLE(Smi, var_smi_result)TVariable<Smi> var_smi_result(this);
5700 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
5701 TryFloat32ToSmi(value, &var_smi_result, &if_smi);
5702
5703 var_result = AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(value));
5704 Goto(&done);
5705
5706 BIND(&if_smi)Bind(&if_smi);
5707 {
5708 var_result = var_smi_result.value();
5709 Goto(&done);
5710 }
5711 BIND(&done)Bind(&done);
5712 return var_result.value();
5713}
5714
5715TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(TNode<Float64T> value) {
5716 Label if_smi(this), done(this);
5717 TVARIABLE(Smi, var_smi_result)TVariable<Smi> var_smi_result(this);
5718 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
5719 TryFloat64ToSmi(value, &var_smi_result, &if_smi);
5720
5721 var_result = AllocateHeapNumberWithValue(value);
5722 Goto(&done);
5723
5724 BIND(&if_smi)Bind(&if_smi);
5725 {
5726 var_result = var_smi_result.value();
5727 Goto(&done);
5728 }
5729 BIND(&done)Bind(&done);
5730 return var_result.value();
5731}
5732
5733TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(TNode<Int32T> value) {
5734 if (SmiValuesAre32Bits()) {
5735 return SmiTag(ChangeInt32ToIntPtr(value));
5736 }
5737 DCHECK(SmiValuesAre31Bits())((void) 0);
5738 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
5739 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
5740 TNode<BoolT> overflow = Projection<1>(pair);
5741 Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
5742 if_join(this);
5743 Branch(overflow, &if_overflow, &if_notoverflow);
5744 BIND(&if_overflow)Bind(&if_overflow);
5745 {
5746 TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
5747 TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
5748 var_result = result;
5749 Goto(&if_join);
5750 }
5751 BIND(&if_notoverflow)Bind(&if_notoverflow);
5752 {
5753 TNode<IntPtrT> almost_tagged_value =
5754 ChangeInt32ToIntPtr(Projection<0>(pair));
5755 TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
5756 var_result = result;
5757 Goto(&if_join);
5758 }
5759 BIND(&if_join)Bind(&if_join);
5760 return var_result.value();
5761}
5762
5763TNode<Number> CodeStubAssembler::ChangeInt32ToTaggedNoOverflow(
5764 TNode<Int32T> value) {
5765 if (SmiValuesAre32Bits()) {
5766 return SmiTag(ChangeInt32ToIntPtr(value));
5767 }
5768 DCHECK(SmiValuesAre31Bits())((void) 0);
5769 TNode<Int32T> result_int32 = Int32Add(value, value);
5770 TNode<IntPtrT> almost_tagged_value = ChangeInt32ToIntPtr(result_int32);
5771 TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
5772 return result;
5773}
5774
5775TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(TNode<Uint32T> value) {
5776 Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5777 if_join(this);
5778 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
5779 // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
5780 Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
5781 &if_not_overflow);
5782
5783 BIND(&if_not_overflow)Bind(&if_not_overflow);
5784 {
5785 // The {value} is definitely in valid Smi range.
5786 var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
5787 }
5788 Goto(&if_join);
5789
5790 BIND(&if_overflow)Bind(&if_overflow);
5791 {
5792 TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
5793 var_result = AllocateHeapNumberWithValue(float64_value);
5794 }
5795 Goto(&if_join);
5796
5797 BIND(&if_join)Bind(&if_join);
5798 return var_result.value();
5799}
5800
5801TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
5802 Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5803 if_join(this);
5804 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
5805 // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
5806 Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
5807 &if_not_overflow);
5808
5809 BIND(&if_not_overflow)Bind(&if_not_overflow);
5810 {
5811 // The {value} is definitely in valid Smi range.
5812 var_result = SmiTag(Signed(value));
5813 }
5814 Goto(&if_join);
5815
5816 BIND(&if_overflow)Bind(&if_overflow);
5817 {
5818 TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
5819 var_result = AllocateHeapNumberWithValue(float64_value);
5820 }
5821 Goto(&if_join);
5822
5823 BIND(&if_join)Bind(&if_join);
5824 return var_result.value();
5825}
5826
5827TNode<Int32T> CodeStubAssembler::ChangeBoolToInt32(TNode<BoolT> b) {
5828 return UncheckedCast<Int32T>(b);
5829}
5830
5831TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context,
5832 TNode<Object> value,
5833 TNode<String> method_name) {
5834 TVARIABLE(Object, var_value, value)TVariable<Object> var_value(value, this);
5835
5836 // Check if the {value} is a Smi or a HeapObject.
5837 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
5838 if_valueisstring(this);
5839 Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
5840 BIND(&if_valueisnotsmi)Bind(&if_valueisnotsmi);
5841 {
5842 // Load the instance type of the {value}.
5843 TNode<Uint16T> value_instance_type = LoadInstanceType(CAST(value)Cast(value));
5844
5845 // Check if the {value} is already String.
5846 Label if_valueisnotstring(this, Label::kDeferred);
5847 Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
5848 &if_valueisnotstring);
5849 BIND(&if_valueisnotstring)Bind(&if_valueisnotstring);
5850 {
5851 // Check if the {value} is null.
5852 Label if_valueisnullorundefined(this, Label::kDeferred);
5853 GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
5854 // Convert the {value} to a String.
5855 var_value = CallBuiltin(Builtin::kToString, context, value);
5856 Goto(&if_valueisstring);
5857
5858 BIND(&if_valueisnullorundefined)Bind(&if_valueisnullorundefined);
5859 {
5860 // The {value} is either null or undefined.
5861 ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
5862 method_name);
5863 }
5864 }
5865 }
5866 BIND(&if_valueissmi)Bind(&if_valueissmi);
5867 {
5868 // The {value} is a Smi, convert it to a String.
5869 var_value = CallBuiltin(Builtin::kNumberToString, context, value);
5870 Goto(&if_valueisstring);
5871 }
5872 BIND(&if_valueisstring)Bind(&if_valueisstring);
5873 return CAST(var_value.value())Cast(var_value.value());
5874}
5875
5876TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
5877 TVARIABLE(Uint32T, var_result)TVariable<Uint32T> var_result(this);
5878 Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
5879 Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
5880 BIND(&if_smi)Bind(&if_smi);
5881 {
5882 var_result = Unsigned(SmiToInt32(CAST(value)Cast(value)));
5883 Goto(&done);
5884 }
5885 BIND(&if_heapnumber)Bind(&if_heapnumber);
5886 {
5887 var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)Cast(value)));
5888 Goto(&done);
5889 }
5890 BIND(&done)Bind(&done);
5891 return var_result.value();
5892}
5893
5894TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(TNode<Number> value) {
5895 TVARIABLE(Float64T, result)TVariable<Float64T> result(this);
5896 Label smi(this);
5897 Label done(this, &result);
5898 GotoIf(TaggedIsSmi(value), &smi);
5899 result = LoadHeapNumberValue(CAST(value)Cast(value));
5900 Goto(&done);
5901
5902 BIND(&smi)Bind(&smi);
5903 {
5904 result = SmiToFloat64(CAST(value)Cast(value));
5905 Goto(&done);
5906 }
5907
5908 BIND(&done)Bind(&done);
5909 return result.value();
5910}
5911
5912TNode<Int32T> CodeStubAssembler::ChangeTaggedNonSmiToInt32(
5913 TNode<Context> context, TNode<HeapObject> input) {
5914 return Select<Int32T>(
5915 IsHeapNumber(input),
5916 [=] {
5917 return Signed(TruncateFloat64ToWord32(LoadHeapNumberValue(input)));
5918 },
5919 [=] {
5920 return TruncateNumberToWord32(
5921 CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input))Cast(CallBuiltin(Builtin::kNonNumberToNumber, context, input)
)
);
5922 });
5923}
5924
5925TNode<Float64T> CodeStubAssembler::ChangeTaggedToFloat64(TNode<Context> context,
5926 TNode<Object> input) {
5927 TVARIABLE(Float64T, var_result)TVariable<Float64T> var_result(this);
5928 Label end(this), not_smi(this);
5929
5930 GotoIfNot(TaggedIsSmi(input), &not_smi);
5931 var_result = SmiToFloat64(CAST(input)Cast(input));
5932 Goto(&end);
5933
5934 BIND(&not_smi)Bind(&not_smi);
5935 var_result = Select<Float64T>(
5936 IsHeapNumber(CAST(input)Cast(input)),
5937 [=] { return LoadHeapNumberValue(CAST(input)Cast(input)); },
5938 [=] {
5939 return ChangeNumberToFloat64(
5940 CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input))Cast(CallBuiltin(Builtin::kNonNumberToNumber, context, input)
)
);
5941 });
5942 Goto(&end);
5943
5944 BIND(&end)Bind(&end);
5945 return var_result.value();
5946}
5947
5948TNode<WordT> CodeStubAssembler::TimesSystemPointerSize(TNode<WordT> value) {
5949 return WordShl(value, kSystemPointerSizeLog2);
5950}
5951
5952TNode<WordT> CodeStubAssembler::TimesTaggedSize(TNode<WordT> value) {
5953 return WordShl(value, kTaggedSizeLog2);
5954}
5955
5956TNode<WordT> CodeStubAssembler::TimesDoubleSize(TNode<WordT> value) {
5957 return WordShl(value, kDoubleSizeLog2);
5958}
5959
5960TNode<Object> CodeStubAssembler::ToThisValue(TNode<Context> context,
5961 TNode<Object> input_value,
5962 PrimitiveType primitive_type,
5963 char const* method_name) {
5964 // We might need to loop once due to JSPrimitiveWrapper unboxing.
5965 TVARIABLE(Object, var_value, input_value)TVariable<Object> var_value(input_value, this);
5966 Label loop(this, &var_value), done_loop(this),
5967 done_throw(this, Label::kDeferred);
5968 Goto(&loop);
5969 BIND(&loop)Bind(&loop);
5970 {
5971 // Check if the {value} is a Smi or a HeapObject.
5972 GotoIf(
5973 TaggedIsSmi(var_value.value()),
5974 (primitive_type == PrimitiveType::kNumber) ? &done_loop : &done_throw);
5975
5976 TNode<HeapObject> value = CAST(var_value.value())Cast(var_value.value());
5977
5978 // Load the map of the {value}.
5979 TNode<Map> value_map = LoadMap(value);
5980
5981 // Load the instance type of the {value}.
5982 TNode<Uint16T> value_instance_type = LoadMapInstanceType(value_map);
5983
5984 // Check if {value} is a JSPrimitiveWrapper.
5985 Label if_valueiswrapper(this, Label::kDeferred), if_valueisnotwrapper(this);
5986 Branch(InstanceTypeEqual(value_instance_type, JS_PRIMITIVE_WRAPPER_TYPE),
5987 &if_valueiswrapper, &if_valueisnotwrapper);
5988
5989 BIND(&if_valueiswrapper)Bind(&if_valueiswrapper);
5990 {
5991 // Load the actual value from the {value}.
5992 var_value = LoadObjectField(value, JSPrimitiveWrapper::kValueOffset);
5993 Goto(&loop);
5994 }
5995
5996 BIND(&if_valueisnotwrapper)Bind(&if_valueisnotwrapper);
5997 {
5998 switch (primitive_type) {
5999 case PrimitiveType::kBoolean:
6000 GotoIf(TaggedEqual(value_map, BooleanMapConstant()), &done_loop);
6001 break;
6002 case PrimitiveType::kNumber:
6003 GotoIf(TaggedEqual(value_map, HeapNumberMapConstant()), &done_loop);
6004 break;
6005 case PrimitiveType::kString:
6006 GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
6007 break;
6008 case PrimitiveType::kSymbol:
6009 GotoIf(TaggedEqual(value_map, SymbolMapConstant()), &done_loop);
6010 break;
6011 }
6012 Goto(&done_throw);
6013 }
6014 }
6015
6016 BIND(&done_throw)Bind(&done_throw);
6017 {
6018 const char* primitive_name = nullptr;
6019 switch (primitive_type) {
6020 case PrimitiveType::kBoolean:
6021 primitive_name = "Boolean";
6022 break;
6023 case PrimitiveType::kNumber:
6024 primitive_name = "Number";
6025 break;
6026 case PrimitiveType::kString:
6027 primitive_name = "String";
6028 break;
6029 case PrimitiveType::kSymbol:
6030 primitive_name = "Symbol";
6031 break;
6032 }
6033 CHECK_NOT_NULL(primitive_name)do { if ((__builtin_expect(!!(!((primitive_name) != nullptr))
, 0))) { V8_Fatal("Check failed: %s.", "(primitive_name) != nullptr"
); } } while (false)
;
6034
6035 // The {value} is not a compatible receiver for this method.
6036 ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
6037 primitive_name);
6038 }
6039
6040 BIND(&done_loop)Bind(&done_loop);
6041 return var_value.value();
6042}
6043
6044void CodeStubAssembler::ThrowIfNotInstanceType(TNode<Context> context,
6045 TNode<Object> value,
6046 InstanceType instance_type,
6047 char const* method_name) {
6048 Label out(this), throw_exception(this, Label::kDeferred);
6049
6050 GotoIf(TaggedIsSmi(value), &throw_exception);
6051
6052 // Load the instance type of the {value}.
6053 TNode<Map> map = LoadMap(CAST(value)Cast(value));
6054 const TNode<Uint16T> value_instance_type = LoadMapInstanceType(map);
6055
6056 Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
6057 &throw_exception);
6058
6059 // The {value} is not a compatible receiver for this method.
6060 BIND(&throw_exception)Bind(&throw_exception);
6061 ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
6062 StringConstant(method_name), value);
6063
6064 BIND(&out)Bind(&out);
6065}
6066
6067void CodeStubAssembler::ThrowIfNotJSReceiver(TNode<Context> context,
6068 TNode<Object> value,
6069 MessageTemplate msg_template,
6070 const char* method_name) {
6071 Label done(this), throw_exception(this, Label::kDeferred);
6072
6073 GotoIf(TaggedIsSmi(value), &throw_exception);
6074
6075 // Load the instance type of the {value}.
6076 TNode<Map> value_map = LoadMap(CAST(value)Cast(value));
6077 const TNode<Uint16T> value_instance_type = LoadMapInstanceType(value_map);
6078
6079 Branch(IsJSReceiverInstanceType(value_instance_type), &done,
6080 &throw_exception);
6081
6082 // The {value} is not a compatible receiver for this method.
6083 BIND(&throw_exception)Bind(&throw_exception);
6084 ThrowTypeError(context, msg_template, StringConstant(method_name), value);
6085
6086 BIND(&done)Bind(&done);
6087}
6088
6089void CodeStubAssembler::ThrowIfNotCallable(TNode<Context> context,
6090 TNode<Object> value,
6091 const char* method_name) {
6092 Label out(this), throw_exception(this, Label::kDeferred);
6093
6094 GotoIf(TaggedIsSmi(value), &throw_exception);
6095 Branch(IsCallable(CAST(value)Cast(value)), &out, &throw_exception);
6096
6097 // The {value} is not a compatible receiver for this method.
6098 BIND(&throw_exception)Bind(&throw_exception);
6099 ThrowTypeError(context, MessageTemplate::kCalledNonCallable, method_name);
6100
6101 BIND(&out)Bind(&out);
6102}
6103
6104void CodeStubAssembler::ThrowRangeError(TNode<Context> context,
6105 MessageTemplate message,
6106 base::Optional<TNode<Object>> arg0,
6107 base::Optional<TNode<Object>> arg1,
6108 base::Optional<TNode<Object>> arg2) {
6109 TNode<Smi> template_index = SmiConstant(static_cast<int>(message));
6110 if (!arg0) {
6111 CallRuntime(Runtime::kThrowRangeError, context, template_index);
6112 } else if (!arg1) {
6113 CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0);
6114 } else if (!arg2) {
6115 CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0,
6116 *arg1);
6117 } else {
6118 CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0,
6119 *arg1, *arg2);
6120 }
6121 Unreachable();
6122}
6123
6124void CodeStubAssembler::ThrowTypeError(TNode<Context> context,
6125 MessageTemplate message,
6126 char const* arg0, char const* arg1) {
6127 base::Optional<TNode<Object>> arg0_node;
6128 if (arg0) arg0_node = StringConstant(arg0);
6129 base::Optional<TNode<Object>> arg1_node;
6130 if (arg1) arg1_node = StringConstant(arg1);
6131 ThrowTypeError(context, message, arg0_node, arg1_node);
6132}
6133
6134void CodeStubAssembler::ThrowTypeError(TNode<Context> context,
6135 MessageTemplate message,
6136 base::Optional<TNode<Object>> arg0,
6137 base::Optional<TNode<Object>> arg1,
6138 base::Optional<TNode<Object>> arg2) {
6139 TNode<Smi> template_index = SmiConstant(static_cast<int>(message));
6140 if (!arg0) {
6141 CallRuntime(Runtime::kThrowTypeError, context, template_index);
6142 } else if (!arg1) {
6143 CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0);
6144 } else if (!arg2) {
6145 CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0,
6146 *arg1);
6147 } else {
6148 CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0, *arg1,
6149 *arg2);
6150 }
6151 Unreachable();
6152}
6153
6154TNode<HeapObject> CodeStubAssembler::GetPendingMessage() {
6155 TNode<ExternalReference> pending_message = ExternalConstant(
6156 ExternalReference::address_of_pending_message(isolate()));
6157 return UncheckedCast<HeapObject>(LoadFullTagged(pending_message));
6158}
6159void CodeStubAssembler::SetPendingMessage(TNode<HeapObject> message) {
6160 CSA_DCHECK(this, Word32Or(IsTheHole(message),((void)0)
6161 InstanceTypeEqual(LoadInstanceType(message),((void)0)
6162 JS_MESSAGE_OBJECT_TYPE)))((void)0);
6163 TNode<ExternalReference> pending_message = ExternalConstant(
6164 ExternalReference::address_of_pending_message(isolate()));
6165 StoreFullTaggedNoWriteBarrier(pending_message, message);
6166}
6167
6168TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(TNode<Int32T> instance_type,
6169 int type) {
6170 return Word32Equal(instance_type, Int32Constant(type));
6171}
6172
6173TNode<BoolT> CodeStubAssembler::IsDictionaryMap(TNode<Map> map) {
6174 return IsSetWord32<Map::Bits3::IsDictionaryMapBit>(LoadMapBitField3(map));
6175}
6176
6177TNode<BoolT> CodeStubAssembler::IsExtensibleMap(TNode<Map> map) {
6178 return IsSetWord32<Map::Bits3::IsExtensibleBit>(LoadMapBitField3(map));
6179}
6180
6181TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
6182 int kMask =
6183 Map::Bits3::IsExtensibleBit::kMask | Map::Bits3::IsPrototypeMapBit::kMask;
6184 int kExpected = Map::Bits3::IsExtensibleBit::kMask;
6185 return Word32Equal(Word32And(LoadMapBitField3(map), Int32Constant(kMask)),
6186 Int32Constant(kExpected));
6187}
6188
6189TNode<BoolT> CodeStubAssembler::IsCallableMap(TNode<Map> map) {
6190 return IsSetWord32<Map::Bits1::IsCallableBit>(LoadMapBitField(map));
6191}
6192
6193TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(TNode<Map> map) {
6194 return IsSetWord32<Map::Bits3::IsDeprecatedBit>(LoadMapBitField3(map));
6195}
6196
6197TNode<BoolT> CodeStubAssembler::IsUndetectableMap(TNode<Map> map) {
6198 return IsSetWord32<Map::Bits1::IsUndetectableBit>(LoadMapBitField(map));
6199}
6200
6201TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
6202 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6203 TNode<PropertyCell> cell = NoElementsProtectorConstant();
6204 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6205 return TaggedEqual(cell_value, invalid);
6206}
6207
6208TNode<BoolT> CodeStubAssembler::IsMegaDOMProtectorCellInvalid() {
6209 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6210 TNode<PropertyCell> cell = MegaDOMProtectorConstant();
6211 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6212 return TaggedEqual(cell_value, invalid);
6213}
6214
6215TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
6216 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6217 TNode<PropertyCell> cell = ArrayIteratorProtectorConstant();
6218 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6219 return TaggedEqual(cell_value, invalid);
6220}
6221
6222TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
6223 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6224 TNode<PropertyCell> cell = PromiseResolveProtectorConstant();
6225 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6226 return TaggedEqual(cell_value, invalid);
6227}
6228
6229TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
6230 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6231 TNode<PropertyCell> cell = PromiseThenProtectorConstant();
6232 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6233 return TaggedEqual(cell_value, invalid);
6234}
6235
6236TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
6237 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6238 TNode<PropertyCell> cell = ArraySpeciesProtectorConstant();
6239 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6240 return TaggedEqual(cell_value, invalid);
6241}
6242
6243TNode<BoolT> CodeStubAssembler::IsIsConcatSpreadableProtectorCellInvalid() {
6244 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6245 TNode<PropertyCell> cell = IsConcatSpreadableProtectorConstant();
6246 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6247 return TaggedEqual(cell_value, invalid);
6248}
6249
6250TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
6251 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6252 TNode<PropertyCell> cell = TypedArraySpeciesProtectorConstant();
6253 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6254 return TaggedEqual(cell_value, invalid);
6255}
6256
6257TNode<BoolT> CodeStubAssembler::IsRegExpSpeciesProtectorCellInvalid() {
6258 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6259 TNode<PropertyCell> cell = RegExpSpeciesProtectorConstant();
6260 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6261 return TaggedEqual(cell_value, invalid);
6262}
6263
6264TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
6265 TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
6266 TNode<PropertyCell> cell = PromiseSpeciesProtectorConstant();
6267 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
6268 return TaggedEqual(cell_value, invalid);
6269}
6270
6271TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
6272 TNode<Context> context, TNode<Map> map) {
6273 const TNode<NativeContext> native_context = LoadNativeContext(context);
6274 const TNode<Object> initial_array_prototype = LoadContextElement(
6275 native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
6276 TNode<HeapObject> proto = LoadMapPrototype(map);
6277 return TaggedEqual(proto, initial_array_prototype);
6278}
6279
6280TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
6281 TNode<Context> context, TNode<Map> map) {
6282 const TNode<NativeContext> native_context = LoadNativeContext(context);
6283 const TNode<Object> typed_array_prototype =
6284 LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
6285 TNode<HeapObject> proto = LoadMapPrototype(map);
6286 TNode<HeapObject> proto_of_proto = Select<HeapObject>(
6287 IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
6288 [=] { return NullConstant(); });
6289 return TaggedEqual(proto_of_proto, typed_array_prototype);
6290}
6291
6292TNode<BoolT> CodeStubAssembler::IsFastAliasedArgumentsMap(
6293 TNode<Context> context, TNode<Map> map) {
6294 const TNode<NativeContext> native_context = LoadNativeContext(context);
6295 const TNode<Object> arguments_map = LoadContextElement(
6296 native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
6297 return TaggedEqual(arguments_map, map);
6298}
6299
6300TNode<BoolT> CodeStubAssembler::IsSlowAliasedArgumentsMap(
6301 TNode<Context> context, TNode<Map> map) {
6302 const TNode<NativeContext> native_context = LoadNativeContext(context);
6303 const TNode<Object> arguments_map = LoadContextElement(
6304 native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
6305 return TaggedEqual(arguments_map, map);
6306}
6307
6308TNode<BoolT> CodeStubAssembler::IsSloppyArgumentsMap(TNode<Context> context,
6309 TNode<Map> map) {
6310 const TNode<NativeContext> native_context = LoadNativeContext(context);
6311 const TNode<Object> arguments_map =
6312 LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
6313 return TaggedEqual(arguments_map, map);
6314}
6315
6316TNode<BoolT> CodeStubAssembler::IsStrictArgumentsMap(TNode<Context> context,
6317 TNode<Map> map) {
6318 const TNode<NativeContext> native_context = LoadNativeContext(context);
6319 const TNode<Object> arguments_map =
6320 LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
6321 return TaggedEqual(arguments_map, map);
6322}
6323
6324TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
6325 return Select<BoolT>(
6326 TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
6327 [=] {
6328 return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
6329 });
6330}
6331
6332TNode<BoolT> CodeStubAssembler::IsCallable(TNode<HeapObject> object) {
6333 return IsCallableMap(LoadMap(object));
6334}
6335
6336TNode<BoolT> CodeStubAssembler::IsConstructorMap(TNode<Map> map) {
6337 return IsSetWord32<Map::Bits1::IsConstructorBit>(LoadMapBitField(map));
6338}
6339
6340TNode<BoolT> CodeStubAssembler::IsConstructor(TNode<HeapObject> object) {
6341 return IsConstructorMap(LoadMap(object));
6342}
6343
6344TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(TNode<Map> map) {
6345 return IsSetWord32<Map::Bits1::HasPrototypeSlotBit>(LoadMapBitField(map));
6346}
6347
6348TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
6349 TNode<Int32T> instance_type) {
6350 STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE)static_assert(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE
, "JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE")
;
6351 return Int32LessThanOrEqual(instance_type,
6352 Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
6353}
6354
6355TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
6356 TNode<Int32T> instance_type) {
6357 return Int32LessThanOrEqual(instance_type,
6358 Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
6359}
6360
6361TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
6362 TNode<Int32T> instance_type) {
6363 STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE)static_assert(INTERNALIZED_STRING_TYPE == FIRST_TYPE, "INTERNALIZED_STRING_TYPE == FIRST_TYPE"
)
;
6364 return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
6365}
6366
6367TNode<BoolT> CodeStubAssembler::IsTemporalInstantInstanceType(
6368 TNode<Int32T> instance_type) {
6369 return InstanceTypeEqual(instance_type, JS_TEMPORAL_INSTANT_TYPE);
6370}
6371
6372TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
6373 TNode<Int32T> instance_type) {
6374 CSA_DCHECK(this, IsStringInstanceType(instance_type))((void)0);
6375 return Word32Equal(
6376 Word32And(instance_type, Int32Constant(kStringEncodingMask)),
6377 Int32Constant(kOneByteStringTag));
6378}
6379
6380TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
6381 TNode<Int32T> instance_type) {
6382 CSA_DCHECK(this, IsStringInstanceType(instance_type))((void)0);
6383 return Word32Equal(
6384 Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6385 Int32Constant(kSeqStringTag));
6386}
6387
6388TNode<BoolT> CodeStubAssembler::IsSeqOneByteStringInstanceType(
6389 TNode<Int32T> instance_type) {
6390 CSA_DCHECK(this, IsStringInstanceType(instance_type))((void)0);
6391 return Word32Equal(
6392 Word32And(instance_type,
6393 Int32Constant(kStringRepresentationAndEncodingMask)),
6394 Int32Constant(kSeqOneByteStringTag));
6395}
6396
6397TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
6398 TNode<Int32T> instance_type) {
6399 CSA_DCHECK(this, IsStringInstanceType(instance_type))((void)0);
6400 return Word32Equal(
6401 Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6402 Int32Constant(kConsStringTag));
6403}
6404
6405TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
6406 TNode<Int32T> instance_type) {
6407 CSA_DCHECK(this, IsStringInstanceType(instance_type))((void)0);
6408 STATIC_ASSERT(kIsIndirectStringMask == 0x1)static_assert(kIsIndirectStringMask == 0x1, "kIsIndirectStringMask == 0x1"
)
;
6409 STATIC_ASSERT(kIsIndirectStringTag == 0x1)static_assert(kIsIndirectStringTag == 0x1, "kIsIndirectStringTag == 0x1"
)
;
6410 return UncheckedCast<BoolT>(
6411 Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
6412}
6413
6414TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
6415 TNode<Int32T> instance_type) {
6416 CSA_DCHECK(this, IsStringInstanceType(instance_type))((void)0);
6417 return Word32Equal(
6418 Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
6419 Int32Constant(kExternalStringTag));
6420}
6421
6422TNode<BoolT> CodeStubAssembler::IsUncachedExternalStringInstanceType(
6423 TNode<Int32T> instance_type) {
6424 CSA_DCHECK(this, IsStringInstanceType(instance_type))((void)0);
6425 STATIC_ASSERT(kUncachedExternalStringTag != 0)static_assert(kUncachedExternalStringTag != 0, "kUncachedExternalStringTag != 0"
)
;
6426 return IsSetWord32(instance_type, kUncachedExternalStringMask);
6427}
6428
6429TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
6430 TNode<Int32T> instance_type) {
6431 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE)static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE, "LAST_JS_RECEIVER_TYPE == LAST_TYPE"
)
;
6432 return Int32GreaterThanOrEqual(instance_type,
6433 Int32Constant(FIRST_JS_RECEIVER_TYPE));
6434}
6435
6436TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(TNode<Map> map) {
6437 return IsJSReceiverInstanceType(LoadMapInstanceType(map));
6438}
6439
6440TNode<BoolT> CodeStubAssembler::IsJSReceiver(TNode<HeapObject> object) {
6441 return IsJSReceiverMap(LoadMap(object));
6442}
6443
6444TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(TNode<HeapObject> object) {
6445 return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
6446}
6447
6448TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(TNode<Object> value) {
6449 return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
6450}
6451
6452TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
6453 TNode<Int32T> instance_type) {
6454 return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
6455}
6456
6457TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyMap(TNode<Map> map) {
6458 return IsJSGlobalProxyInstanceType(LoadMapInstanceType(map));
6459}
6460
6461TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(TNode<HeapObject> object) {
6462 return IsJSGlobalProxyMap(LoadMap(object));
6463}
6464
6465TNode<BoolT> CodeStubAssembler::IsJSGeneratorMap(TNode<Map> map) {
6466 return InstanceTypeEqual(LoadMapInstanceType(map), JS_GENERATOR_OBJECT_TYPE);
6467}
6468
6469TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
6470 TNode<Int32T> instance_type) {
6471 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE)static_assert(LAST_JS_OBJECT_TYPE == LAST_TYPE, "LAST_JS_OBJECT_TYPE == LAST_TYPE"
)
;
6472 return Int32GreaterThanOrEqual(instance_type,
6473 Int32Constant(FIRST_JS_OBJECT_TYPE));
6474}
6475
6476TNode<BoolT> CodeStubAssembler::IsJSApiObjectInstanceType(
6477 TNode<Int32T> instance_type) {
6478 return InstanceTypeEqual(instance_type, JS_API_OBJECT_TYPE);
6479}
6480
6481TNode<BoolT> CodeStubAssembler::IsJSObjectMap(TNode<Map> map) {
6482 return IsJSObjectInstanceType(LoadMapInstanceType(map));
6483}
6484
6485TNode<BoolT> CodeStubAssembler::IsJSApiObjectMap(TNode<Map> map) {
6486 return IsJSApiObjectInstanceType(LoadMapInstanceType(map));
6487}
6488
6489TNode<BoolT> CodeStubAssembler::IsJSObject(TNode<HeapObject> object) {
6490 return IsJSObjectMap(LoadMap(object));
6491}
6492
6493TNode<BoolT> CodeStubAssembler::IsJSApiObject(TNode<HeapObject> object) {
6494 return IsJSApiObjectMap(LoadMap(object));
6495}
6496
6497TNode<BoolT> CodeStubAssembler::IsJSFinalizationRegistryMap(TNode<Map> map) {
6498 return InstanceTypeEqual(LoadMapInstanceType(map),
6499 JS_FINALIZATION_REGISTRY_TYPE);
6500}
6501
6502TNode<BoolT> CodeStubAssembler::IsJSFinalizationRegistry(
6503 TNode<HeapObject> object) {
6504 return IsJSFinalizationRegistryMap(LoadMap(object));
6505}
6506
6507TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(TNode<Map> map) {
6508 return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
6509}
6510
6511TNode<BoolT> CodeStubAssembler::IsJSPromise(TNode<HeapObject> object) {
6512 return IsJSPromiseMap(LoadMap(object));
6513}
6514
6515TNode<BoolT> CodeStubAssembler::IsJSProxy(TNode<HeapObject> object) {
6516 return HasInstanceType(object, JS_PROXY_TYPE);
6517}
6518
6519TNode<BoolT> CodeStubAssembler::IsJSStringIterator(TNode<HeapObject> object) {
6520 return HasInstanceType(object, JS_STRING_ITERATOR_TYPE);
6521}
6522
6523TNode<BoolT> CodeStubAssembler::IsJSRegExpStringIterator(
6524 TNode<HeapObject> object) {
6525 return HasInstanceType(object, JS_REG_EXP_STRING_ITERATOR_TYPE);
6526}
6527
6528TNode<BoolT> CodeStubAssembler::IsMap(TNode<HeapObject> map) {
6529 return IsMetaMap(LoadMap(map));
6530}
6531
6532TNode<BoolT> CodeStubAssembler::IsJSPrimitiveWrapperInstanceType(
6533 TNode<Int32T> instance_type) {
6534 return InstanceTypeEqual(instance_type, JS_PRIMITIVE_WRAPPER_TYPE);
6535}
6536
6537TNode<BoolT> CodeStubAssembler::IsJSPrimitiveWrapper(TNode<HeapObject> object) {
6538 return IsJSPrimitiveWrapperMap(LoadMap(object));
6539}
6540
6541TNode<BoolT> CodeStubAssembler::IsJSPrimitiveWrapperMap(TNode<Map> map) {
6542 return IsJSPrimitiveWrapperInstanceType(LoadMapInstanceType(map));
6543}
6544
6545TNode<BoolT> CodeStubAssembler::IsJSWrappedFunction(TNode<HeapObject> object) {
6546 return HasInstanceType(object, JS_WRAPPED_FUNCTION_TYPE);
6547}
6548
6549TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
6550 TNode<Int32T> instance_type) {
6551 return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
6552}
6553
6554TNode<BoolT> CodeStubAssembler::IsJSArray(TNode<HeapObject> object) {
6555 return IsJSArrayMap(LoadMap(object));
6556}
6557
6558TNode<BoolT> CodeStubAssembler::IsJSArrayMap(TNode<Map> map) {
6559 return IsJSArrayInstanceType(LoadMapInstanceType(map));
6560}
6561
6562TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(TNode<HeapObject> object) {
6563 return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
6564}
6565
6566TNode<BoolT> CodeStubAssembler::IsJSSharedStructInstanceType(
6567 TNode<Int32T> instance_type) {
6568 return InstanceTypeEqual(instance_type, JS_SHARED_STRUCT_TYPE);
6569}
6570
6571TNode<BoolT> CodeStubAssembler::IsJSSharedStructMap(TNode<Map> map) {
6572 return IsJSSharedStructInstanceType(LoadMapInstanceType(map));
6573}
6574
6575TNode<BoolT> CodeStubAssembler::IsJSSharedStruct(TNode<HeapObject> object) {
6576 return IsJSSharedStructMap(LoadMap(object));
6577}
6578
6579TNode<BoolT> CodeStubAssembler::IsJSSharedStruct(TNode<Object> object) {
6580 return Select<BoolT>(
6581 TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
6582 [=] {
6583 TNode<HeapObject> heap_object = CAST(object)Cast(object);
6584 return IsJSSharedStruct(heap_object);
6585 });
6586}
6587
6588TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
6589 TNode<HeapObject> object) {
6590 return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
6591}
6592
6593TNode<BoolT> CodeStubAssembler::IsFixedArray(TNode<HeapObject> object) {
6594 return HasInstanceType(object, FIXED_ARRAY_TYPE);
6595}
6596
6597TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(TNode<HeapObject> object) {
6598 TNode<Uint16T> instance_type = LoadInstanceType(object);
6599 return UncheckedCast<BoolT>(
6600 Word32And(Int32GreaterThanOrEqual(instance_type,
6601 Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
6602 Int32LessThanOrEqual(instance_type,
6603 Int32Constant(LAST_FIXED_ARRAY_TYPE))));
6604}
6605
6606TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
6607 TNode<HeapObject> object) {
6608 TNode<Uint16T> instance_type = LoadInstanceType(object);
6609 return UncheckedCast<BoolT>(Word32Or(
6610 Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
6611 Int32GreaterThan(instance_type,
6612 Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
6613}
6614
6615TNode<BoolT> CodeStubAssembler::IsPropertyArray(TNode<HeapObject> object) {
6616 return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
6617}
6618
6619TNode<BoolT> CodeStubAssembler::IsPromiseReactionJobTask(
6620 TNode<HeapObject> object) {
6621 TNode<Uint16T> instance_type = LoadInstanceType(object);
6622 return IsInRange(instance_type, FIRST_PROMISE_REACTION_JOB_TASK_TYPE,
6623 LAST_PROMISE_REACTION_JOB_TASK_TYPE);
6624}
6625
6626// This complicated check is due to elements oddities. If a smi array is empty
6627// after Array.p.shift, it is replaced by the empty array constant. If it is
6628// later filled with a double element, we try to grow it but pass in a double
6629// elements kind. Usually this would cause a size mismatch (since the source
6630// fixed array has HOLEY_ELEMENTS and destination has
6631// HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
6632// source array is empty.
6633// TODO(jgruber): It might we worth creating an empty_double_array constant to
6634// simplify this case.
6635TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
6636 TNode<FixedArrayBase> object, ElementsKind kind) {
6637 Label out(this);
6638 TVARIABLE(BoolT, var_result, Int32TrueConstant())TVariable<BoolT> var_result(Int32TrueConstant(), this);
6639
6640 GotoIf(IsFixedArrayWithKind(object, kind), &out);
6641
6642 const TNode<Smi> length = LoadFixedArrayBaseLength(object);
6643 GotoIf(SmiEqual(length, SmiConstant(0)), &out);
6644
6645 var_result = Int32FalseConstant();
6646 Goto(&out);
6647
6648 BIND(&out)Bind(&out);
6649 return var_result.value();
6650}
6651
6652TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(TNode<HeapObject> object,
6653 ElementsKind kind) {
6654 if (IsDoubleElementsKind(kind)) {
6655 return IsFixedDoubleArray(object);
6656 } else {
6657 DCHECK(IsSmiOrObjectElementsKind(kind) || IsSealedElementsKind(kind) ||((void) 0)
6658 IsNonextensibleElementsKind(kind))((void) 0);
6659 return IsFixedArraySubclass(object);
6660 }
6661}
6662
6663TNode<BoolT> CodeStubAssembler::IsBoolean(TNode<HeapObject> object) {
6664 return IsBooleanMap(LoadMap(object));
6665}
6666
6667TNode<BoolT> CodeStubAssembler::IsPropertyCell(TNode<HeapObject> object) {
6668 return IsPropertyCellMap(LoadMap(object));
6669}
6670
6671TNode<BoolT> CodeStubAssembler::IsHeapNumberInstanceType(
6672 TNode<Int32T> instance_type) {
6673 return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
6674}
6675
6676TNode<BoolT> CodeStubAssembler::IsOddball(TNode<HeapObject> object) {
6677 return IsOddballInstanceType(LoadInstanceType(object));
6678}
6679
6680TNode<BoolT> CodeStubAssembler::IsOddballInstanceType(
6681 TNode<Int32T> instance_type) {
6682 return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
6683}
6684
6685TNode<BoolT> CodeStubAssembler::IsName(TNode<HeapObject> object) {
6686 return IsNameInstanceType(LoadInstanceType(object));
6687}
6688
6689TNode<BoolT> CodeStubAssembler::IsNameInstanceType(
6690 TNode<Int32T> instance_type) {
6691 return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE));
6692}
6693
6694TNode<BoolT> CodeStubAssembler::IsString(TNode<HeapObject> object) {
6695 return IsStringInstanceType(LoadInstanceType(object));
6696}
6697
6698TNode<BoolT> CodeStubAssembler::IsSeqOneByteString(TNode<HeapObject> object) {
6699 return IsSeqOneByteStringInstanceType(LoadInstanceType(object));
6700}
6701
6702TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
6703 TNode<Int32T> instance_type) {
6704 return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
6705}
6706
6707TNode<BoolT> CodeStubAssembler::IsInternalizedStringInstanceType(
6708 TNode<Int32T> instance_type) {
6709 STATIC_ASSERT(kNotInternalizedTag != 0)static_assert(kNotInternalizedTag != 0, "kNotInternalizedTag != 0"
)
;
6710 return Word32Equal(
6711 Word32And(instance_type,
6712 Int32Constant(kIsNotStringMask | kIsNotInternalizedMask)),
6713 Int32Constant(kStringTag | kInternalizedTag));
6714}
6715
6716TNode<BoolT> CodeStubAssembler::IsSharedStringInstanceType(
6717 TNode<Int32T> instance_type) {
6718 TNode<BoolT> is_shared = Word32Equal(
6719 Word32And(instance_type,
6720 Int32Constant(kIsNotStringMask | kSharedStringMask)),
6721 Int32Constant(kStringTag | kSharedStringTag));
6722 // TODO(v8:12007): Internalized strings do not have kSharedStringTag until
6723 // the shared string table ships.
6724 return Word32Or(is_shared,
6725 Word32And(HasSharedStringTableFlag(),
6726 IsInternalizedStringInstanceType(instance_type)));
6727}
6728
6729TNode<BoolT> CodeStubAssembler::IsUniqueName(TNode<HeapObject> object) {
6730 TNode<Uint16T> instance_type = LoadInstanceType(object);
6731 return Select<BoolT>(
6732 IsInternalizedStringInstanceType(instance_type),
6733 [=] { return Int32TrueConstant(); },
6734 [=] { return IsSymbolInstanceType(instance_type); });
6735}
6736
6737// Semantics: guaranteed not to be an integer index (i.e. contains non-digit
6738// characters, or is outside MAX_SAFE_INTEGER/size_t range). Note that for
6739// non-TypedArray receivers, there are additional strings that must be treated
6740// as named property keys, namely the range [0xFFFFFFFF, MAX_SAFE_INTEGER].
6741TNode<BoolT> CodeStubAssembler::IsUniqueNameNoIndex(TNode<HeapObject> object) {
6742 TNode<Uint16T> instance_type = LoadInstanceType(object);
6743 return Select<BoolT>(
6744 IsInternalizedStringInstanceType(instance_type),
6745 [=] {
6746 return IsNotEqualInWord32<Name::HashFieldTypeBits>(
6747 LoadNameRawHashField(CAST(object)Cast(object)),
6748 Name::HashFieldType::kIntegerIndex);
6749 },
6750 [=] { return IsSymbolInstanceType(instance_type); });
6751}
6752
6753// Semantics: {object} is a Symbol, or a String that doesn't have a cached
6754// index. This returns {true} for strings containing representations of
6755// integers in the range above 9999999 (per kMaxCachedArrayIndexLength)
6756// and below MAX_SAFE_INTEGER. For CSA_DCHECKs ensuring correct usage, this is
6757// better than no checking; and we don't have a good/fast way to accurately
6758// check such strings for being within "array index" (uint32_t) range.
6759TNode<BoolT> CodeStubAssembler::IsUniqueNameNoCachedIndex(
6760 TNode<HeapObject> object) {
6761 TNode<Uint16T> instance_type = LoadInstanceType(object);
6762 return Select<BoolT>(
6763 IsInternalizedStringInstanceType(instance_type),
6764 [=] {
6765 return IsSetWord32(LoadNameRawHashField(CAST(object)Cast(object)),
6766 Name::kDoesNotContainCachedArrayIndexMask);
6767 },
6768 [=] { return IsSymbolInstanceType(instance_type); });
6769}
6770
6771TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
6772 TNode<Int32T> instance_type) {
6773 return InstanceTypeEqual(instance_type, BIGINT_TYPE);
6774}
6775
6776TNode<BoolT> CodeStubAssembler::IsBigInt(TNode<HeapObject> object) {
6777 return IsBigIntInstanceType(LoadInstanceType(object));
6778}
6779
6780TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
6781 TNode<Int32T> instance_type) {
6782 return Int32LessThanOrEqual(instance_type,
6783 Int32Constant(LAST_PRIMITIVE_HEAP_OBJECT_TYPE));
6784}
6785
6786TNode<BoolT> CodeStubAssembler::IsPrivateName(TNode<Symbol> symbol) {
6787 TNode<Uint32T> flags = LoadObjectField<Uint32T>(symbol, Symbol::kFlagsOffset);
6788 return IsSetWord32<Symbol::IsPrivateNameBit>(flags);
6789}
6790
6791TNode<BoolT> CodeStubAssembler::IsHashTable(TNode<HeapObject> object) {
6792 TNode<Uint16T> instance_type = LoadInstanceType(object);
6793 return UncheckedCast<BoolT>(
6794 Word32And(Int32GreaterThanOrEqual(instance_type,
6795 Int32Constant(FIRST_HASH_TABLE_TYPE)),
6796 Int32LessThanOrEqual(instance_type,
6797 Int32Constant(LAST_HASH_TABLE_TYPE))));
6798}
6799
6800TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(TNode<HeapObject> object) {
6801 return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
6802}
6803
6804TNode<BoolT> CodeStubAssembler::IsNameDictionary(TNode<HeapObject> object) {
6805 return HasInstanceType(object, NAME_DICTIONARY_TYPE);
6806}
6807TNode<BoolT> CodeStubAssembler::IsOrderedNameDictionary(
6808 TNode<HeapObject> object) {
6809 return HasInstanceType(object, ORDERED_NAME_DICTIONARY_TYPE);
6810}
6811
6812TNode<BoolT> CodeStubAssembler::IsSwissNameDictionary(
6813 TNode<HeapObject> object) {
6814 return HasInstanceType(object, SWISS_NAME_DICTIONARY_TYPE);
6815}
6816
6817TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(TNode<HeapObject> object) {
6818 return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
6819}
6820
6821TNode<BoolT> CodeStubAssembler::IsNumberDictionary(TNode<HeapObject> object) {
6822 return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
6823}
6824
6825TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(TNode<HeapObject> object) {
6826 return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
6827}
6828
6829TNode<BoolT> CodeStubAssembler::IsFunctionInstanceType(
6830 TNode<Int32T> instance_type) {
6831 return IsInRange(instance_type,
6832 FIRST_JS_FUNCTION_OR_BOUND_FUNCTION_OR_WRAPPED_FUNCTION_TYPE,
6833 LAST_JS_FUNCTION_OR_BOUND_FUNCTION_OR_WRAPPED_FUNCTION_TYPE);
6834}
6835TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
6836 TNode<Int32T> instance_type) {
6837 return IsInRange(instance_type, FIRST_JS_FUNCTION_TYPE,
6838 LAST_JS_FUNCTION_TYPE);
6839}
6840
6841TNode<BoolT> CodeStubAssembler::IsJSFunction(TNode<HeapObject> object) {
6842 return IsJSFunctionMap(LoadMap(object));
6843}
6844
6845TNode<BoolT> CodeStubAssembler::IsJSBoundFunction(TNode<HeapObject> object) {
6846 return HasInstanceType(object, JS_BOUND_FUNCTION_TYPE);
6847}
6848
6849TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(TNode<Map> map) {
6850 return IsJSFunctionInstanceType(LoadMapInstanceType(map));
6851}
6852
6853TNode<BoolT> CodeStubAssembler::IsJSTypedArrayInstanceType(
6854 TNode<Int32T> instance_type) {
6855 return InstanceTypeEqual(instance_type, JS_TYPED_ARRAY_TYPE);
6856}
6857
6858TNode<BoolT> CodeStubAssembler::IsJSTypedArrayMap(TNode<Map> map) {
6859 return IsJSTypedArrayInstanceType(LoadMapInstanceType(map));
6860}
6861
6862TNode<BoolT> CodeStubAssembler::IsJSTypedArray(TNode<HeapObject> object) {
6863 return IsJSTypedArrayMap(LoadMap(object));
6864}
6865
6866TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(TNode<HeapObject> object) {
6867 return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
6868}
6869
6870TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
6871 return HasInstanceType(object, JS_DATA_VIEW_TYPE);
6872}
6873
6874TNode<BoolT> CodeStubAssembler::IsJSRegExp(TNode<HeapObject> object) {
6875 return HasInstanceType(object, JS_REG_EXP_TYPE);
6876}
6877
6878TNode<BoolT> CodeStubAssembler::IsNumeric(TNode<Object> object) {
6879 return Select<BoolT>(
6880 TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6881 [=] {
6882 return UncheckedCast<BoolT>(
6883 Word32Or(IsHeapNumber(CAST(object)Cast(object)), IsBigInt(CAST(object)Cast(object))));
6884 });
6885}
6886
6887TNode<BoolT> CodeStubAssembler::IsNumberNormalized(TNode<Number> number) {
6888 TVARIABLE(BoolT, var_result, Int32TrueConstant())TVariable<BoolT> var_result(Int32TrueConstant(), this);
6889 Label out(this);
6890
6891 GotoIf(TaggedIsSmi(number), &out);
6892
6893 TNode<Float64T> value = LoadHeapNumberValue(CAST(number)Cast(number));
6894 TNode<Float64T> smi_min =
6895 Float64Constant(static_cast<double>(Smi::kMinValue));
6896 TNode<Float64T> smi_max =
6897 Float64Constant(static_cast<double>(Smi::kMaxValue));
6898
6899 GotoIf(Float64LessThan(value, smi_min), &out);
6900 GotoIf(Float64GreaterThan(value, smi_max), &out);
6901 GotoIfNot(Float64Equal(value, value), &out); // NaN.
6902
6903 var_result = Int32FalseConstant();
6904 Goto(&out);
6905
6906 BIND(&out)Bind(&out);
6907 return var_result.value();
6908}
6909
6910TNode<BoolT> CodeStubAssembler::IsNumberPositive(TNode<Number> number) {
6911 return Select<BoolT>(
6912 TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6913 [=] { return IsHeapNumberPositive(CAST(number)Cast(number)); });
6914}
6915
6916// TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
6917TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
6918 TNode<Float64T> value = LoadHeapNumberValue(number);
6919 TNode<Float64T> float_zero = Float64Constant(0.);
6920 return Float64GreaterThanOrEqual(value, float_zero);
6921}
6922
6923TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
6924 TNode<Number> number) {
6925 return Select<BoolT>(
6926 // TODO(cbruni): Introduce TaggedIsNonNegateSmi to avoid confusion.
6927 TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6928 [=] {
6929 TNode<HeapNumber> heap_number = CAST(number)Cast(number);
6930 return Select<BoolT>(
6931 IsInteger(heap_number),
6932 [=] { return IsHeapNumberPositive(heap_number); },
6933 [=] { return Int32FalseConstant(); });
6934 });
6935}
6936
6937TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
6938 return Select<BoolT>(
6939 TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6940 [=] {
6941 return Select<BoolT>(
6942 IsHeapNumber(CAST(number)Cast(number)),
6943 [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
6944 [=] { return Int32FalseConstant(); });
6945 });
6946}
6947
6948TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
6949 // Load the actual value of {number}.
6950 TNode<Float64T> number_value = LoadHeapNumberValue(number);
6951 // Truncate the value of {number} to an integer (or an infinity).
6952 TNode<Float64T> integer = Float64Trunc(number_value);
6953
6954 return Select<BoolT>(
6955 // Check if {number}s value matches the integer (ruling out the
6956 // infinities).
6957 Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
6958 [=] {
6959 // Check if the {integer} value is in safe integer range.
6960 return Float64LessThanOrEqual(Float64Abs(integer),
6961 Float64Constant(kMaxSafeInteger));
6962 },
6963 [=] { return Int32FalseConstant(); });
6964}
6965
6966TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
6967 return Select<BoolT>(
6968 TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6969 [=] {
6970 return Select<BoolT>(
6971 IsHeapNumber(CAST(number)Cast(number)),
6972 [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
6973 [=] { return Int32FalseConstant(); });
6974 });
6975}
6976
6977TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
6978 TNode<Float64T> number_value = LoadHeapNumberValue(number);
6979 // Truncate the value of {number} to an integer (or an infinity).
6980 TNode<Float64T> integer = Float64Trunc(number_value);
6981 // Check if {number}s value matches the integer (ruling out the infinities).
6982 return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
6983}
6984
6985TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
6986 // Check that the HeapNumber is a valid uint32
6987 return Select<BoolT>(
6988 IsHeapNumberPositive(number),
6989 [=] {
6990 TNode<Float64T> value = LoadHeapNumberValue(number);
6991 TNode<Uint32T> int_value = TruncateFloat64ToWord32(value);
6992 return Float64Equal(value, ChangeUint32ToFloat64(int_value));
6993 },
6994 [=] { return Int32FalseConstant(); });
6995}
6996
6997TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
6998 return Select<BoolT>(
6999 TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
7000 [=] { return IsHeapNumberUint32(CAST(number)Cast(number)); });
7001}
7002
7003TNode<IntPtrT> CodeStubAssembler::LoadBasicMemoryChunkFlags(
7004 TNode<HeapObject> object) {
7005 TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
7006 TNode<IntPtrT> page = PageFromAddress(object_word);
7007 return UncheckedCast<IntPtrT>(
7008 Load(MachineType::Pointer(), page,
7009 IntPtrConstant(BasicMemoryChunk::kFlagsOffset)));
7010}
7011
7012template <typename TIndex>
7013TNode<BoolT> CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(
7014 TNode<TIndex> element_count, int base_size) {
7015 static_assert(
7016 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
7017 "Only Smi or IntPtrT element_count is allowed");
7018 int max_newspace_elements =
7019 (kMaxRegularHeapObjectSize - base_size) / kTaggedSize;
7020 return IntPtrOrSmiGreaterThan(
7021 element_count, IntPtrOrSmiConstant<TIndex>(max_newspace_elements));
7022}
7023
7024TNode<Uint16T> CodeStubAssembler::StringCharCodeAt(TNode<String> string,
7025 TNode<UintPtrT> index) {
7026 CSA_DCHECK(this, UintPtrLessThan(index, LoadStringLengthAsWord(string)))((void)0);
7027
7028 TVARIABLE(Uint16T, var_result)TVariable<Uint16T> var_result(this);
7029
7030 Label return_result(this), if_runtime(this, Label::kDeferred),
7031 if_stringistwobyte(this), if_stringisonebyte(this);
7032
7033 ToDirectStringAssembler to_direct(state(), string);
7034 to_direct.TryToDirect(&if_runtime);
7035 const TNode<UintPtrT> offset =
7036 UintPtrAdd(index, Unsigned(to_direct.offset()));
7037 const TNode<Int32T> instance_type = to_direct.instance_type();
7038 const TNode<RawPtrT> string_data = to_direct.PointerToData(&if_runtime);
7039
7040 // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
7041 Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
7042 &if_stringistwobyte);
7043
7044 BIND(&if_stringisonebyte)Bind(&if_stringisonebyte);
7045 {
7046 var_result = Load<Uint8T>(string_data, offset);
7047 Goto(&return_result);
7048 }
7049
7050 BIND(&if_stringistwobyte)Bind(&if_stringistwobyte);
7051 {
7052 var_result = Load<Uint16T>(string_data, WordShl(offset, IntPtrConstant(1)));
7053 Goto(&return_result);
7054 }
7055
7056 BIND(&if_runtime)Bind(&if_runtime);
7057 {
7058 TNode<Object> result =
7059 CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(), string,
7060 ChangeUintPtrToTagged(index));
7061 var_result = UncheckedCast<Uint16T>(SmiToInt32(CAST(result)Cast(result)));
7062 Goto(&return_result);
7063 }
7064
7065 BIND(&return_result)Bind(&return_result);
7066 return var_result.value();
7067}
7068
7069TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
7070 TVARIABLE(String, var_result)TVariable<String> var_result(this);
7071
7072 // Check if the {code} is a one-byte char code.
7073 Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
7074 if_done(this);
7075 Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
7076 &if_codeisonebyte, &if_codeistwobyte);
7077 BIND(&if_codeisonebyte)Bind(&if_codeisonebyte);
7078 {
7079 // Load the isolate wide single character string cache.
7080 TNode<FixedArray> cache = SingleCharacterStringCacheConstant();
7081 TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
7082
7083 // Check if we have an entry for the {code} in the single character string
7084 // cache already.
7085 Label if_entryisundefined(this, Label::kDeferred),
7086 if_entryisnotundefined(this);
7087 TNode<Object> entry = UnsafeLoadFixedArrayElement(cache, code_index);
7088 Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
7089
7090 BIND(&if_entryisundefined)Bind(&if_entryisundefined);
7091 {
7092 // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
7093 TNode<String> result = AllocateSeqOneByteString(1);
7094 StoreNoWriteBarrier(
7095 MachineRepresentation::kWord8, result,
7096 IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
7097 StoreFixedArrayElement(cache, code_index, result);
7098 var_result = result;
7099 Goto(&if_done);
7100 }
7101
7102 BIND(&if_entryisnotundefined)Bind(&if_entryisnotundefined);
7103 {
7104 // Return the entry from the {cache}.
7105 var_result = CAST(entry)Cast(entry);
7106 Goto(&if_done);
7107 }
7108 }
7109
7110 BIND(&if_codeistwobyte)Bind(&if_codeistwobyte);
7111 {
7112 // Allocate a new SeqTwoByteString for {code}.
7113 TNode<String> result = AllocateSeqTwoByteString(1);
7114 StoreNoWriteBarrier(
7115 MachineRepresentation::kWord16, result,
7116 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
7117 var_result = result;
7118 Goto(&if_done);
7119 }
7120
7121 BIND(&if_done)Bind(&if_done);
7122 return var_result.value();
7123}
7124
7125ToDirectStringAssembler::ToDirectStringAssembler(
7126 compiler::CodeAssemblerState* state, TNode<String> string, Flags flags)
7127 : CodeStubAssembler(state),
7128 var_string_(string, this),
7129 var_instance_type_(LoadInstanceType(string), this),
7130 var_offset_(IntPtrConstant(0), this),
7131 var_is_external_(Int32Constant(0), this),
7132 flags_(flags) {}
7133
7134TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
7135 Label dispatch(this, {&var_string_, &var_offset_, &var_instance_type_});
7136 Label if_iscons(this);
7137 Label if_isexternal(this);
7138 Label if_issliced(this);
7139 Label if_isthin(this);
7140 Label out(this);
7141
7142 Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
7143 &dispatch);
7144
7145 // Dispatch based on string representation.
7146 BIND(&dispatch)Bind(&dispatch);
7147 {
7148 int32_t values[] = {
7149 kSeqStringTag, kConsStringTag, kExternalStringTag,
7150 kSlicedStringTag, kThinStringTag,
7151 };
7152 Label* labels[] = {
7153 &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
7154 };
7155 STATIC_ASSERT(arraysize(values) == arraysize(labels))static_assert((sizeof(ArraySizeHelper(values))) == (sizeof(ArraySizeHelper
(labels))), "arraysize(values) == arraysize(labels)")
;
7156
7157 const TNode<Int32T> representation = Word32And(
7158 var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
7159 Switch(representation, if_bailout, values, labels, arraysize(values)(sizeof(ArraySizeHelper(values))));
7160 }
7161
7162 // Cons string. Check whether it is flat, then fetch first part.
7163 // Flat cons strings have an empty second part.
7164 BIND(&if_iscons)Bind(&if_iscons);
7165 {
7166 const TNode<String> string = var_string_.value();
7167 GotoIfNot(IsEmptyString(
7168 LoadObjectField<String>(string, ConsString::kSecondOffset)),
7169 if_bailout);
7170
7171 const TNode<String> lhs =
7172 LoadObjectField<String>(string, ConsString::kFirstOffset);
7173 var_string_ = lhs;
7174 var_instance_type_ = LoadInstanceType(lhs);
7175
7176 Goto(&dispatch);
7177 }
7178
7179 // Sliced string. Fetch parent and correct start index by offset.
7180 BIND(&if_issliced)Bind(&if_issliced);
7181 {
7182 if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
7183 Goto(if_bailout);
7184 } else {
7185 const TNode<String> string = var_string_.value();
7186 const TNode<IntPtrT> sliced_offset =
7187 LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
7188 var_offset_ = IntPtrAdd(var_offset_.value(), sliced_offset);
7189
7190 const TNode<String> parent =
7191 LoadObjectField<String>(string, SlicedString::kParentOffset);
7192 var_string_ = parent;
7193 var_instance_type_ = LoadInstanceType(parent);
7194
7195 Goto(&dispatch);
7196 }
7197 }
7198
7199 // Thin string. Fetch the actual string.
7200 BIND(&if_isthin)Bind(&if_isthin);
7201 {
7202 const TNode<String> string = var_string_.value();
7203 const TNode<String> actual_string =
7204 LoadObjectField<String>(string, ThinString::kActualOffset);
7205 const TNode<Uint16T> actual_instance_type = LoadInstanceType(actual_string);
7206
7207 var_string_ = actual_string;
7208 var_instance_type_ = actual_instance_type;
7209
7210 Goto(&dispatch);
7211 }
7212
7213 // External string.
7214 BIND(&if_isexternal)Bind(&if_isexternal);
7215 var_is_external_ = Int32Constant(1);
7216 Goto(&out);
7217
7218 BIND(&out)Bind(&out);
7219 return var_string_.value();
7220}
7221
7222TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
7223 StringPointerKind ptr_kind, Label* if_bailout) {
7224 CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING)do { if ((__builtin_expect(!!(!(ptr_kind == PTR_TO_DATA || ptr_kind
== PTR_TO_STRING)), 0))) { V8_Fatal("Check failed: %s.", "ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING"
); } } while (false)
;
7225
7226 TVARIABLE(RawPtrT, var_result)TVariable<RawPtrT> var_result(this);
7227 Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
7228 Branch(is_external(), &if_isexternal, &if_issequential);
7229
7230 BIND(&if_issequential)Bind(&if_issequential);
7231 {
7232 STATIC_ASSERT(SeqOneByteString::kHeaderSize ==static_assert(SeqOneByteString::kHeaderSize == SeqTwoByteString
::kHeaderSize, "SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize"
)
7233 SeqTwoByteString::kHeaderSize)static_assert(SeqOneByteString::kHeaderSize == SeqTwoByteString
::kHeaderSize, "SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize"
)
;
7234 TNode<RawPtrT> result =
7235 ReinterpretCast<RawPtrT>(BitcastTaggedToWord(var_string_.value()));
7236 if (ptr_kind == PTR_TO_DATA) {
7237 result = RawPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7238 kHeapObjectTag));
7239 }
7240 var_result = result;
7241 Goto(&out);
7242 }
7243
7244 BIND(&if_isexternal)Bind(&if_isexternal);
7245 {
7246 GotoIf(IsUncachedExternalStringInstanceType(var_instance_type_.value()),
7247 if_bailout);
7248
7249 TNode<String> string = var_string_.value();
7250 TNode<RawPtrT> result = LoadExternalStringResourceDataPtr(CAST(string)Cast(string));
7251 if (ptr_kind == PTR_TO_STRING) {
7252 result = RawPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
7253 kHeapObjectTag));
7254 }
7255 var_result = result;
7256 Goto(&out);
7257 }
7258
7259 BIND(&out)Bind(&out);
7260 return var_result.value();
7261}
7262
7263TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
7264 Label runtime(this, Label::kDeferred);
7265 Label end(this);
7266
7267 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
7268
7269 // Check if string has a cached array index.
7270 TNode<Uint32T> raw_hash_field = LoadNameRawHashField(input);
7271 GotoIf(IsSetWord32(raw_hash_field, Name::kDoesNotContainCachedArrayIndexMask),
7272 &runtime);
7273
7274 var_result = SmiTag(Signed(
7275 DecodeWordFromWord32<String::ArrayIndexValueBits>(raw_hash_field)));
7276 Goto(&end);
7277
7278 BIND(&runtime)Bind(&runtime);
7279 {
7280 var_result =
7281 CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input))Cast(CallRuntime(Runtime::kStringToNumber, NoContextConstant(
), input))
;
7282 Goto(&end);
7283 }
7284
7285 BIND(&end)Bind(&end);
7286 return var_result.value();
7287}
7288
7289TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input,
7290 Label* bailout) {
7291 TVARIABLE(String, result)TVariable<String> result(this);
7292 TVARIABLE(Smi, smi_input)TVariable<Smi> smi_input(this);
7293 Label if_smi(this), if_heap_number(this), done(this, &result);
7294
7295 // Load the number string cache.
7296 TNode<FixedArray> number_string_cache = NumberStringCacheConstant();
7297
7298 // Make the hash mask from the length of the number string cache. It
7299 // contains two elements (number and string) for each cache entry.
7300 TNode<IntPtrT> number_string_cache_length =
7301 LoadAndUntagFixedArrayBaseLength(number_string_cache);
7302 TNode<Int32T> one = Int32Constant(1);
7303 TNode<Word32T> mask = Int32Sub(
7304 Word32Shr(TruncateWordToInt32(number_string_cache_length), one), one);
7305
7306 GotoIfNot(TaggedIsSmi(input), &if_heap_number);
7307 smi_input = CAST(input)Cast(input);
7308 Goto(&if_smi);
7309
7310 BIND(&if_heap_number)Bind(&if_heap_number);
7311 {
7312 Comment("NumberToString - HeapNumber");
7313 TNode<HeapNumber> heap_number_input = CAST(input)Cast(input);
7314 // Try normalizing the HeapNumber.
7315 TryHeapNumberToSmi(heap_number_input, &smi_input, &if_smi);
7316
7317 // Make a hash from the two 32-bit values of the double.
7318 TNode<Int32T> low =
7319 LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
7320 TNode<Int32T> high = LoadObjectField<Int32T>(
7321 heap_number_input, HeapNumber::kValueOffset + kIntSize);
7322 TNode<Word32T> hash = Word32And(Word32Xor(low, high), mask);
7323 TNode<IntPtrT> entry_index =
7324 Signed(ChangeUint32ToWord(Int32Add(hash, hash)));
7325
7326 // Cache entry's key must be a heap number
7327 TNode<Object> number_key =
7328 UnsafeLoadFixedArrayElement(number_string_cache, entry_index);
7329 GotoIf(TaggedIsSmi(number_key), bailout);
7330 TNode<HeapObject> number_key_heap_object = CAST(number_key)Cast(number_key);
7331 GotoIfNot(IsHeapNumber(number_key_heap_object), bailout);
7332
7333 // Cache entry's key must match the heap number value we're looking for.
7334 TNode<Int32T> low_compare = LoadObjectField<Int32T>(
7335 number_key_heap_object, HeapNumber::kValueOffset);
7336 TNode<Int32T> high_compare = LoadObjectField<Int32T>(
7337 number_key_heap_object, HeapNumber::kValueOffset + kIntSize);
7338 GotoIfNot(Word32Equal(low, low_compare), bailout);
7339 GotoIfNot(Word32Equal(high, high_compare), bailout);
7340
7341 // Heap number match, return value from cache entry.
7342 result = CAST(UnsafeLoadFixedArrayElement(number_string_cache, entry_index,Cast(UnsafeLoadFixedArrayElement(number_string_cache, entry_index
, kTaggedSize))
7343 kTaggedSize))Cast(UnsafeLoadFixedArrayElement(number_string_cache, entry_index
, kTaggedSize))
;
7344 Goto(&done);
7345 }
7346
7347 BIND(&if_smi)Bind(&if_smi);
7348 {
7349 Comment("NumberToString - Smi");
7350 // Load the smi key, make sure it matches the smi we're looking for.
7351 TNode<Word32T> hash = Word32And(SmiToInt32(smi_input.value()), mask);
7352 TNode<IntPtrT> entry_index =
7353 Signed(ChangeUint32ToWord(Int32Add(hash, hash)));
7354 TNode<Object> smi_key =
7355 UnsafeLoadFixedArrayElement(number_string_cache, entry_index);
7356 Label if_smi_cache_missed(this);
7357 GotoIf(TaggedNotEqual(smi_key, smi_input.value()), &if_smi_cache_missed);
7358
7359 // Smi match, return value from cache entry.
7360 result = CAST(UnsafeLoadFixedArrayElement(number_string_cache, entry_index,Cast(UnsafeLoadFixedArrayElement(number_string_cache, entry_index
, kTaggedSize))
7361 kTaggedSize))Cast(UnsafeLoadFixedArrayElement(number_string_cache, entry_index
, kTaggedSize))
;
7362 Goto(&done);
7363
7364 BIND(&if_smi_cache_missed)Bind(&if_smi_cache_missed);
7365 {
7366 Label store_to_cache(this);
7367
7368 // Bailout when the cache is not full-size.
7369 const int kFullCacheSize =
7370 isolate()->heap()->MaxNumberToStringCacheSize();
7371 Branch(IntPtrLessThan(number_string_cache_length,
7372 IntPtrConstant(kFullCacheSize)),
7373 bailout, &store_to_cache);
7374
7375 BIND(&store_to_cache)Bind(&store_to_cache);
7376 {
7377 // Generate string and update string hash field.
7378 result = NumberToStringSmi(SmiToInt32(smi_input.value()),
7379 Int32Constant(10), bailout);
7380
7381 // Store string into cache.
7382 StoreFixedArrayElement(number_string_cache, entry_index,
7383 smi_input.value());
7384 StoreFixedArrayElement(number_string_cache,
7385 IntPtrAdd(entry_index, IntPtrConstant(1)),
7386 result.value());
7387 Goto(&done);
7388 }
7389 }
7390 }
7391 BIND(&done)Bind(&done);
7392 return result.value();
7393}
7394
7395TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
7396 TVARIABLE(String, result)TVariable<String> result(this);
7397 Label runtime(this, Label::kDeferred), done(this, &result);
7398
7399 GotoIfForceSlowPath(&runtime);
7400
7401 result = NumberToString(input, &runtime);
7402 Goto(&done);
7403
7404 BIND(&runtime)Bind(&runtime);
7405 {
7406 // No cache entry, go to the runtime.
7407 result = CAST(Cast(CallRuntime(Runtime::kNumberToStringSlow, NoContextConstant
(), input))
7408 CallRuntime(Runtime::kNumberToStringSlow, NoContextConstant(), input))Cast(CallRuntime(Runtime::kNumberToStringSlow, NoContextConstant
(), input))
;
7409 Goto(&done);
7410 }
7411 BIND(&done)Bind(&done);
7412 return result.value();
7413}
7414
7415TNode<Numeric> CodeStubAssembler::NonNumberToNumberOrNumeric(
7416 TNode<Context> context, TNode<HeapObject> input, Object::Conversion mode,
7417 BigIntHandling bigint_handling) {
7418 CSA_DCHECK(this, Word32BinaryNot(IsHeapNumber(input)))((void)0);
7419
7420 TVARIABLE(HeapObject, var_input, input)TVariable<HeapObject> var_input(input, this);
7421 TVARIABLE(Numeric, var_result)TVariable<Numeric> var_result(this);
7422 TVARIABLE(Uint16T, instance_type, LoadInstanceType(var_input.value()))TVariable<Uint16T> instance_type(LoadInstanceType(var_input
.value()), this)
;
7423 Label end(this), if_inputisreceiver(this, Label::kDeferred),
7424 if_inputisnotreceiver(this);
7425
7426 // We need to handle JSReceiver first since we might need to do two
7427 // conversions due to ToPritmive.
7428 Branch(IsJSReceiverInstanceType(instance_type.value()), &if_inputisreceiver,
7429 &if_inputisnotreceiver);
7430
7431 BIND(&if_inputisreceiver)Bind(&if_inputisreceiver);
7432 {
7433 // The {var_input.value()} is a JSReceiver, we need to convert it to a
7434 // Primitive first using the ToPrimitive type conversion, preferably
7435 // yielding a Number.
7436 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7437 isolate(), ToPrimitiveHint::kNumber);
7438 TNode<Object> result = CallStub(callable, context, var_input.value());
7439
7440 // Check if the {result} is already a Number/Numeric.
7441 Label if_done(this), if_notdone(this);
7442 Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
7443 : IsNumeric(result),
7444 &if_done, &if_notdone);
7445
7446 BIND(&if_done)Bind(&if_done);
7447 {
7448 // The ToPrimitive conversion already gave us a Number/Numeric, so
7449 // we're done.
7450 var_result = CAST(result)Cast(result);
7451 Goto(&end);
7452 }
7453
7454 BIND(&if_notdone)Bind(&if_notdone);
7455 {
7456 // We now have a Primitive {result}, but it's not yet a
7457 // Number/Numeric.
7458 var_input = CAST(result)Cast(result);
7459 // We have a new input. Redo the check and reload instance_type.
7460 CSA_DCHECK(this, Word32BinaryNot(IsHeapNumber(var_input.value())))((void)0);
7461 instance_type = LoadInstanceType(var_input.value());
7462 Goto(&if_inputisnotreceiver);
7463 }
7464 }
7465
7466 BIND(&if_inputisnotreceiver)Bind(&if_inputisnotreceiver);
7467 {
7468 Label not_plain_primitive(this), if_inputisbigint(this),
7469 if_inputisother(this, Label::kDeferred);
7470
7471 // String and Oddball cases.
7472 TVARIABLE(Number, var_result_number)TVariable<Number> var_result_number(this);
7473 TryPlainPrimitiveNonNumberToNumber(var_input.value(), &var_result_number,
7474 &not_plain_primitive);
7475 var_result = var_result_number.value();
7476 Goto(&end);
7477
7478 BIND(&not_plain_primitive)Bind(&not_plain_primitive);
7479 {
7480 Branch(IsBigIntInstanceType(instance_type.value()), &if_inputisbigint,
7481 &if_inputisother);
7482
7483 BIND(&if_inputisbigint)Bind(&if_inputisbigint);
7484 {
7485 if (mode == Object::Conversion::kToNumeric) {
7486 var_result = CAST(var_input.value())Cast(var_input.value());
7487 Goto(&end);
7488 } else {
7489 DCHECK_EQ(mode, Object::Conversion::kToNumber)((void) 0);
7490 if (bigint_handling == BigIntHandling::kThrow) {
7491 Goto(&if_inputisother);
7492 } else {
7493 DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber)((void) 0);
7494 var_result = CAST(CallRuntime(Runtime::kBigIntToNumber, context,Cast(CallRuntime(Runtime::kBigIntToNumber, context, var_input
.value()))
7495 var_input.value()))Cast(CallRuntime(Runtime::kBigIntToNumber, context, var_input
.value()))
;
7496 Goto(&end);
7497 }
7498 }
7499 }
7500
7501 BIND(&if_inputisother)Bind(&if_inputisother);
7502 {
7503 // The {var_input.value()} is something else (e.g. Symbol), let the
7504 // runtime figure out the correct exception. Note: We cannot tail call
7505 // to the runtime here, as js-to-wasm trampolines also use this code
7506 // currently, and they declare all outgoing parameters as untagged,
7507 // while we would push a tagged object here.
7508 auto function_id = mode == Object::Conversion::kToNumber
7509 ? Runtime::kToNumber
7510 : Runtime::kToNumeric;
7511 var_result = CAST(CallRuntime(function_id, context, var_input.value()))Cast(CallRuntime(function_id, context, var_input.value()));
7512 Goto(&end);
7513 }
7514 }
7515 }
7516
7517 BIND(&end)Bind(&end);
7518 if (mode == Object::Conversion::kToNumber) {
7519 CSA_DCHECK(this, IsNumber(var_result.value()))((void)0);
7520 }
7521 return var_result.value();
7522}
7523
7524TNode<Number> CodeStubAssembler::NonNumberToNumber(
7525 TNode<Context> context, TNode<HeapObject> input,
7526 BigIntHandling bigint_handling) {
7527 return CAST(NonNumberToNumberOrNumeric(Cast(NonNumberToNumberOrNumeric( context, input, Object::Conversion
::kToNumber, bigint_handling))
7528 context, input, Object::Conversion::kToNumber, bigint_handling))Cast(NonNumberToNumberOrNumeric( context, input, Object::Conversion
::kToNumber, bigint_handling))
;
7529}
7530
7531void CodeStubAssembler::TryPlainPrimitiveNonNumberToNumber(
7532 TNode<HeapObject> input, TVariable<Number>* var_result, Label* if_bailout) {
7533 CSA_DCHECK(this, Word32BinaryNot(IsHeapNumber(input)))((void)0);
7534 Label done(this);
7535
7536 // Dispatch on the {input} instance type.
7537 TNode<Uint16T> input_instance_type = LoadInstanceType(input);
7538 Label if_inputisstring(this);
7539 GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
7540 GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), if_bailout);
7541
7542 // The {input} is an Oddball, we just need to load the Number value of it.
7543 *var_result = LoadObjectField<Number>(input, Oddball::kToNumberOffset);
7544 Goto(&done);
7545
7546 BIND(&if_inputisstring)Bind(&if_inputisstring);
7547 {
7548 // The {input} is a String, use the fast stub to convert it to a Number.
7549 *var_result = StringToNumber(CAST(input)Cast(input));
7550 Goto(&done);
7551 }
7552
7553 BIND(&done)Bind(&done);
7554}
7555
7556TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(TNode<Context> context,
7557 TNode<HeapObject> input) {
7558 return NonNumberToNumberOrNumeric(context, input,
7559 Object::Conversion::kToNumeric);
7560}
7561
7562TNode<Number> CodeStubAssembler::ToNumber(TNode<Context> context,
7563 TNode<Object> input,
7564 BigIntHandling bigint_handling) {
7565 return CAST(ToNumberOrNumeric([context] { return context; }, input, nullptr,Cast(ToNumberOrNumeric([context] { return context; }, input, nullptr
, Object::Conversion::kToNumber, bigint_handling))
7566 Object::Conversion::kToNumber,Cast(ToNumberOrNumeric([context] { return context; }, input, nullptr
, Object::Conversion::kToNumber, bigint_handling))
7567 bigint_handling))Cast(ToNumberOrNumeric([context] { return context; }, input, nullptr
, Object::Conversion::kToNumber, bigint_handling))
;
7568}
7569
7570TNode<Number> CodeStubAssembler::ToNumber_Inline(TNode<Context> context,
7571 TNode<Object> input) {
7572 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
7573 Label end(this), not_smi(this, Label::kDeferred);
7574
7575 GotoIfNot(TaggedIsSmi(input), &not_smi);
7576 var_result = CAST(input)Cast(input);
7577 Goto(&end);
7578
7579 BIND(&not_smi)Bind(&not_smi);
7580 {
7581 var_result = Select<Number>(
7582 IsHeapNumber(CAST(input)Cast(input)), [=] { return CAST(input)Cast(input); },
7583 [=] {
7584 return CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input))Cast(CallBuiltin(Builtin::kNonNumberToNumber, context, input)
)
;
7585 });
7586 Goto(&end);
7587 }
7588
7589 BIND(&end)Bind(&end);
7590 return var_result.value();
7591}
7592
7593TNode<Numeric> CodeStubAssembler::ToNumberOrNumeric(
7594 LazyNode<Context> context, TNode<Object> input,
7595 TVariable<Smi>* var_type_feedback, Object::Conversion mode,
7596 BigIntHandling bigint_handling) {
7597 TVARIABLE(Numeric, var_result)TVariable<Numeric> var_result(this);
7598 Label end(this);
7599
7600 Label not_smi(this, Label::kDeferred);
7601 GotoIfNot(TaggedIsSmi(input), &not_smi);
7602 TNode<Smi> input_smi = CAST(input)Cast(input);
7603 var_result = input_smi;
7604 if (var_type_feedback) {
7605 *var_type_feedback = SmiConstant(BinaryOperationFeedback::kSignedSmall);
7606 }
7607 Goto(&end);
7608
7609 BIND(&not_smi)Bind(&not_smi);
7610 {
7611 Label not_heap_number(this, Label::kDeferred);
7612 TNode<HeapObject> input_ho = CAST(input)Cast(input);
7613 GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7614
7615 TNode<HeapNumber> input_hn = CAST(input_ho)Cast(input_ho);
7616 var_result = input_hn;
7617 if (var_type_feedback) {
7618 *var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumber);
7619 }
7620 Goto(&end);
7621
7622 BIND(&not_heap_number)Bind(&not_heap_number);
7623 {
7624 if (mode == Object::Conversion::kToNumeric) {
7625 // Special case for collecting BigInt feedback.
7626 Label not_bigint(this);
7627 GotoIfNot(IsBigInt(input_ho), &not_bigint);
7628 {
7629 var_result = CAST(input_ho)Cast(input_ho);
7630 *var_type_feedback = SmiConstant(BinaryOperationFeedback::kBigInt);
7631 Goto(&end);
7632 }
7633 BIND(&not_bigint)Bind(&not_bigint);
7634 }
7635 var_result = NonNumberToNumberOrNumeric(context(), input_ho, mode,
7636 bigint_handling);
7637 if (var_type_feedback) {
7638 *var_type_feedback = SmiConstant(BinaryOperationFeedback::kAny);
7639 }
7640 Goto(&end);
7641 }
7642 }
7643
7644 BIND(&end)Bind(&end);
7645 return var_result.value();
7646}
7647
7648TNode<Number> CodeStubAssembler::PlainPrimitiveToNumber(TNode<Object> input) {
7649 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
7650 Label end(this), fallback(this);
7651
7652 Label not_smi(this, Label::kDeferred);
7653 GotoIfNot(TaggedIsSmi(input), &not_smi);
7654 TNode<Smi> input_smi = CAST(input)Cast(input);
7655 var_result = input_smi;
7656 Goto(&end);
7657
7658 BIND(&not_smi)Bind(&not_smi);
7659 {
7660 Label not_heap_number(this, Label::kDeferred);
7661 TNode<HeapObject> input_ho = CAST(input)Cast(input);
7662 GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7663
7664 TNode<HeapNumber> input_hn = CAST(input_ho)Cast(input_ho);
7665 var_result = input_hn;
7666 Goto(&end);
7667
7668 BIND(&not_heap_number)Bind(&not_heap_number);
7669 {
7670 TryPlainPrimitiveNonNumberToNumber(input_ho, &var_result, &fallback);
7671 Goto(&end);
7672 BIND(&fallback)Bind(&fallback);
7673 Unreachable();
7674 }
7675 }
7676
7677 BIND(&end)Bind(&end);
7678 return var_result.value();
7679}
7680
7681TNode<BigInt> CodeStubAssembler::ToBigInt(TNode<Context> context,
7682 TNode<Object> input) {
7683 TVARIABLE(BigInt, var_result)TVariable<BigInt> var_result(this);
7684 Label if_bigint(this), done(this), if_throw(this);
7685
7686 GotoIf(TaggedIsSmi(input), &if_throw);
7687 GotoIf(IsBigInt(CAST(input)Cast(input)), &if_bigint);
7688 var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input))Cast(CallRuntime(Runtime::kToBigInt, context, input));
7689 Goto(&done);
7690
7691 BIND(&if_bigint)Bind(&if_bigint);
7692 var_result = CAST(input)Cast(input);
7693 Goto(&done);
7694
7695 BIND(&if_throw)Bind(&if_throw);
7696 ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
7697
7698 BIND(&done)Bind(&done);
7699 return var_result.value();
7700}
7701
7702void CodeStubAssembler::TaggedToNumeric(TNode<Context> context,
7703 TNode<Object> value,
7704 TVariable<Numeric>* var_numeric) {
7705 TaggedToNumeric(context, value, var_numeric, nullptr);
7706}
7707
7708void CodeStubAssembler::TaggedToNumericWithFeedback(
7709 TNode<Context> context, TNode<Object> value,
7710 TVariable<Numeric>* var_numeric, TVariable<Smi>* var_feedback) {
7711 DCHECK_NOT_NULL(var_feedback)((void) 0);
7712 TaggedToNumeric(context, value, var_numeric, var_feedback);
7713}
7714
7715void CodeStubAssembler::TaggedToNumeric(TNode<Context> context,
7716 TNode<Object> value,
7717 TVariable<Numeric>* var_numeric,
7718 TVariable<Smi>* var_feedback) {
7719 Label done(this), if_smi(this), if_heapnumber(this), if_bigint(this),
7720 if_oddball(this);
7721 GotoIf(TaggedIsSmi(value), &if_smi);
7722 TNode<HeapObject> heap_object_value = CAST(value)Cast(value);
7723 TNode<Map> map = LoadMap(heap_object_value);
7724 GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7725 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
7726 GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7727
7728 // {heap_object_value} is not a Numeric yet.
7729 GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7730 *var_numeric = CAST(Cast(CallBuiltin(Builtin::kNonNumberToNumeric, context, heap_object_value
))
7731 CallBuiltin(Builtin::kNonNumberToNumeric, context, heap_object_value))Cast(CallBuiltin(Builtin::kNonNumberToNumeric, context, heap_object_value
))
;
7732 OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7733 Goto(&done);
7734
7735 BIND(&if_smi)Bind(&if_smi);
7736 *var_numeric = CAST(value)Cast(value);
7737 OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7738 Goto(&done);
7739
7740 BIND(&if_heapnumber)Bind(&if_heapnumber);
7741 *var_numeric = CAST(value)Cast(value);
7742 OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7743 Goto(&done);
7744
7745 BIND(&if_bigint)Bind(&if_bigint);
7746 *var_numeric = CAST(value)Cast(value);
7747 OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7748 Goto(&done);
7749
7750 BIND(&if_oddball)Bind(&if_oddball);
7751 OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7752 *var_numeric =
7753 CAST(LoadObjectField(heap_object_value, Oddball::kToNumberOffset))Cast(LoadObjectField(heap_object_value, Oddball::kToNumberOffset
))
;
7754 Goto(&done);
7755
7756 Bind(&done);
7757}
7758
7759// ES#sec-touint32
7760TNode<Number> CodeStubAssembler::ToUint32(TNode<Context> context,
7761 TNode<Object> input) {
7762 const TNode<Float64T> float_zero = Float64Constant(0.0);
7763 const TNode<Float64T> float_two_32 =
7764 Float64Constant(static_cast<double>(1ULL << 32));
7765
7766 Label out(this);
7767
7768 TVARIABLE(Object, var_result, input)TVariable<Object> var_result(input, this);
7769
7770 // Early exit for positive smis.
7771 {
7772 // TODO(jgruber): This branch and the recheck below can be removed once we
7773 // have a ToNumber with multiple exits.
7774 Label next(this, Label::kDeferred);
7775 Branch(TaggedIsPositiveSmi(input), &out, &next);
7776 BIND(&next)Bind(&next);
7777 }
7778
7779 const TNode<Number> number = ToNumber(context, input);
7780 var_result = number;
7781
7782 // Perhaps we have a positive smi now.
7783 {
7784 Label next(this, Label::kDeferred);
7785 Branch(TaggedIsPositiveSmi(number), &out, &next);
7786 BIND(&next)Bind(&next);
7787 }
7788
7789 Label if_isnegativesmi(this), if_isheapnumber(this);
7790 Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
7791
7792 BIND(&if_isnegativesmi)Bind(&if_isnegativesmi);
7793 {
7794 const TNode<Int32T> uint32_value = SmiToInt32(CAST(number)Cast(number));
7795 TNode<Float64T> float64_value = ChangeUint32ToFloat64(uint32_value);
7796 var_result = AllocateHeapNumberWithValue(float64_value);
7797 Goto(&out);
7798 }
7799
7800 BIND(&if_isheapnumber)Bind(&if_isheapnumber);
7801 {
7802 Label return_zero(this);
7803 const TNode<Float64T> value = LoadHeapNumberValue(CAST(number)Cast(number));
7804
7805 {
7806 // +-0.
7807 Label next(this);
7808 Branch(Float64Equal(value, float_zero), &return_zero, &next);
7809 BIND(&next)Bind(&next);
7810 }
7811
7812 {
7813 // NaN.
7814 Label next(this);
7815 Branch(Float64Equal(value, value), &next, &return_zero);
7816 BIND(&next)Bind(&next);
7817 }
7818
7819 {
7820 // +Infinity.
7821 Label next(this);
7822 const TNode<Float64T> positive_infinity =
7823 Float64Constant(std::numeric_limits<double>::infinity());
7824 Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
7825 BIND(&next)Bind(&next);
7826 }
7827
7828 {
7829 // -Infinity.
7830 Label next(this);
7831 const TNode<Float64T> negative_infinity =
7832 Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
7833 Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
7834 BIND(&next)Bind(&next);
7835 }
7836
7837 // * Let int be the mathematical value that is the same sign as number and
7838 // whose magnitude is floor(abs(number)).
7839 // * Let int32bit be int modulo 2^32.
7840 // * Return int32bit.
7841 {
7842 TNode<Float64T> x = Float64Trunc(value);
7843 x = Float64Mod(x, float_two_32);
7844 x = Float64Add(x, float_two_32);
7845 x = Float64Mod(x, float_two_32);
7846
7847 const TNode<Number> result = ChangeFloat64ToTagged(x);
7848 var_result = result;
7849 Goto(&out);
7850 }
7851
7852 BIND(&return_zero)Bind(&return_zero);
7853 {
7854 var_result = SmiConstant(0);
7855 Goto(&out);
7856 }
7857 }
7858
7859 BIND(&out)Bind(&out);
7860 return CAST(var_result.value())Cast(var_result.value());
7861}
7862
7863TNode<String> CodeStubAssembler::ToString_Inline(TNode<Context> context,
7864 TNode<Object> input) {
7865 TVARIABLE(Object, var_result, input)TVariable<Object> var_result(input, this);
7866 Label stub_call(this, Label::kDeferred), out(this);
7867
7868 GotoIf(TaggedIsSmi(input), &stub_call);
7869 Branch(IsString(CAST(input)Cast(input)), &out, &stub_call);
7870
7871 BIND(&stub_call)Bind(&stub_call);
7872 var_result = CallBuiltin(Builtin::kToString, context, input);
7873 Goto(&out);
7874
7875 BIND(&out)Bind(&out);
7876 return CAST(var_result.value())Cast(var_result.value());
7877}
7878
7879TNode<JSReceiver> CodeStubAssembler::ToObject(TNode<Context> context,
7880 TNode<Object> input) {
7881 return CAST(CallBuiltin(Builtin::kToObject, context, input))Cast(CallBuiltin(Builtin::kToObject, context, input));
7882}
7883
7884TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
7885 TNode<Object> input) {
7886 TVARIABLE(JSReceiver, result)TVariable<JSReceiver> result(this);
7887 Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
7888 Label done(this);
7889
7890 BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7891
7892 BIND(&if_isreceiver)Bind(&if_isreceiver);
7893 {
7894 result = CAST(input)Cast(input);
7895 Goto(&done);
7896 }
7897
7898 BIND(&if_isnotreceiver)Bind(&if_isnotreceiver);
7899 {
7900 result = ToObject(context, input);
7901 Goto(&done);
7902 }
7903
7904 BIND(&done)Bind(&done);
7905 return result.value();
7906}
7907
7908TNode<Number> CodeStubAssembler::ToLength_Inline(TNode<Context> context,
7909 TNode<Object> input) {
7910 TNode<Smi> smi_zero = SmiConstant(0);
7911 return Select<Number>(
7912 TaggedIsSmi(input), [=] { return SmiMax(CAST(input)Cast(input), smi_zero); },
7913 [=] { return CAST(CallBuiltin(Builtin::kToLength, context, input))Cast(CallBuiltin(Builtin::kToLength, context, input)); });
7914}
7915
7916TNode<Object> CodeStubAssembler::OrdinaryToPrimitive(
7917 TNode<Context> context, TNode<Object> input, OrdinaryToPrimitiveHint hint) {
7918 Callable callable = CodeFactory::OrdinaryToPrimitive(isolate(), hint);
7919 return CallStub(callable, context, input);
7920}
7921
7922TNode<Uint32T> CodeStubAssembler::DecodeWord32(TNode<Word32T> word32,
7923 uint32_t shift, uint32_t mask) {
7924 DCHECK_EQ((mask >> shift) << shift, mask)((void) 0);
7925 if ((std::numeric_limits<uint32_t>::max() >> shift) ==
7926 ((std::numeric_limits<uint32_t>::max() & mask) >> shift)) {
7927 return Unsigned(Word32Shr(word32, static_cast<int>(shift)));
7928 } else {
7929 return Unsigned(Word32And(Word32Shr(word32, static_cast<int>(shift)),
7930 Int32Constant(mask >> shift)));
7931 }
7932}
7933
7934TNode<UintPtrT> CodeStubAssembler::DecodeWord(TNode<WordT> word, uint32_t shift,
7935 uintptr_t mask) {
7936 DCHECK_EQ((mask >> shift) << shift, mask)((void) 0);
7937 if ((std::numeric_limits<uintptr_t>::max() >> shift) ==
7938 ((std::numeric_limits<uintptr_t>::max() & mask) >> shift)) {
7939 return Unsigned(WordShr(word, static_cast<int>(shift)));
7940 } else {
7941 return Unsigned(WordAnd(WordShr(word, static_cast<int>(shift)),
7942 IntPtrConstant(mask >> shift)));
7943 }
7944}
7945
7946TNode<Word32T> CodeStubAssembler::UpdateWord32(TNode<Word32T> word,
7947 TNode<Uint32T> value,
7948 uint32_t shift, uint32_t mask,
7949 bool starts_as_zero) {
7950 DCHECK_EQ((mask >> shift) << shift, mask)((void) 0);
7951 // Ensure the {value} fits fully in the mask.
7952 CSA_DCHECK(this, Uint32LessThanOrEqual(value, Uint32Constant(mask >> shift)))((void)0);
7953 TNode<Word32T> encoded_value = Word32Shl(value, Int32Constant(shift));
7954 TNode<Word32T> masked_word;
7955 if (starts_as_zero) {
7956 CSA_DCHECK(this, Word32Equal(Word32And(word, Int32Constant(~mask)), word))((void)0);
7957 masked_word = word;
7958 } else {
7959 masked_word = Word32And(word, Int32Constant(~mask));
7960 }
7961 return Word32Or(masked_word, encoded_value);
7962}
7963
7964TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
7965 TNode<UintPtrT> value,
7966 uint32_t shift, uintptr_t mask,
7967 bool starts_as_zero) {
7968 DCHECK_EQ((mask >> shift) << shift, mask)((void) 0);
7969 // Ensure the {value} fits fully in the mask.
7970 CSA_DCHECK(this,((void)0)
7971 UintPtrLessThanOrEqual(value, UintPtrConstant(mask >> shift)))((void)0);
7972 TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
7973 TNode<WordT> masked_word;
7974 if (starts_as_zero) {
7975 CSA_DCHECK(this, WordEqual(WordAnd(word, UintPtrConstant(~mask)), word))((void)0);
7976 masked_word = word;
7977 } else {
7978 masked_word = WordAnd(word, UintPtrConstant(~mask));
7979 }
7980 return WordOr(masked_word, encoded_value);
7981}
7982
7983void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
7984 if (FLAG_native_code_counters && counter->Enabled()) {
7985 TNode<ExternalReference> counter_address =
7986 ExternalConstant(ExternalReference::Create(counter));
7987 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
7988 Int32Constant(value));
7989 }
7990}
7991
7992void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
7993 DCHECK_GT(delta, 0)((void) 0);
7994 if (FLAG_native_code_counters && counter->Enabled()) {
7995 TNode<ExternalReference> counter_address =
7996 ExternalConstant(ExternalReference::Create(counter));
7997 // This operation has to be exactly 32-bit wide in case the external
7998 // reference table redirects the counter to a uint32_t dummy_stats_counter_
7999 // field.
8000 TNode<Int32T> value = Load<Int32T>(counter_address);
8001 value = Int32Add(value, Int32Constant(delta));
8002 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8003 }
8004}
8005
8006void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
8007 DCHECK_GT(delta, 0)((void) 0);
8008 if (FLAG_native_code_counters && counter->Enabled()) {
8009 TNode<ExternalReference> counter_address =
8010 ExternalConstant(ExternalReference::Create(counter));
8011 // This operation has to be exactly 32-bit wide in case the external
8012 // reference table redirects the counter to a uint32_t dummy_stats_counter_
8013 // field.
8014 TNode<Int32T> value = Load<Int32T>(counter_address);
8015 value = Int32Sub(value, Int32Constant(delta));
8016 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
8017 }
8018}
8019
8020template <typename TIndex>
8021void CodeStubAssembler::Increment(TVariable<TIndex>* variable, int value) {
8022 *variable =
8023 IntPtrOrSmiAdd(variable->value(), IntPtrOrSmiConstant<TIndex>(value));
8024}
8025
8026// Instantiate Increment for Smi and IntPtrT.
8027// TODO(v8:9708): Consider renaming to [Smi|IntPtrT|RawPtrT]Increment.
8028template void CodeStubAssembler::Increment<Smi>(TVariable<Smi>* variable,
8029 int value);
8030template void CodeStubAssembler::Increment<IntPtrT>(
8031 TVariable<IntPtrT>* variable, int value);
8032template void CodeStubAssembler::Increment<RawPtrT>(
8033 TVariable<RawPtrT>* variable, int value);
8034
8035void CodeStubAssembler::Use(Label* label) {
8036 GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
8037}
8038
8039void CodeStubAssembler::TryToName(TNode<Object> key, Label* if_keyisindex,
8040 TVariable<IntPtrT>* var_index,
8041 Label* if_keyisunique,
8042 TVariable<Name>* var_unique,
8043 Label* if_bailout,
8044 Label* if_notinternalized) {
8045 Comment("TryToName");
8046
8047 TVARIABLE(Int32T, var_instance_type)TVariable<Int32T> var_instance_type(this);
8048 Label if_keyisnotindex(this);
8049 *var_index = TryToIntptr(key, &if_keyisnotindex, &var_instance_type);
8050 Goto(if_keyisindex);
8051
8052 BIND(&if_keyisnotindex)Bind(&if_keyisnotindex);
8053 {
8054 Label if_symbol(this), if_string(this),
8055 if_keyisother(this, Label::kDeferred);
8056
8057 // Symbols are unique.
8058 GotoIf(IsSymbolInstanceType(var_instance_type.value()), &if_symbol);
8059
8060 // Miss if |key| is not a String.
8061 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE)static_assert(FIRST_NAME_TYPE == FIRST_TYPE, "FIRST_NAME_TYPE == FIRST_TYPE"
)
;
8062 Branch(IsStringInstanceType(var_instance_type.value()), &if_string,
8063 &if_keyisother);
8064
8065 // Symbols are unique.
8066 BIND(&if_symbol)Bind(&if_symbol);
8067 {
8068 *var_unique = CAST(key)Cast(key);
8069 Goto(if_keyisunique);
8070 }
8071
8072 BIND(&if_string)Bind(&if_string);
8073 {
8074 Label if_thinstring(this), if_has_cached_index(this);
8075
8076 TNode<Uint32T> raw_hash_field = LoadNameRawHashField(CAST(key)Cast(key));
8077 GotoIf(IsClearWord32(raw_hash_field,
8078 Name::kDoesNotContainCachedArrayIndexMask),
8079 &if_has_cached_index);
8080 // No cached array index. If the string knows that it contains an index,
8081 // then it must be an uncacheable index. Handle this case in the runtime.
8082 GotoIf(IsEqualInWord32<Name::HashFieldTypeBits>(
8083 raw_hash_field, Name::HashFieldType::kIntegerIndex),
8084 if_bailout);
8085
8086 GotoIf(InstanceTypeEqual(var_instance_type.value(), THIN_STRING_TYPE),
8087 &if_thinstring);
8088 GotoIf(InstanceTypeEqual(var_instance_type.value(),
8089 THIN_ONE_BYTE_STRING_TYPE),
8090 &if_thinstring);
8091 // Finally, check if |key| is internalized.
8092 STATIC_ASSERT(kNotInternalizedTag != 0)static_assert(kNotInternalizedTag != 0, "kNotInternalizedTag != 0"
)
;
8093 GotoIf(IsSetWord32(var_instance_type.value(), kIsNotInternalizedMask),
8094 if_notinternalized != nullptr ? if_notinternalized : if_bailout);
8095
8096 *var_unique = CAST(key)Cast(key);
8097 Goto(if_keyisunique);
8098
8099 BIND(&if_thinstring)Bind(&if_thinstring);
8100 {
8101 *var_unique =
8102 LoadObjectField<String>(CAST(key)Cast(key), ThinString::kActualOffset);
8103 Goto(if_keyisunique);
8104 }
8105
8106 BIND(&if_has_cached_index)Bind(&if_has_cached_index);
8107 {
8108 TNode<IntPtrT> index = Signed(
8109 DecodeWordFromWord32<String::ArrayIndexValueBits>(raw_hash_field));
8110 CSA_DCHECK(this, IntPtrLessThan(index, IntPtrConstant(INT_MAX)))((void)0);
8111 *var_index = index;
8112 Goto(if_keyisindex);
8113 }
8114 }
8115
8116 BIND(&if_keyisother)Bind(&if_keyisother);
8117 {
8118 GotoIfNot(InstanceTypeEqual(var_instance_type.value(), ODDBALL_TYPE),
8119 if_bailout);
8120 *var_unique =
8121 LoadObjectField<String>(CAST(key)Cast(key), Oddball::kToStringOffset);
8122 Goto(if_keyisunique);
8123 }
8124 }
8125}
8126
8127void CodeStubAssembler::StringWriteToFlatOneByte(TNode<String> source,
8128 TNode<RawPtrT> sink,
8129 TNode<Int32T> start,
8130 TNode<Int32T> length) {
8131 TNode<ExternalReference> function =
8132 ExternalConstant(ExternalReference::string_write_to_flat_one_byte());
8133 CallCFunction(function, base::nullopt,
8134 std::make_pair(MachineType::AnyTagged(), source),
8135 std::make_pair(MachineType::Pointer(), sink),
8136 std::make_pair(MachineType::Int32(), start),
8137 std::make_pair(MachineType::Int32(), length));
8138}
8139
8140void CodeStubAssembler::StringWriteToFlatTwoByte(TNode<String> source,
8141 TNode<RawPtrT> sink,
8142 TNode<Int32T> start,
8143 TNode<Int32T> length) {
8144 TNode<ExternalReference> function =
8145 ExternalConstant(ExternalReference::string_write_to_flat_two_byte());
8146 CallCFunction(function, base::nullopt,
8147 std::make_pair(MachineType::AnyTagged(), source),
8148 std::make_pair(MachineType::Pointer(), sink),
8149 std::make_pair(MachineType::Int32(), start),
8150 std::make_pair(MachineType::Int32(), length));
8151}
8152
8153TNode<RawPtr<Uint8T>> CodeStubAssembler::ExternalOneByteStringGetChars(
8154 TNode<ExternalOneByteString> string) {
8155 TNode<ExternalReference> function =
8156 ExternalConstant(ExternalReference::external_one_byte_string_get_chars());
8157 return UncheckedCast<RawPtr<Uint8T>>(
8158 CallCFunction(function, MachineType::Pointer(),
8159 std::make_pair(MachineType::AnyTagged(), string)));
8160}
8161
8162TNode<RawPtr<Uint16T>> CodeStubAssembler::ExternalTwoByteStringGetChars(
8163 TNode<ExternalTwoByteString> string) {
8164 TNode<ExternalReference> function =
8165 ExternalConstant(ExternalReference::external_two_byte_string_get_chars());
8166 return UncheckedCast<RawPtr<Uint16T>>(
8167 CallCFunction(function, MachineType::Pointer(),
8168 std::make_pair(MachineType::AnyTagged(), string)));
8169}
8170
8171TNode<RawPtr<Uint8T>> CodeStubAssembler::IntlAsciiCollationWeightsL1() {
8172#ifdef V8_INTL_SUPPORT1
8173 TNode<RawPtrT> ptr =
8174 ExternalConstant(ExternalReference::intl_ascii_collation_weights_l1());
8175 return ReinterpretCast<RawPtr<Uint8T>>(ptr);
8176#else
8177 UNREACHABLE()V8_Fatal("unreachable code");
8178#endif
8179}
8180TNode<RawPtr<Uint8T>> CodeStubAssembler::IntlAsciiCollationWeightsL3() {
8181#ifdef V8_INTL_SUPPORT1
8182 TNode<RawPtrT> ptr =
8183 ExternalConstant(ExternalReference::intl_ascii_collation_weights_l3());
8184 return ReinterpretCast<RawPtr<Uint8T>>(ptr);
8185#else
8186 UNREACHABLE()V8_Fatal("unreachable code");
8187#endif
8188}
8189
8190void CodeStubAssembler::TryInternalizeString(
8191 TNode<String> string, Label* if_index, TVariable<IntPtrT>* var_index,
8192 Label* if_internalized, TVariable<Name>* var_internalized,
8193 Label* if_not_internalized, Label* if_bailout) {
8194 TNode<ExternalReference> function = ExternalConstant(
8195 ExternalReference::try_string_to_index_or_lookup_existing());
8196 const TNode<ExternalReference> isolate_ptr =
8197 ExternalConstant(ExternalReference::isolate_address(isolate()));
8198 TNode<Object> result =
8199 CAST(CallCFunction(function, MachineType::AnyTagged(),Cast(CallCFunction(function, MachineType::AnyTagged(), std::make_pair
(MachineType::Pointer(), isolate_ptr), std::make_pair(MachineType
::AnyTagged(), string)))
8200 std::make_pair(MachineType::Pointer(), isolate_ptr),Cast(CallCFunction(function, MachineType::AnyTagged(), std::make_pair
(MachineType::Pointer(), isolate_ptr), std::make_pair(MachineType
::AnyTagged(), string)))
8201 std::make_pair(MachineType::AnyTagged(), string)))Cast(CallCFunction(function, MachineType::AnyTagged(), std::make_pair
(MachineType::Pointer(), isolate_ptr), std::make_pair(MachineType
::AnyTagged(), string)))
;
8202 Label internalized(this);
8203 GotoIf(TaggedIsNotSmi(result), &internalized);
8204 TNode<IntPtrT> word_result = SmiUntag(CAST(result)Cast(result));
8205 GotoIf(IntPtrEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
8206 if_not_internalized);
8207 GotoIf(IntPtrEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
8208 if_bailout);
8209 *var_index = word_result;
8210 Goto(if_index);
8211
8212 BIND(&internalized)Bind(&internalized);
8213 *var_internalized = CAST(result)Cast(result);
8214 Goto(if_internalized);
8215}
8216
8217template <typename Dictionary>
8218TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
8219 int field_index) {
8220 TNode<IntPtrT> entry_index =
8221 IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
8222 return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
8223 field_index));
8224}
8225
8226template <typename T>
8227TNode<T> CodeStubAssembler::LoadDescriptorArrayElement(
8228 TNode<DescriptorArray> object, TNode<IntPtrT> index,
8229 int additional_offset) {
8230 return LoadArrayElement<DescriptorArray, IntPtrT, T>(
8231 object, DescriptorArray::kHeaderSize, index, additional_offset);
8232}
8233
8234TNode<Name> CodeStubAssembler::LoadKeyByKeyIndex(
8235 TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8236 return CAST(LoadDescriptorArrayElement<HeapObject>(container, key_index, 0))Cast(LoadDescriptorArrayElement<HeapObject>(container, key_index
, 0))
;
8237}
8238
8239TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8240 TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8241 const int kKeyToDetailsOffset =
8242 DescriptorArray::kEntryDetailsOffset - DescriptorArray::kEntryKeyOffset;
8243 return Unsigned(LoadAndUntagToWord32ArrayElement(
8244 container, DescriptorArray::kHeaderSize, key_index, kKeyToDetailsOffset));
8245}
8246
8247TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8248 TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8249 const int kKeyToValueOffset =
8250 DescriptorArray::kEntryValueOffset - DescriptorArray::kEntryKeyOffset;
8251 return LoadDescriptorArrayElement<Object>(container, key_index,
8252 kKeyToValueOffset);
8253}
8254
8255TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
8256 TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
8257 const int kKeyToValueOffset =
8258 DescriptorArray::kEntryValueOffset - DescriptorArray::kEntryKeyOffset;
8259 return LoadDescriptorArrayElement<MaybeObject>(container, key_index,
8260 kKeyToValueOffset);
8261}
8262
8263TNode<IntPtrT> CodeStubAssembler::DescriptorEntryToIndex(
8264 TNode<IntPtrT> descriptor_entry) {
8265 return IntPtrMul(descriptor_entry,
8266 IntPtrConstant(DescriptorArray::kEntrySize));
8267}
8268
8269TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8270 TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8271 return CAST(LoadDescriptorArrayElement<HeapObject>(Cast(LoadDescriptorArrayElement<HeapObject>( container,
DescriptorEntryToIndex(descriptor_entry), DescriptorArray::ToKeyIndex
(0) * kTaggedSize))
8272 container, DescriptorEntryToIndex(descriptor_entry),Cast(LoadDescriptorArrayElement<HeapObject>( container,
DescriptorEntryToIndex(descriptor_entry), DescriptorArray::ToKeyIndex
(0) * kTaggedSize))
8273 DescriptorArray::ToKeyIndex(0) * kTaggedSize))Cast(LoadDescriptorArrayElement<HeapObject>( container,
DescriptorEntryToIndex(descriptor_entry), DescriptorArray::ToKeyIndex
(0) * kTaggedSize))
;
8274}
8275
8276TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
8277 TNode<DescriptorArray> container, int descriptor_entry) {
8278 return CAST(LoadDescriptorArrayElement<HeapObject>(Cast(LoadDescriptorArrayElement<HeapObject>( container,
IntPtrConstant(0), DescriptorArray::ToKeyIndex(descriptor_entry
) * kTaggedSize))
8279 container, IntPtrConstant(0),Cast(LoadDescriptorArrayElement<HeapObject>( container,
IntPtrConstant(0), DescriptorArray::ToKeyIndex(descriptor_entry
) * kTaggedSize))
8280 DescriptorArray::ToKeyIndex(descriptor_entry) * kTaggedSize))Cast(LoadDescriptorArrayElement<HeapObject>( container,
IntPtrConstant(0), DescriptorArray::ToKeyIndex(descriptor_entry
) * kTaggedSize))
;
8281}
8282
8283TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8284 TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8285 return Unsigned(LoadAndUntagToWord32ArrayElement(
8286 container, DescriptorArray::kHeaderSize,
8287 DescriptorEntryToIndex(descriptor_entry),
8288 DescriptorArray::ToDetailsIndex(0) * kTaggedSize));
8289}
8290
8291TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
8292 TNode<DescriptorArray> container, int descriptor_entry) {
8293 return Unsigned(LoadAndUntagToWord32ArrayElement(
8294 container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
8295 DescriptorArray::ToDetailsIndex(descriptor_entry) * kTaggedSize));
8296}
8297
8298TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
8299 TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8300 return LoadDescriptorArrayElement<Object>(
8301 container, DescriptorEntryToIndex(descriptor_entry),
8302 DescriptorArray::ToValueIndex(0) * kTaggedSize);
8303}
8304
8305TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
8306 TNode<DescriptorArray> container, int descriptor_entry) {
8307 return LoadDescriptorArrayElement<Object>(
8308 container, IntPtrConstant(0),
8309 DescriptorArray::ToValueIndex(descriptor_entry) * kTaggedSize);
8310}
8311
8312TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByDescriptorEntry(
8313 TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
8314 return LoadDescriptorArrayElement<MaybeObject>(
8315 container, DescriptorEntryToIndex(descriptor_entry),
8316 DescriptorArray::ToValueIndex(0) * kTaggedSize);
8317}
8318
8319// Loads the value for the entry with the given key_index.
8320// Returns a tagged value.
8321template <class ContainerType>
8322TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8323 TNode<ContainerType> container, TNode<IntPtrT> key_index) {
8324 static_assert(!std::is_same<ContainerType, DescriptorArray>::value,
8325 "Use the non-templatized version for DescriptorArray");
8326 const int kKeyToValueOffset =
8327 (ContainerType::kEntryValueIndex - ContainerType::kEntryKeyIndex) *
8328 kTaggedSize;
8329 return LoadFixedArrayElement(container, key_index, kKeyToValueOffset);
8330}
8331
8332template <>
8333V8_EXPORT_PRIVATE TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8334 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index) {
8335 TNode<IntPtrT> offset_minus_tag = SwissNameDictionaryOffsetIntoDataTableMT(
8336 container, key_index, SwissNameDictionary::kDataTableValueEntryIndex);
8337
8338 return Load<Object>(container, offset_minus_tag);
8339}
8340
8341template <class ContainerType>
8342TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8343 TNode<ContainerType> container, TNode<IntPtrT> key_index) {
8344 static_assert(!std::is_same<ContainerType, DescriptorArray>::value,
8345 "Use the non-templatized version for DescriptorArray");
8346 const int kKeyToDetailsOffset =
8347 (ContainerType::kEntryDetailsIndex - ContainerType::kEntryKeyIndex) *
8348 kTaggedSize;
8349 return Unsigned(LoadAndUntagToWord32FixedArrayElement(container, key_index,
8350 kKeyToDetailsOffset));
8351}
8352
8353template <>
8354V8_EXPORT_PRIVATE TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8355 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index) {
8356 TNode<IntPtrT> capacity =
8357 ChangeInt32ToIntPtr(LoadSwissNameDictionaryCapacity(container));
8358 return LoadSwissNameDictionaryPropertyDetails(container, capacity, key_index);
8359}
8360
8361// Stores the details for the entry with the given key_index.
8362// |details| must be a Smi.
8363template <class ContainerType>
8364void CodeStubAssembler::StoreDetailsByKeyIndex(TNode<ContainerType> container,
8365 TNode<IntPtrT> key_index,
8366 TNode<Smi> details) {
8367 const int kKeyToDetailsOffset =
8368 (ContainerType::kEntryDetailsIndex - ContainerType::kEntryKeyIndex) *
8369 kTaggedSize;
8370 StoreFixedArrayElement(container, key_index, details, kKeyToDetailsOffset);
8371}
8372
8373template <>
8374V8_EXPORT_PRIVATE void CodeStubAssembler::StoreDetailsByKeyIndex(
8375 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index,
8376 TNode<Smi> details) {
8377 TNode<IntPtrT> capacity =
8378 ChangeInt32ToIntPtr(LoadSwissNameDictionaryCapacity(container));
8379 TNode<Uint8T> details_byte = UncheckedCast<Uint8T>(SmiToInt32(details));
8380 StoreSwissNameDictionaryPropertyDetails(container, capacity, key_index,
8381 details_byte);
8382}
8383
8384// Stores the value for the entry with the given key_index.
8385template <class ContainerType>
8386void CodeStubAssembler::StoreValueByKeyIndex(TNode<ContainerType> container,
8387 TNode<IntPtrT> key_index,
8388 TNode<Object> value,
8389 WriteBarrierMode write_barrier) {
8390 const int kKeyToValueOffset =
8391 (ContainerType::kEntryValueIndex - ContainerType::kEntryKeyIndex) *
8392 kTaggedSize;
8393 StoreFixedArrayElement(container, key_index, value, write_barrier,
8394 kKeyToValueOffset);
8395}
8396
8397template <>
8398V8_EXPORT_PRIVATE void CodeStubAssembler::StoreValueByKeyIndex(
8399 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index,
8400 TNode<Object> value, WriteBarrierMode write_barrier) {
8401 TNode<IntPtrT> offset_minus_tag = SwissNameDictionaryOffsetIntoDataTableMT(
8402 container, key_index, SwissNameDictionary::kDataTableValueEntryIndex);
8403
8404 StoreToObjectWriteBarrier mode;
8405 switch (write_barrier) {
8406 case UNSAFE_SKIP_WRITE_BARRIER:
8407 case SKIP_WRITE_BARRIER:
8408 mode = StoreToObjectWriteBarrier::kNone;
8409 break;
8410 case UPDATE_WRITE_BARRIER:
8411 mode = StoreToObjectWriteBarrier::kFull;
8412 break;
8413 default:
8414 // We shouldn't see anything else.
8415 UNREACHABLE()V8_Fatal("unreachable code");
8416 }
8417 StoreToObject(MachineRepresentation::kTagged, container, offset_minus_tag,
8418 value, mode);
8419}
8420
8421template V8_EXPORT_PRIVATE TNode<IntPtrT>
8422CodeStubAssembler::EntryToIndex<NameDictionary>(TNode<IntPtrT>, int);
8423template V8_EXPORT_PRIVATE TNode<IntPtrT>
8424CodeStubAssembler::EntryToIndex<GlobalDictionary>(TNode<IntPtrT>, int);
8425template V8_EXPORT_PRIVATE TNode<IntPtrT>
8426CodeStubAssembler::EntryToIndex<NumberDictionary>(TNode<IntPtrT>, int);
8427
8428template TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8429 TNode<NameDictionary> container, TNode<IntPtrT> key_index);
8430template TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
8431 TNode<GlobalDictionary> container, TNode<IntPtrT> key_index);
8432template TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
8433 TNode<NameDictionary> container, TNode<IntPtrT> key_index);
8434template void CodeStubAssembler::StoreDetailsByKeyIndex(
8435 TNode<NameDictionary> container, TNode<IntPtrT> key_index,
8436 TNode<Smi> details);
8437template void CodeStubAssembler::StoreValueByKeyIndex(
8438 TNode<NameDictionary> container, TNode<IntPtrT> key_index,
8439 TNode<Object> value, WriteBarrierMode write_barrier);
8440
8441// This must be kept in sync with HashTableBase::ComputeCapacity().
8442TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
8443 TNode<IntPtrT> at_least_space_for) {
8444 TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
8445 IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
8446 return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
8447}
8448
8449TNode<IntPtrT> CodeStubAssembler::IntPtrMax(TNode<IntPtrT> left,
8450 TNode<IntPtrT> right) {
8451 intptr_t left_constant;
8452 intptr_t right_constant;
8453 if (TryToIntPtrConstant(left, &left_constant) &&
8454 TryToIntPtrConstant(right, &right_constant)) {
8455 return IntPtrConstant(std::max(left_constant, right_constant));
8456 }
8457 return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
8458 right);
8459}
8460
8461TNode<IntPtrT> CodeStubAssembler::IntPtrMin(TNode<IntPtrT> left,
8462 TNode<IntPtrT> right) {
8463 intptr_t left_constant;
8464 intptr_t right_constant;
8465 if (TryToIntPtrConstant(left, &left_constant) &&
8466 TryToIntPtrConstant(right, &right_constant)) {
8467 return IntPtrConstant(std::min(left_constant, right_constant));
8468 }
8469 return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
8470 right);
8471}
8472
8473TNode<UintPtrT> CodeStubAssembler::UintPtrMin(TNode<UintPtrT> left,
8474 TNode<UintPtrT> right) {
8475 intptr_t left_constant;
8476 intptr_t right_constant;
8477 if (TryToIntPtrConstant(left, &left_constant) &&
8478 TryToIntPtrConstant(right, &right_constant)) {
8479 return UintPtrConstant(std::min(static_cast<uintptr_t>(left_constant),
8480 static_cast<uintptr_t>(right_constant)));
8481 }
8482 return SelectConstant<UintPtrT>(UintPtrLessThanOrEqual(left, right), left,
8483 right);
8484}
8485
8486template <>
8487TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
8488 TNode<HeapObject> key) {
8489 CSA_DCHECK(this, Word32Or(IsTheHole(key), IsName(key)))((void)0);
8490 return key;
8491}
8492
8493template <>
8494TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
8495 TNode<HeapObject> key) {
8496 TNode<PropertyCell> property_cell = CAST(key)Cast(key);
8497 return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset))Cast(LoadObjectField(property_cell, PropertyCell::kNameOffset
))
;
8498}
8499
8500template <>
8501TNode<HeapObject> CodeStubAssembler::LoadName<NameToIndexHashTable>(
8502 TNode<HeapObject> key) {
8503 CSA_DCHECK(this, IsName(key))((void)0);
8504 return key;
8505}
8506
8507// The implementation should be in sync with NameToIndexHashTable::Lookup.
8508TNode<IntPtrT> CodeStubAssembler::NameToIndexHashTableLookup(
8509 TNode<NameToIndexHashTable> table, TNode<Name> name, Label* not_found) {
8510 TVARIABLE(IntPtrT, var_entry)TVariable<IntPtrT> var_entry(this);
8511 Label index_found(this, {&var_entry});
8512 NameDictionaryLookup<NameToIndexHashTable>(table, name, &index_found,
8513 &var_entry, not_found,
8514 LookupMode::kFindExisting);
8515 BIND(&index_found)Bind(&index_found);
8516 TNode<Smi> value =
8517 CAST(LoadValueByKeyIndex<NameToIndexHashTable>(table, var_entry.value()))Cast(LoadValueByKeyIndex<NameToIndexHashTable>(table, var_entry
.value()))
;
8518 return SmiToIntPtr(value);
8519}
8520
8521template <typename Dictionary>
8522void CodeStubAssembler::NameDictionaryLookup(
8523 TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
8524 TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) {
8525 static_assert(std::is_same<Dictionary, NameDictionary>::value ||
8526 std::is_same<Dictionary, GlobalDictionary>::value ||
8527 std::is_same<Dictionary, NameToIndexHashTable>::value,
8528 "Unexpected NameDictionary");
8529 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep())((void) 0);
8530 DCHECK_IMPLIES(mode == kFindInsertionIndex, if_found == nullptr)((void) 0);
8531 Comment("NameDictionaryLookup");
8532 CSA_DCHECK(this, IsUniqueName(unique_name))((void)0);
8533
8534 TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
8535 TNode<IntPtrT> mask = IntPtrSub(capacity, IntPtrConstant(1));
8536 TNode<UintPtrT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
8537
8538 // See Dictionary::FirstProbe().
8539 TNode<IntPtrT> count = IntPtrConstant(0);
8540 TNode<IntPtrT> initial_entry = Signed(WordAnd(hash, mask));
8541 TNode<Oddball> undefined = UndefinedConstant();
8542
8543 // Appease the variable merging algorithm for "Goto(&loop)" below.
8544 *var_name_index = IntPtrConstant(0);
8545
8546 TVARIABLE(IntPtrT, var_count, count)TVariable<IntPtrT> var_count(count, this);
8547 TVARIABLE(IntPtrT, var_entry, initial_entry)TVariable<IntPtrT> var_entry(initial_entry, this);
8548 Label loop(this, {&var_count, &var_entry, var_name_index});
8549 Goto(&loop);
8550 BIND(&loop)Bind(&loop);
8551 {
8552 Label next_probe(this);
8553 TNode<IntPtrT> entry = var_entry.value();
8554
8555 TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
8556 *var_name_index = index;
8557
8558 TNode<HeapObject> current =
8559 CAST(UnsafeLoadFixedArrayElement(dictionary, index))Cast(UnsafeLoadFixedArrayElement(dictionary, index));
8560 GotoIf(TaggedEqual(current, undefined), if_not_found);
8561 if (mode == kFindExisting) {
8562 if (Dictionary::ShapeT::kMatchNeedsHoleCheck) {
8563 GotoIf(TaggedEqual(current, TheHoleConstant()), &next_probe);
8564 }
8565 current = LoadName<Dictionary>(current);
8566 GotoIf(TaggedEqual(current, unique_name), if_found);
8567 } else {
8568 DCHECK_EQ(kFindInsertionIndex, mode)((void) 0);
8569 GotoIf(TaggedEqual(current, TheHoleConstant()), if_not_found);
8570 }
8571 Goto(&next_probe);
8572
8573 BIND(&next_probe)Bind(&next_probe);
8574 // See Dictionary::NextProbe().
8575 Increment(&var_count);
8576 entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
8577
8578 var_entry = entry;
8579 Goto(&loop);
8580 }
8581}
8582
8583// Instantiate template methods to workaround GCC compilation issue.
8584template V8_EXPORT_PRIVATE void
8585CodeStubAssembler::NameDictionaryLookup<NameDictionary>(TNode<NameDictionary>,
8586 TNode<Name>, Label*,
8587 TVariable<IntPtrT>*,
8588 Label*, LookupMode);
8589template V8_EXPORT_PRIVATE void CodeStubAssembler::NameDictionaryLookup<
8590 GlobalDictionary>(TNode<GlobalDictionary>, TNode<Name>, Label*,
8591 TVariable<IntPtrT>*, Label*, LookupMode);
8592
8593TNode<Word32T> CodeStubAssembler::ComputeSeededHash(TNode<IntPtrT> key) {
8594 const TNode<ExternalReference> function_addr =
8595 ExternalConstant(ExternalReference::compute_integer_hash());
8596 const TNode<ExternalReference> isolate_ptr =
8597 ExternalConstant(ExternalReference::isolate_address(isolate()));
8598
8599 MachineType type_ptr = MachineType::Pointer();
8600 MachineType type_uint32 = MachineType::Uint32();
8601 MachineType type_int32 = MachineType::Int32();
8602
8603 return UncheckedCast<Word32T>(CallCFunction(
8604 function_addr, type_uint32, std::make_pair(type_ptr, isolate_ptr),
8605 std::make_pair(type_int32, TruncateIntPtrToInt32(key))));
8606}
8607
8608template <>
8609void CodeStubAssembler::NameDictionaryLookup(
8610 TNode<SwissNameDictionary> dictionary, TNode<Name> unique_name,
8611 Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found,
8612 LookupMode mode) {
8613 SwissNameDictionaryFindEntry(dictionary, unique_name, if_found,
8614 var_name_index, if_not_found);
8615}
8616
8617void CodeStubAssembler::NumberDictionaryLookup(
8618 TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
8619 Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
8620 CSA_DCHECK(this, IsNumberDictionary(dictionary))((void)0);
8621 DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep())((void) 0);
8622 Comment("NumberDictionaryLookup");
8623
8624 TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
8625 TNode<IntPtrT> mask = IntPtrSub(capacity, IntPtrConstant(1));
8626
8627 TNode<UintPtrT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
8628 TNode<Float64T> key_as_float64 = RoundIntPtrToFloat64(intptr_index);
8629
8630 // See Dictionary::FirstProbe().
8631 TNode<IntPtrT> count = IntPtrConstant(0);
8632 TNode<IntPtrT> initial_entry = Signed(WordAnd(hash, mask));
8633
8634 TNode<Oddball> undefined = UndefinedConstant();
8635 TNode<Oddball> the_hole = TheHoleConstant();
8636
8637 TVARIABLE(IntPtrT, var_count, count)TVariable<IntPtrT> var_count(count, this);
8638 Label loop(this, {&var_count, var_entry});
8639 *var_entry = initial_entry;
8640 Goto(&loop);
8641 BIND(&loop)Bind(&loop);
8642 {
8643 TNode<IntPtrT> entry = var_entry->value();
8644
8645 TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
8646 TNode<Object> current = UnsafeLoadFixedArrayElement(dictionary, index);
8647 GotoIf(TaggedEqual(current, undefined), if_not_found);
8648 Label next_probe(this);
8649 {
8650 Label if_currentissmi(this), if_currentisnotsmi(this);
8651 Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
8652 BIND(&if_currentissmi)Bind(&if_currentissmi);
8653 {
8654 TNode<IntPtrT> current_value = SmiUntag(CAST(current)Cast(current));
8655 Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
8656 }
8657 BIND(&if_currentisnotsmi)Bind(&if_currentisnotsmi);
8658 {
8659 GotoIf(TaggedEqual(current, the_hole), &next_probe);
8660 // Current must be the Number.
8661 TNode<Float64T> current_value = LoadHeapNumberValue(CAST(current)Cast(current));
8662 Branch(Float64Equal(current_value, key_as_float64), if_found,
8663 &next_probe);
8664 }
8665 }
8666
8667 BIND(&next_probe)Bind(&next_probe);
8668 // See Dictionary::NextProbe().
8669 Increment(&var_count);
8670 entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
8671
8672 *var_entry = entry;
8673 Goto(&loop);
8674 }
8675}
8676
8677TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
8678 TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
8679 Label* not_data, Label* if_hole) {
8680 TVARIABLE(IntPtrT, var_entry)TVariable<IntPtrT> var_entry(this);
8681 Label if_found(this);
8682 NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
8683 if_hole);
8684 BIND(&if_found)Bind(&if_found);
8685
8686 // Check that the value is a data property.
8687 TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
8688 TNode<Uint32T> details = LoadDetailsByKeyIndex(dictionary, index);
8689 TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
8690 // TODO(jkummerow): Support accessors without missing?
8691 GotoIfNot(
8692 Word32Equal(kind, Int32Constant(static_cast<int>(PropertyKind::kData))),
8693 not_data);
8694 // Finally, load the value.
8695 return LoadValueByKeyIndex(dictionary, index);
8696}
8697
8698template <class Dictionary>
8699void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
8700 TNode<Name> key,
8701 TVariable<IntPtrT>* var_key_index) {
8702 UNREACHABLE()V8_Fatal("unreachable code");
8703}
8704
8705template <>
8706void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
8707 TNode<NameDictionary> dictionary, TNode<Name> key,
8708 TVariable<IntPtrT>* var_key_index) {
8709 Label done(this);
8710 NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
8711 &done, kFindInsertionIndex);
8712 BIND(&done)Bind(&done);
8713}
8714
8715template <class Dictionary>
8716void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
8717 TNode<Name> key, TNode<Object> value,
8718 TNode<IntPtrT> index,
8719 TNode<Smi> enum_index) {
8720 UNREACHABLE()V8_Fatal("unreachable code"); // Use specializations instead.
8721}
8722
8723template <>
8724void CodeStubAssembler::InsertEntry<NameDictionary>(
8725 TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
8726 TNode<IntPtrT> index, TNode<Smi> enum_index) {
8727 // This should only be used for adding, not updating existing mappings.
8728 CSA_DCHECK(this,((void)0)
8729 Word32Or(TaggedEqual(LoadFixedArrayElement(dictionary, index),((void)0)
8730 UndefinedConstant()),((void)0)
8731 TaggedEqual(LoadFixedArrayElement(dictionary, index),((void)0)
8732 TheHoleConstant())))((void)0);
8733
8734 // Store name and value.
8735 StoreFixedArrayElement(dictionary, index, name);
8736 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
8737
8738 // Prepare details of the new property.
8739 PropertyDetails d(PropertyKind::kData, NONE,
8740 PropertyDetails::kConstIfDictConstnessTracking);
8741
8742 enum_index =
8743 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
8744 // We OR over the actual index below, so we expect the initial value to be 0.
8745 DCHECK_EQ(0, d.dictionary_index())((void) 0);
8746 TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index))TVariable<Smi> var_details(SmiOr(SmiConstant(d.AsSmi())
, enum_index), this)
;
8747
8748 // Private names must be marked non-enumerable.
8749 Label not_private(this, &var_details);
8750 GotoIfNot(IsPrivateSymbol(name), &not_private);
8751 TNode<Smi> dont_enum =
8752 SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
8753 var_details = SmiOr(var_details.value(), dont_enum);
8754 Goto(&not_private);
8755 BIND(&not_private)Bind(&not_private);
8756
8757 // Finally, store the details.
8758 StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
8759 var_details.value());
8760}
8761
8762template <>
8763void CodeStubAssembler::InsertEntry<GlobalDictionary>(
8764 TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
8765 TNode<IntPtrT> index, TNode<Smi> enum_index) {
8766 UNIMPLEMENTED()V8_Fatal("unimplemented code");
8767}
8768
8769template <class Dictionary>
8770void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
8771 TNode<Object> value, Label* bailout) {
8772 CSA_DCHECK(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)))((void)0);
8773 TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
8774 TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
8775 TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
8776 // Require 33% to still be free after adding additional_elements.
8777 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
8778 // But that's OK here because it's only used for a comparison.
8779 TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
8780 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
8781 // Require rehashing if more than 50% of free elements are deleted elements.
8782 TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
8783 CSA_DCHECK(this, SmiAbove(capacity, new_nof))((void)0);
8784 TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
8785 GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
8786
8787 TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
8788 TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
8789 TNode<Smi> max_enum_index =
8790 SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
8791 GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
8792
8793 // No more bailouts after this point.
8794 // Operations from here on can have side effects.
8795
8796 SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
8797 SetNumberOfElements<Dictionary>(dictionary, new_nof);
8798
8799 TVARIABLE(IntPtrT, var_key_index)TVariable<IntPtrT> var_key_index(this);
8800 FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
8801 InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
8802 enum_index);
8803}
8804
8805template <>
8806void CodeStubAssembler::Add(TNode<SwissNameDictionary> dictionary,
8807 TNode<Name> key, TNode<Object> value,
8808 Label* bailout) {
8809 PropertyDetails d(PropertyKind::kData, NONE,
8810 PropertyDetails::kConstIfDictConstnessTracking);
8811
8812 PropertyDetails d_dont_enum(PropertyKind::kData, DONT_ENUM,
8813 PropertyDetails::kConstIfDictConstnessTracking);
8814 TNode<Uint8T> details_byte_enum =
8815 UncheckedCast<Uint8T>(Uint32Constant(d.ToByte()));
8816 TNode<Uint8T> details_byte_dont_enum =
8817 UncheckedCast<Uint8T>(Uint32Constant(d_dont_enum.ToByte()));
8818
8819 Label not_private(this);
8820 TVARIABLE(Uint8T, var_details, details_byte_enum)TVariable<Uint8T> var_details(details_byte_enum, this);
8821
8822 GotoIfNot(IsPrivateSymbol(key), &not_private);
8823 var_details = details_byte_dont_enum;
8824 Goto(&not_private);
8825
8826 BIND(&not_private)Bind(&not_private);
8827 SwissNameDictionaryAdd(dictionary, key, value, var_details.value(), bailout);
8828}
8829
8830template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
8831 TNode<Name>, TNode<Object>,
8832 Label*);
8833
8834template <class Dictionary>
8835TNode<Smi> CodeStubAssembler::GetNumberOfElements(
8836 TNode<Dictionary> dictionary) {
8837 return CAST(Cast(LoadFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex
))
8838 LoadFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex))Cast(LoadFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex
))
;
8839}
8840
8841template <>
8842TNode<Smi> CodeStubAssembler::GetNumberOfElements(
8843 TNode<SwissNameDictionary> dictionary) {
8844 TNode<IntPtrT> capacity =
8845 ChangeInt32ToIntPtr(LoadSwissNameDictionaryCapacity(dictionary));
8846 return SmiFromIntPtr(
8847 LoadSwissNameDictionaryNumberOfElements(dictionary, capacity));
8848}
8849
8850template TNode<Smi> CodeStubAssembler::GetNumberOfElements(
8851 TNode<NameDictionary> dictionary);
8852template TNode<Smi> CodeStubAssembler::GetNumberOfElements(
8853 TNode<NumberDictionary> dictionary);
8854template TNode<Smi> CodeStubAssembler::GetNumberOfElements(
8855 TNode<GlobalDictionary> dictionary);
8856
8857template <typename Array>
8858void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
8859 TNode<Array> array,
8860 TNode<Uint32T> number_of_valid_entries,
8861 Label* if_found,
8862 TVariable<IntPtrT>* var_name_index,
8863 Label* if_not_found) {
8864 static_assert(std::is_base_of<FixedArray, Array>::value ||
8865 std::is_base_of<WeakFixedArray, Array>::value ||
8866 std::is_base_of<DescriptorArray, Array>::value,
8867 "T must be a descendant of FixedArray or a WeakFixedArray");
8868 Comment("LookupLinear");
8869 CSA_DCHECK(this, IsUniqueName(unique_name))((void)0);
8870 TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
8871 TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
8872 TNode<IntPtrT> last_exclusive = IntPtrAdd(
8873 first_inclusive,
8874 IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));
8875
8876 BuildFastLoop<IntPtrT>(
8877 last_exclusive, first_inclusive,
8878 [=](TNode<IntPtrT> name_index) {
8879 TNode<MaybeObject> element =
8880 LoadArrayElement(array, Array::kHeaderSize, name_index);
8881 TNode<Name> candidate_name = CAST(element)Cast(element);
8882 *var_name_index = name_index;
8883 GotoIf(TaggedEqual(candidate_name, unique_name), if_found);
8884 },
8885 -Array::kEntrySize, IndexAdvanceMode::kPre);
8886 Goto(if_not_found);
8887}
8888
8889template <>
8890TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
8891 TNode<DescriptorArray> descriptors) {
8892 return Unsigned(LoadNumberOfDescriptors(descriptors));
8893}
8894
8895template <>
8896TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
8897 TNode<TransitionArray> transitions) {
8898 TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
8899 return Select<Uint32T>(
8900 UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
8901 [=] { return Unsigned(Int32Constant(0)); },
8902 [=] {
8903 return Unsigned(LoadAndUntagToWord32ArrayElement(
8904 transitions, WeakFixedArray::kHeaderSize,
8905 IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
8906 });
8907}
8908
8909template <typename Array>
8910TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
8911 TNode<Uint32T> entry_index) {
8912 TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8913 TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8914 return ChangeInt32ToIntPtr(index);
8915}
8916
8917template <typename Array>
8918TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
8919 return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
8920 EntryIndexToIndex<Array>(entry_index));
8921}
8922
8923template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
8924 TNode<Uint32T>);
8925template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
8926 TNode<Uint32T>);
8927
8928template <>
8929TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
8930 TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8931 TNode<Uint32T> details =
8932 DescriptorArrayGetDetails(descriptors, descriptor_number);
8933 return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
8934}
8935
8936template <>
8937TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
8938 TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
8939 return transition_number;
8940}
8941
8942template <typename Array>
8943TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
8944 TNode<Uint32T> entry_index) {
8945 static_assert(std::is_base_of<TransitionArray, Array>::value ||
8946 std::is_base_of<DescriptorArray, Array>::value,
8947 "T must be a descendant of DescriptorArray or TransitionArray");
8948 const int key_offset = Array::ToKeyIndex(0) * kTaggedSize;
8949 TNode<MaybeObject> element =
8950 LoadArrayElement(array, Array::kHeaderSize,
8951 EntryIndexToIndex<Array>(entry_index), key_offset);
8952 return CAST(element)Cast(element);
8953}
8954
8955template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
8956 TNode<DescriptorArray>, TNode<Uint32T>);
8957template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
8958 TNode<TransitionArray>, TNode<Uint32T>);
8959
8960TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
8961 TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8962 const int details_offset = DescriptorArray::ToDetailsIndex(0) * kTaggedSize;
8963 return Unsigned(LoadAndUntagToWord32ArrayElement(
8964 descriptors, DescriptorArray::kHeaderSize,
8965 EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
8966}
8967
8968template <typename Array>
8969void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
8970 TNode<Array> array,
8971 TNode<Uint32T> number_of_valid_entries,
8972 Label* if_found,
8973 TVariable<IntPtrT>* var_name_index,
8974 Label* if_not_found) {
8975 Comment("LookupBinary");
8976 TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)))TVariable<Uint32T> var_low(Unsigned(Int32Constant(0)), this
)
;
8977 TNode<Uint32T> limit =
8978 Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
8979 TVARIABLE(Uint32T, var_high, limit)TVariable<Uint32T> var_high(limit, this);
8980 TNode<Uint32T> hash = LoadNameHashAssumeComputed(unique_name);
8981 CSA_DCHECK(this, Word32NotEqual(hash, Int32Constant(0)))((void)0);
8982
8983 // Assume non-empty array.
8984 CSA_DCHECK(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()))((void)0);
8985
8986 Label binary_loop(this, {&var_high, &var_low});
8987 Goto(&binary_loop);
8988 BIND(&binary_loop)Bind(&binary_loop);
8989 {
8990 // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
8991 TNode<Uint32T> mid = Unsigned(
8992 Int32Add(var_low.value(),
8993 Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
8994 // mid_name = array->GetSortedKey(mid).
8995 TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
8996 TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);
8997
8998 TNode<Uint32T> mid_hash = LoadNameHashAssumeComputed(mid_name);
8999
9000 Label mid_greater(this), mid_less(this), merge(this);
9001 Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
9002 BIND(&mid_greater)Bind(&mid_greater);
9003 {
9004 var_high = mid;
9005 Goto(&merge);
9006 }
9007 BIND(&mid_less)Bind(&mid_less);
9008 {
9009 var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
9010 Goto(&merge);
9011 }
9012 BIND(&merge)Bind(&merge);
9013 GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
9014 }
9015
9016 Label scan_loop(this, &var_low);
9017 Goto(&scan_loop);
9018 BIND(&scan_loop)Bind(&scan_loop);
9019 {
9020 GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
9021
9022 TNode<Uint32T> sort_index =
9023 GetSortedKeyIndex<Array>(array, var_low.value());
9024 TNode<Name> current_name = GetKey<Array>(array, sort_index);
9025 TNode<Uint32T> current_hash = LoadNameHashAssumeComputed(current_name);
9026 GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
9027 Label next(this);
9028 GotoIf(TaggedNotEqual(current_name, unique_name), &next);
9029 GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
9030 if_not_found);
9031 *var_name_index = ToKeyIndex<Array>(sort_index);
9032 Goto(if_found);
9033
9034 BIND(&next)Bind(&next);
9035 var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
9036 Goto(&scan_loop);
9037 }
9038}
9039
9040void CodeStubAssembler::ForEachEnumerableOwnProperty(
9041 TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
9042 PropertiesEnumerationMode mode, const ForEachKeyValueFunction& body,
9043 Label* bailout) {
9044 TNode<Uint16T> type = LoadMapInstanceType(map);
9045 TNode<Uint32T> bit_field3 = EnsureOnlyHasSimpleProperties(map, type, bailout);
9046
9047 TVARIABLE(DescriptorArray, var_descriptors, LoadMapDescriptors(map))TVariable<DescriptorArray> var_descriptors(LoadMapDescriptors
(map), this)
;
9048 TNode<Uint32T> nof_descriptors =
9049 DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bit_field3);
9050
9051 TVARIABLE(BoolT, var_stable, Int32TrueConstant())TVariable<BoolT> var_stable(Int32TrueConstant(), this);
9052
9053 TVARIABLE(BoolT, var_has_symbol, Int32FalseConstant())TVariable<BoolT> var_has_symbol(Int32FalseConstant(), this
)
;
9054 // false - iterate only string properties, true - iterate only symbol
9055 // properties
9056 TVARIABLE(BoolT, var_is_symbol_processing_loop, Int32FalseConstant())TVariable<BoolT> var_is_symbol_processing_loop(Int32FalseConstant
(), this)
;
9057 TVARIABLE(IntPtrT, var_start_key_index,TVariable<IntPtrT> var_start_key_index(ToKeyIndex<DescriptorArray
>(Unsigned(Int32Constant(0))), this)
9058 ToKeyIndex<DescriptorArray>(Unsigned(Int32Constant(0))))TVariable<IntPtrT> var_start_key_index(ToKeyIndex<DescriptorArray
>(Unsigned(Int32Constant(0))), this)
;
9059 // Note: var_end_key_index is exclusive for the loop
9060 TVARIABLE(IntPtrT, var_end_key_index,TVariable<IntPtrT> var_end_key_index(ToKeyIndex<DescriptorArray
>(nof_descriptors), this)
9061 ToKeyIndex<DescriptorArray>(nof_descriptors))TVariable<IntPtrT> var_end_key_index(ToKeyIndex<DescriptorArray
>(nof_descriptors), this)
;
9062 VariableList list({&var_descriptors, &var_stable, &var_has_symbol,
9063 &var_is_symbol_processing_loop, &var_start_key_index,
9064 &var_end_key_index},
9065 zone());
9066 Label descriptor_array_loop(this, list);
9067
9068 Goto(&descriptor_array_loop);
9069 BIND(&descriptor_array_loop)Bind(&descriptor_array_loop);
9070
9071 BuildFastLoop<IntPtrT>(
9072 list, var_start_key_index.value(), var_end_key_index.value(),
9073 [&](TNode<IntPtrT> descriptor_key_index) {
9074 TNode<Name> next_key =
9075 LoadKeyByKeyIndex(var_descriptors.value(), descriptor_key_index);
9076
9077 TVARIABLE(Object, var_value, SmiConstant(0))TVariable<Object> var_value(SmiConstant(0), this);
9078 Label callback(this), next_iteration(this);
9079
9080 if (mode == kEnumerationOrder) {
9081 // |next_key| is either a string or a symbol
9082 // Skip strings or symbols depending on
9083 // |var_is_symbol_processing_loop|.
9084 Label if_string(this), if_symbol(this), if_name_ok(this);
9085 Branch(IsSymbol(next_key), &if_symbol, &if_string);
9086 BIND(&if_symbol)Bind(&if_symbol);
9087 {
9088 // Process symbol property when |var_is_symbol_processing_loop| is
9089 // true.
9090 GotoIf(var_is_symbol_processing_loop.value(), &if_name_ok);
9091 // First iteration need to calculate smaller range for processing
9092 // symbols
9093 Label if_first_symbol(this);
9094 // var_end_key_index is still inclusive at this point.
9095 var_end_key_index = descriptor_key_index;
9096 Branch(var_has_symbol.value(), &next_iteration, &if_first_symbol);
9097 BIND(&if_first_symbol)Bind(&if_first_symbol);
9098 {
9099 var_start_key_index = descriptor_key_index;
9100 var_has_symbol = Int32TrueConstant();
9101 Goto(&next_iteration);
9102 }
9103 }
9104 BIND(&if_string)Bind(&if_string);
9105 {
9106 CSA_DCHECK(this, IsString(next_key))((void)0);
9107 // Process string property when |var_is_symbol_processing_loop| is
9108 // false.
9109 Branch(var_is_symbol_processing_loop.value(), &next_iteration,
9110 &if_name_ok);
9111 }
9112 BIND(&if_name_ok)Bind(&if_name_ok);
9113 }
9114 {
9115 TVARIABLE(Map, var_map)TVariable<Map> var_map(this);
9116 TVARIABLE(HeapObject, var_meta_storage)TVariable<HeapObject> var_meta_storage(this);
9117 TVARIABLE(IntPtrT, var_entry)TVariable<IntPtrT> var_entry(this);
9118 TVARIABLE(Uint32T, var_details)TVariable<Uint32T> var_details(this);
9119 Label if_found(this);
9120
9121 Label if_found_fast(this), if_found_dict(this);
9122
9123 Label if_stable(this), if_not_stable(this);
9124 Branch(var_stable.value(), &if_stable, &if_not_stable);
9125 BIND(&if_stable)Bind(&if_stable);
9126 {
9127 // Directly decode from the descriptor array if |object| did not
9128 // change shape.
9129 var_map = map;
9130 var_meta_storage = var_descriptors.value();
9131 var_entry = Signed(descriptor_key_index);
9132 Goto(&if_found_fast);
9133 }
9134 BIND(&if_not_stable)Bind(&if_not_stable);
9135 {
9136 // If the map did change, do a slower lookup. We are still
9137 // guaranteed that the object has a simple shape, and that the key
9138 // is a name.
9139 var_map = LoadMap(object);
9140 TryLookupPropertyInSimpleObject(object, var_map.value(), next_key,
9141 &if_found_fast, &if_found_dict,
9142 &var_meta_storage, &var_entry,
9143 &next_iteration, bailout);
9144 }
9145
9146 BIND(&if_found_fast)Bind(&if_found_fast);
9147 {
9148 TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value())Cast(var_meta_storage.value());
9149 TNode<IntPtrT> name_index = var_entry.value();
9150
9151 // Skip non-enumerable properties.
9152 var_details = LoadDetailsByKeyIndex(descriptors, name_index);
9153 GotoIf(IsSetWord32(var_details.value(),
9154 PropertyDetails::kAttributesDontEnumMask),
9155 &next_iteration);
9156
9157 LoadPropertyFromFastObject(object, var_map.value(), descriptors,
9158 name_index, var_details.value(),
9159 &var_value);
9160 Goto(&if_found);
9161 }
9162 BIND(&if_found_dict)Bind(&if_found_dict);
9163 {
9164 TNode<PropertyDictionary> dictionary =
9165 CAST(var_meta_storage.value())Cast(var_meta_storage.value());
9166 TNode<IntPtrT> entry = var_entry.value();
9167
9168 TNode<Uint32T> details = LoadDetailsByKeyIndex(dictionary, entry);
9169 // Skip non-enumerable properties.
9170 GotoIf(
9171 IsSetWord32(details, PropertyDetails::kAttributesDontEnumMask),
9172 &next_iteration);
9173
9174 var_details = details;
9175 var_value =
9176 LoadValueByKeyIndex<PropertyDictionary>(dictionary, entry);
9177 Goto(&if_found);
9178 }
9179
9180 // Here we have details and value which could be an accessor.
9181 BIND(&if_found)Bind(&if_found);
9182 {
9183 Label slow_load(this, Label::kDeferred);
9184
9185 var_value = CallGetterIfAccessor(
9186 var_value.value(), object, var_details.value(), context, object,
9187 next_key, &slow_load, kCallJSGetterUseCachedName);
9188 Goto(&callback);
9189
9190 BIND(&slow_load)Bind(&slow_load);
9191 var_value =
9192 CallRuntime(Runtime::kGetProperty, context, object, next_key);
9193 Goto(&callback);
9194
9195 BIND(&callback)Bind(&callback);
9196 body(next_key, var_value.value());
9197
9198 // Check if |object| is still stable, i.e. the descriptors in the
9199 // preloaded |descriptors| are still the same modulo in-place
9200 // representation changes.
9201 GotoIfNot(var_stable.value(), &next_iteration);
9202 var_stable = TaggedEqual(LoadMap(object), map);
9203 // Reload the descriptors just in case the actual array changed, and
9204 // any of the field representations changed in-place.
9205 var_descriptors = LoadMapDescriptors(map);
9206
9207 Goto(&next_iteration);
9208 }
9209 }
9210 BIND(&next_iteration)Bind(&next_iteration);
9211 },
9212 DescriptorArray::kEntrySize, IndexAdvanceMode::kPost);
9213
9214 if (mode == kEnumerationOrder) {
9215 Label done(this);
9216 GotoIf(var_is_symbol_processing_loop.value(), &done);
9217 GotoIfNot(var_has_symbol.value(), &done);
9218 // All string properties are processed, now process symbol properties.
9219 var_is_symbol_processing_loop = Int32TrueConstant();
9220 // Add DescriptorArray::kEntrySize to make the var_end_key_index exclusive
9221 // as BuildFastLoop() expects.
9222 Increment(&var_end_key_index, DescriptorArray::kEntrySize);
9223 Goto(&descriptor_array_loop);
9224
9225 BIND(&done)Bind(&done);
9226 }
9227}
9228
9229TNode<Object> CodeStubAssembler::GetConstructor(TNode<Map> map) {
9230 TVARIABLE(HeapObject, var_maybe_constructor)TVariable<HeapObject> var_maybe_constructor(this);
9231 var_maybe_constructor = map;
9232 Label loop(this, &var_maybe_constructor), done(this);
9233 GotoIfNot(IsMap(var_maybe_constructor.value()), &done);
9234 Goto(&loop);
9235
9236 BIND(&loop)Bind(&loop);
9237 {
9238 var_maybe_constructor = CAST(Cast(LoadObjectField(var_maybe_constructor.value(), Map::kConstructorOrBackPointerOrNativeContextOffset
))
9239 LoadObjectField(var_maybe_constructor.value(),Cast(LoadObjectField(var_maybe_constructor.value(), Map::kConstructorOrBackPointerOrNativeContextOffset
))
9240 Map::kConstructorOrBackPointerOrNativeContextOffset))Cast(LoadObjectField(var_maybe_constructor.value(), Map::kConstructorOrBackPointerOrNativeContextOffset
))
;
9241 GotoIf(IsMap(var_maybe_constructor.value()), &loop);
9242 Goto(&done);
9243 }
9244
9245 BIND(&done)Bind(&done);
9246 return var_maybe_constructor.value();
9247}
9248
9249TNode<NativeContext> CodeStubAssembler::GetCreationContext(
9250 TNode<JSReceiver> receiver, Label* if_bailout) {
9251 TNode<Map> receiver_map = LoadMap(receiver);
9252 TNode<Object> constructor = GetConstructor(receiver_map);
9253
9254 TVARIABLE(JSFunction, var_function)TVariable<JSFunction> var_function(this);
9255
9256 Label done(this), if_jsfunction(this), if_jsgenerator(this);
9257 GotoIf(TaggedIsSmi(constructor), if_bailout);
9258
9259 TNode<Map> function_map = LoadMap(CAST(constructor)Cast(constructor));
9260 GotoIf(IsJSFunctionMap(function_map), &if_jsfunction);
9261 GotoIf(IsJSGeneratorMap(function_map), &if_jsgenerator);
9262 // Remote objects don't have a creation context.
9263 GotoIf(IsFunctionTemplateInfoMap(function_map), if_bailout);
9264
9265 CSA_DCHECK(this, IsJSFunctionMap(receiver_map))((void)0);
9266 var_function = CAST(receiver)Cast(receiver);
9267 Goto(&done);
9268
9269 BIND(&if_jsfunction)Bind(&if_jsfunction);
9270 {
9271 var_function = CAST(constructor)Cast(constructor);
9272 Goto(&done);
9273 }
9274
9275 BIND(&if_jsgenerator)Bind(&if_jsgenerator);
9276 {
9277 var_function = LoadJSGeneratorObjectFunction(CAST(receiver)Cast(receiver));
9278 Goto(&done);
9279 }
9280
9281 BIND(&done)Bind(&done);
9282 TNode<Context> context = LoadJSFunctionContext(var_function.value());
9283
9284 GotoIfNot(IsContext(context), if_bailout);
9285
9286 TNode<NativeContext> native_context = LoadNativeContext(context);
9287 return native_context;
9288}
9289
9290TNode<NativeContext> CodeStubAssembler::GetFunctionRealm(
9291 TNode<Context> context, TNode<JSReceiver> receiver, Label* if_bailout) {
9292 TVARIABLE(JSReceiver, current)TVariable<JSReceiver> current(this);
9293 Label loop(this, VariableList({&current}, zone())), is_proxy(this),
9294 is_function(this), is_bound_function(this), is_wrapped_function(this),
9295 proxy_revoked(this, Label::kDeferred);
9296 CSA_DCHECK(this, IsCallable(receiver))((void)0);
9297 current = receiver;
9298 Goto(&loop);
9299
9300 BIND(&loop)Bind(&loop);
9301 {
9302 TNode<JSReceiver> current_value = current.value();
9303 GotoIf(IsJSProxy(current_value), &is_proxy);
9304 GotoIf(IsJSFunction(current_value), &is_function);
9305 GotoIf(IsJSBoundFunction(current_value), &is_bound_function);
9306 GotoIf(IsJSWrappedFunction(current_value), &is_wrapped_function);
9307 Goto(if_bailout);
9308 }
9309
9310 BIND(&is_proxy)Bind(&is_proxy);
9311 {
9312 TNode<JSProxy> proxy = CAST(current.value())Cast(current.value());
9313 TNode<HeapObject> handler =
9314 CAST(LoadObjectField(proxy, JSProxy::kHandlerOffset))Cast(LoadObjectField(proxy, JSProxy::kHandlerOffset));
9315 // Proxy is revoked.
9316 GotoIfNot(IsJSReceiver(handler), &proxy_revoked);
9317 TNode<JSReceiver> target =
9318 CAST(LoadObjectField(proxy, JSProxy::kTargetOffset))Cast(LoadObjectField(proxy, JSProxy::kTargetOffset));
9319 current = target;
9320 Goto(&loop);
9321 }
9322
9323 BIND(&proxy_revoked)Bind(&proxy_revoked);
9324 { ThrowTypeError(context, MessageTemplate::kProxyRevoked, "apply"); }
9325
9326 BIND(&is_bound_function)Bind(&is_bound_function);
9327 {
9328 TNode<JSBoundFunction> bound_function = CAST(current.value())Cast(current.value());
9329 TNode<JSReceiver> target = CAST(LoadObjectField(Cast(LoadObjectField( bound_function, JSBoundFunction::kBoundTargetFunctionOffset
))
9330 bound_function, JSBoundFunction::kBoundTargetFunctionOffset))Cast(LoadObjectField( bound_function, JSBoundFunction::kBoundTargetFunctionOffset
))
;
9331 current = target;
9332 Goto(&loop);
9333 }
9334
9335 BIND(&is_wrapped_function)Bind(&is_wrapped_function);
9336 {
9337 TNode<JSWrappedFunction> wrapped_function = CAST(current.value())Cast(current.value());
9338 TNode<JSReceiver> target = CAST(LoadObjectField(Cast(LoadObjectField( wrapped_function, JSWrappedFunction::kWrappedTargetFunctionOffset
))
9339 wrapped_function, JSWrappedFunction::kWrappedTargetFunctionOffset))Cast(LoadObjectField( wrapped_function, JSWrappedFunction::kWrappedTargetFunctionOffset
))
;
9340 current = target;
9341 Goto(&loop);
9342 }
9343
9344 BIND(&is_function)Bind(&is_function);
9345 {
9346 TNode<JSFunction> function = CAST(current.value())Cast(current.value());
9347 TNode<Context> context =
9348 CAST(LoadObjectField(function, JSFunction::kContextOffset))Cast(LoadObjectField(function, JSFunction::kContextOffset));
9349 TNode<NativeContext> native_context = LoadNativeContext(context);
9350 return native_context;
9351 }
9352}
9353
9354void CodeStubAssembler::DescriptorLookup(TNode<Name> unique_name,
9355 TNode<DescriptorArray> descriptors,
9356 TNode<Uint32T> bitfield3,
9357 Label* if_found,
9358 TVariable<IntPtrT>* var_name_index,
9359 Label* if_not_found) {
9360 Comment("DescriptorArrayLookup");
9361 TNode<Uint32T> nof =
9362 DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bitfield3);
9363 Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
9364 var_name_index, if_not_found);
9365}
9366
9367void CodeStubAssembler::TransitionLookup(TNode<Name> unique_name,
9368 TNode<TransitionArray> transitions,
9369 Label* if_found,
9370 TVariable<IntPtrT>* var_name_index,
9371 Label* if_not_found) {
9372 Comment("TransitionArrayLookup");
9373 TNode<Uint32T> number_of_valid_transitions =
9374 NumberOfEntries<TransitionArray>(transitions);
9375 Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
9376 if_found, var_name_index, if_not_found);
9377}
9378
9379template <typename Array>
9380void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
9381 TNode<Uint32T> number_of_valid_entries,
9382 Label* if_found,
9383 TVariable<IntPtrT>* var_name_index,
9384 Label* if_not_found) {
9385 Comment("ArrayLookup");
9386 if (!number_of_valid_entries) {
9387 number_of_valid_entries = NumberOfEntries(array);
9388 }
9389 GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
9390 Label linear_search(this), binary_search(this);
9391 const int kMaxElementsForLinearSearch = 32;
9392 Branch(Uint32LessThanOrEqual(number_of_valid_entries,
9393 Int32Constant(kMaxElementsForLinearSearch)),
9394 &linear_search, &binary_search);
9395 BIND(&linear_search)Bind(&linear_search);
9396 {
9397 LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
9398 var_name_index, if_not_found);
9399 }
9400 BIND(&binary_search)Bind(&binary_search);
9401 {
9402 LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
9403 var_name_index, if_not_found);
9404 }
9405}
9406
9407void CodeStubAssembler::TryLookupPropertyInSimpleObject(
9408 TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
9409 Label* if_found_fast, Label* if_found_dict,
9410 TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
9411 Label* if_not_found, Label* bailout) {
9412 CSA_DCHECK(this, IsSimpleObjectMap(map))((void)0);
9413 CSA_DCHECK(this, IsUniqueNameNoCachedIndex(unique_name))((void)0);
9414
9415 TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
9416 Label if_isfastmap(this), if_isslowmap(this);
9417 Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
9418 &if_isfastmap);
9419 BIND(&if_isfastmap)Bind(&if_isfastmap);
9420 {
9421 TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
9422 *var_meta_storage = descriptors;
9423
9424 DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
9425 var_name_index, if_not_found);
9426 }
9427 BIND(&if_isslowmap)Bind(&if_isslowmap);
9428 {
9429 TNode<PropertyDictionary> dictionary = CAST(LoadSlowProperties(object))Cast(LoadSlowProperties(object));
9430 *var_meta_storage = dictionary;
9431
9432 NameDictionaryLookup<PropertyDictionary>(
9433 dictionary, unique_name, if_found_dict, var_name_index, if_not_found);
9434 }
9435}
9436
9437void CodeStubAssembler::TryLookupProperty(
9438 TNode<HeapObject> object, TNode<Map> map, TNode<Int32T> instance_type,
9439 TNode<Name> unique_name, Label* if_found_fast, Label* if_found_dict,
9440 Label* if_found_global, TVariable<HeapObject>* var_meta_storage,
9441 TVariable<IntPtrT>* var_name_index, Label* if_not_found,
9442 Label* if_bailout) {
9443 Label if_objectisspecial(this);
9444 GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);
9445
9446 TryLookupPropertyInSimpleObject(CAST(object)Cast(object), map, unique_name, if_found_fast,
9447 if_found_dict, var_meta_storage,
9448 var_name_index, if_not_found, if_bailout);
9449
9450 BIND(&if_objectisspecial)Bind(&if_objectisspecial);
9451 {
9452 // Handle global object here and bailout for other special objects.
9453 GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
9454 if_bailout);
9455
9456 // Handle interceptors and access checks in runtime.
9457 TNode<Int32T> bit_field = LoadMapBitField(map);
9458 int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
9459 Map::Bits1::IsAccessCheckNeededBit::kMask;
9460 GotoIf(IsSetWord32(bit_field, mask), if_bailout);
9461
9462 TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(CAST(object)))Cast(LoadSlowProperties(Cast(object)));
9463 *var_meta_storage = dictionary;
9464
9465 NameDictionaryLookup<GlobalDictionary>(
9466 dictionary, unique_name, if_found_global, var_name_index, if_not_found);
9467 }
9468}
9469
9470void CodeStubAssembler::TryHasOwnProperty(TNode<HeapObject> object,
9471 TNode<Map> map,
9472 TNode<Int32T> instance_type,
9473 TNode<Name> unique_name,
9474 Label* if_found, Label* if_not_found,
9475 Label* if_bailout) {
9476 Comment("TryHasOwnProperty");
9477 CSA_DCHECK(this, IsUniqueNameNoCachedIndex(unique_name))((void)0);
9478 TVARIABLE(HeapObject, var_meta_storage)TVariable<HeapObject> var_meta_storage(this);
9479 TVARIABLE(IntPtrT, var_name_index)TVariable<IntPtrT> var_name_index(this);
9480
9481 Label if_found_global(this);
9482 TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
9483 &if_found_global, &var_meta_storage, &var_name_index,
9484 if_not_found, if_bailout);
9485
9486 BIND(&if_found_global)Bind(&if_found_global);
9487 {
9488 TVARIABLE(Object, var_value)TVariable<Object> var_value(this);
9489 TVARIABLE(Uint32T, var_details)TVariable<Uint32T> var_details(this);
9490 // Check if the property cell is not deleted.
9491 LoadPropertyFromGlobalDictionary(CAST(var_meta_storage.value())Cast(var_meta_storage.value()),
9492 var_name_index.value(), &var_details,
9493 &var_value, if_not_found);
9494 Goto(if_found);
9495 }
9496}
9497
9498TNode<Object> CodeStubAssembler::GetMethod(TNode<Context> context,
9499 TNode<Object> object,
9500 Handle<Name> name,
9501 Label* if_null_or_undefined) {
9502 TNode<Object> method = GetProperty(context, object, name);
9503
9504 GotoIf(IsUndefined(method), if_null_or_undefined);
9505 GotoIf(IsNull(method), if_null_or_undefined);
9506
9507 return method;
9508}
9509
9510TNode<Object> CodeStubAssembler::GetIteratorMethod(
9511 TNode<Context> context, TNode<HeapObject> heap_obj,
9512 Label* if_iteratorundefined) {
9513 return GetMethod(context, heap_obj, isolate()->factory()->iterator_symbol(),
9514 if_iteratorundefined);
9515}
9516
9517TNode<Object> CodeStubAssembler::CreateAsyncFromSyncIterator(
9518 TNode<Context> context, TNode<Object> sync_iterator) {
9519 Label not_receiver(this, Label::kDeferred);
9520 Label done(this);
9521 TVARIABLE(Object, return_value)TVariable<Object> return_value(this);
9522
9523 GotoIf(TaggedIsSmi(sync_iterator), &not_receiver);
9524 GotoIfNot(IsJSReceiver(CAST(sync_iterator)Cast(sync_iterator)), &not_receiver);
9525
9526 const TNode<Object> next =
9527 GetProperty(context, sync_iterator, factory()->next_string());
9528
9529 const TNode<NativeContext> native_context = LoadNativeContext(context);
9530 const TNode<Map> map = CAST(LoadContextElement(Cast(LoadContextElement( native_context, Context::ASYNC_FROM_SYNC_ITERATOR_MAP_INDEX
))
9531 native_context, Context::ASYNC_FROM_SYNC_ITERATOR_MAP_INDEX))Cast(LoadContextElement( native_context, Context::ASYNC_FROM_SYNC_ITERATOR_MAP_INDEX
))
;
9532 const TNode<JSObject> iterator = AllocateJSObjectFromMap(map);
9533
9534 StoreObjectFieldNoWriteBarrier(
9535 iterator, JSAsyncFromSyncIterator::kSyncIteratorOffset, sync_iterator);
9536 StoreObjectFieldNoWriteBarrier(iterator, JSAsyncFromSyncIterator::kNextOffset,
9537 next);
9538
9539 return_value = iterator;
9540 Goto(&done);
9541
9542 BIND(&not_receiver)Bind(&not_receiver);
9543 {
9544 return_value = CallRuntime(Runtime::kThrowSymbolIteratorInvalid, context);
9545
9546 // Unreachable due to the Throw in runtime call.
9547 Goto(&done);
9548 }
9549
9550 BIND(&done)Bind(&done);
9551 return return_value.value();
9552}
9553
9554void CodeStubAssembler::LoadPropertyFromFastObject(
9555 TNode<HeapObject> object, TNode<Map> map,
9556 TNode<DescriptorArray> descriptors, TNode<IntPtrT> name_index,
9557 TVariable<Uint32T>* var_details, TVariable<Object>* var_value) {
9558 TNode<Uint32T> details = LoadDetailsByKeyIndex(descriptors, name_index);
9559 *var_details = details;
9560
9561 LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
9562 var_value);
9563}
9564
9565void CodeStubAssembler::LoadPropertyFromFastObject(
9566 TNode<HeapObject> object, TNode<Map> map,
9567 TNode<DescriptorArray> descriptors, TNode<IntPtrT> name_index,
9568 TNode<Uint32T> details, TVariable<Object>* var_value) {
9569 Comment("[ LoadPropertyFromFastObject");
9570
9571 TNode<Uint32T> location =
9572 DecodeWord32<PropertyDetails::LocationField>(details);
9573
9574 Label if_in_field(this), if_in_descriptor(this), done(this);
9575 Branch(Word32Equal(location, Int32Constant(static_cast<int32_t>(
9576 PropertyLocation::kField))),
9577 &if_in_field, &if_in_descriptor);
9578 BIND(&if_in_field)Bind(&if_in_field);
9579 {
9580 TNode<IntPtrT> field_index =
9581 Signed(DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details));
9582 TNode<Uint32T> representation =
9583 DecodeWord32<PropertyDetails::RepresentationField>(details);
9584
9585 // TODO(ishell): support WasmValues.
9586 CSA_DCHECK(this, Word32NotEqual(representation,((void)0)
9587 Int32Constant(Representation::kWasmValue)))((void)0);
9588 field_index =
9589 IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
9590 TNode<IntPtrT> instance_size_in_words = LoadMapInstanceSizeInWords(map);
9591
9592 Label if_inobject(this), if_backing_store(this);
9593 TVARIABLE(Float64T, var_double_value)TVariable<Float64T> var_double_value(this);
9594 Label rebox_double(this, &var_double_value);
9595 Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
9596 &if_backing_store);
9597 BIND(&if_inobject)Bind(&if_inobject);
9598 {
9599 Comment("if_inobject");
9600 TNode<IntPtrT> field_offset = TimesTaggedSize(field_index);
9601
9602 Label if_double(this), if_tagged(this);
9603 Branch(Word32NotEqual(representation,
9604 Int32Constant(Representation::kDouble)),
9605 &if_tagged, &if_double);
9606 BIND(&if_tagged)Bind(&if_tagged);
9607 {
9608 *var_value = LoadObjectField(object, field_offset);
9609 Goto(&done);
9610 }
9611 BIND(&if_double)Bind(&if_double);
9612 {
9613 TNode<HeapNumber> heap_number =
9614 CAST(LoadObjectField(object, field_offset))Cast(LoadObjectField(object, field_offset));
9615 var_double_value = LoadHeapNumberValue(heap_number);
9616 Goto(&rebox_double);
9617 }
9618 }
9619 BIND(&if_backing_store)Bind(&if_backing_store);
9620 {
9621 Comment("if_backing_store");
9622 TNode<HeapObject> properties = LoadFastProperties(CAST(object)Cast(object));
9623 field_index = Signed(IntPtrSub(field_index, instance_size_in_words));
9624 TNode<Object> value =
9625 LoadPropertyArrayElement(CAST(properties)Cast(properties), field_index);
9626
9627 Label if_double(this), if_tagged(this);
9628 Branch(Word32NotEqual(representation,
9629 Int32Constant(Representation::kDouble)),
9630 &if_tagged, &if_double);
9631 BIND(&if_tagged)Bind(&if_tagged);
9632 {
9633 *var_value = value;
9634 Goto(&done);
9635 }
9636 BIND(&if_double)Bind(&if_double);
9637 {
9638 var_double_value = LoadHeapNumberValue(CAST(value)Cast(value));
9639 Goto(&rebox_double);
9640 }
9641 }
9642 BIND(&rebox_double)Bind(&rebox_double);
9643 {
9644 Comment("rebox_double");
9645 TNode<HeapNumber> heap_number =
9646 AllocateHeapNumberWithValue(var_double_value.value());
9647 *var_value = heap_number;
9648 Goto(&done);
9649 }
9650 }
9651 BIND(&if_in_descriptor)Bind(&if_in_descriptor);
9652 {
9653 *var_value = LoadValueByKeyIndex(descriptors, name_index);
9654 Goto(&done);
9655 }
9656 BIND(&done)Bind(&done);
9657
9658 Comment("] LoadPropertyFromFastObject");
9659}
9660
9661template <typename Dictionary>
9662void CodeStubAssembler::LoadPropertyFromDictionary(
9663 TNode<Dictionary> dictionary, TNode<IntPtrT> name_index,
9664 TVariable<Uint32T>* var_details, TVariable<Object>* var_value) {
9665 Comment("LoadPropertyFromNameDictionary");
9666 *var_details = LoadDetailsByKeyIndex(dictionary, name_index);
9667 *var_value = LoadValueByKeyIndex(dictionary, name_index);
9668
9669 Comment("] LoadPropertyFromNameDictionary");
9670}
9671
9672void CodeStubAssembler::LoadPropertyFromGlobalDictionary(
9673 TNode<GlobalDictionary> dictionary, TNode<IntPtrT> name_index,
9674 TVariable<Uint32T>* var_details, TVariable<Object>* var_value,
9675 Label* if_deleted) {
9676 Comment("[ LoadPropertyFromGlobalDictionary");
9677 TNode<PropertyCell> property_cell =
9678 CAST(LoadFixedArrayElement(dictionary, name_index))Cast(LoadFixedArrayElement(dictionary, name_index));
9679
9680 TNode<Object> value =
9681 LoadObjectField(property_cell, PropertyCell::kValueOffset);
9682 GotoIf(TaggedEqual(value, TheHoleConstant()), if_deleted);
9683
9684 *var_value = value;
9685
9686 TNode<Uint32T> details = Unsigned(LoadAndUntagToWord32ObjectField(
9687 property_cell, PropertyCell::kPropertyDetailsRawOffset));
9688 *var_details = details;
9689
9690 Comment("] LoadPropertyFromGlobalDictionary");
9691}
9692
9693template void CodeStubAssembler::LoadPropertyFromDictionary(
9694 TNode<NameDictionary> dictionary, TNode<IntPtrT> name_index,
9695 TVariable<Uint32T>* var_details, TVariable<Object>* var_value);
9696
9697template void CodeStubAssembler::LoadPropertyFromDictionary(
9698 TNode<SwissNameDictionary> dictionary, TNode<IntPtrT> name_index,
9699 TVariable<Uint32T>* var_details, TVariable<Object>* var_value);
9700
9701// |value| is the property backing store's contents, which is either a value or
9702// an accessor pair, as specified by |details|. |holder| is a JSObject or a
9703// PropertyCell (TODO: use UnionT). Returns either the original value, or the
9704// result of the getter call.
9705TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
9706 TNode<Object> value, TNode<HeapObject> holder, TNode<Uint32T> details,
9707 TNode<Context> context, TNode<Object> receiver, TNode<Object> name,
9708 Label* if_bailout, GetOwnPropertyMode mode) {
9709 TVARIABLE(Object, var_value, value)TVariable<Object> var_value(value, this);
9710 Label done(this), if_accessor_info(this, Label::kDeferred);
9711
9712 TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
9713 GotoIf(
9714 Word32Equal(kind, Int32Constant(static_cast<int>(PropertyKind::kData))),
9715 &done);
9716
9717 // Accessor case.
9718 GotoIfNot(IsAccessorPair(CAST(value)Cast(value)), &if_accessor_info);
9719
9720 // AccessorPair case.
9721 {
9722 if (mode == kCallJSGetterUseCachedName ||
9723 mode == kCallJSGetterDontUseCachedName) {
9724 Label if_callable(this), if_function_template_info(this);
9725 TNode<AccessorPair> accessor_pair = CAST(value)Cast(value);
9726 TNode<HeapObject> getter =
9727 CAST(LoadObjectField(accessor_pair, AccessorPair::kGetterOffset))Cast(LoadObjectField(accessor_pair, AccessorPair::kGetterOffset
))
;
9728 TNode<Map> getter_map = LoadMap(getter);
9729
9730 GotoIf(IsCallableMap(getter_map), &if_callable);
9731 GotoIf(IsFunctionTemplateInfoMap(getter_map), &if_function_template_info);
9732
9733 // Return undefined if the {getter} is not callable.
9734 var_value = UndefinedConstant();
9735 Goto(&done);
9736
9737 BIND(&if_callable)Bind(&if_callable);
9738 {
9739 // Call the accessor. No need to check side-effect mode here, since it
9740 // will be checked later in DebugOnFunctionCall.
9741 var_value = Call(context, getter, receiver);
9742 Goto(&done);
9743 }
9744
9745 BIND(&if_function_template_info)Bind(&if_function_template_info);
9746 {
9747 Label runtime(this, Label::kDeferred);
9748 Label use_cached_property(this);
9749 GotoIf(IsSideEffectFreeDebuggingActive(), &runtime);
9750 TNode<HeapObject> cached_property_name = LoadObjectField<HeapObject>(
9751 getter, FunctionTemplateInfo::kCachedPropertyNameOffset);
9752
9753 Label* has_cached_property = mode == kCallJSGetterUseCachedName
9754 ? &use_cached_property
9755 : if_bailout;
9756 GotoIfNot(IsTheHole(cached_property_name), has_cached_property);
9757
9758 TNode<NativeContext> creation_context =
9759 GetCreationContext(CAST(holder)Cast(holder), if_bailout);
9760 var_value = CallBuiltin(
9761 Builtin::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver,
9762 creation_context, getter, IntPtrConstant(i::JSParameterCount(0)),
9763 receiver);
9764 Goto(&done);
9765
9766 if (mode == kCallJSGetterUseCachedName) {
9767 Bind(&use_cached_property);
9768
9769 var_value = GetProperty(context, holder, cached_property_name);
9770
9771 Goto(&done);
9772 }
9773
9774 BIND(&runtime)Bind(&runtime);
9775 {
9776 var_value = CallRuntime(Runtime::kGetProperty, context, holder, name,
9777 receiver);
9778 Goto(&done);
9779 }
9780 }
9781 } else {
9782 DCHECK_EQ(mode, kReturnAccessorPair)((void) 0);
9783 Goto(&done);
9784 }
9785 }
9786
9787 // AccessorInfo case.
9788 BIND(&if_accessor_info)Bind(&if_accessor_info);
9789 {
9790 TNode<AccessorInfo> accessor_info = CAST(value)Cast(value);
9791 Label if_array(this), if_function(this), if_wrapper(this);
9792
9793 // Dispatch based on {holder} instance type.
9794 TNode<Map> holder_map = LoadMap(holder);
9795 TNode<Uint16T> holder_instance_type = LoadMapInstanceType(holder_map);
9796 GotoIf(IsJSArrayInstanceType(holder_instance_type), &if_array);
9797 GotoIf(IsJSFunctionInstanceType(holder_instance_type), &if_function);
9798 Branch(IsJSPrimitiveWrapperInstanceType(holder_instance_type), &if_wrapper,
9799 if_bailout);
9800
9801 // JSArray AccessorInfo case.
9802 BIND(&if_array)Bind(&if_array);
9803 {
9804 // We only deal with the "length" accessor on JSArray.
9805 GotoIfNot(IsLengthString(
9806 LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9807 if_bailout);
9808 TNode<JSArray> array = CAST(holder)Cast(holder);
9809 var_value = LoadJSArrayLength(array);
9810 Goto(&done);
9811 }
9812
9813 // JSFunction AccessorInfo case.
9814 BIND(&if_function)Bind(&if_function);
9815 {
9816 // We only deal with the "prototype" accessor on JSFunction here.
9817 GotoIfNot(IsPrototypeString(
9818 LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9819 if_bailout);
9820
9821 TNode<JSFunction> function = CAST(holder)Cast(holder);
9822 GotoIfPrototypeRequiresRuntimeLookup(function, holder_map, if_bailout);
9823 var_value = LoadJSFunctionPrototype(function, if_bailout);
9824 Goto(&done);
9825 }
9826
9827 // JSPrimitiveWrapper AccessorInfo case.
9828 BIND(&if_wrapper)Bind(&if_wrapper);
9829 {
9830 // We only deal with the "length" accessor on JSPrimitiveWrapper string
9831 // wrappers.
9832 GotoIfNot(IsLengthString(
9833 LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
9834 if_bailout);
9835 TNode<Object> holder_value = LoadJSPrimitiveWrapperValue(CAST(holder)Cast(holder));
9836 GotoIfNot(TaggedIsNotSmi(holder_value), if_bailout);
9837 GotoIfNot(IsString(CAST(holder_value)Cast(holder_value)), if_bailout);
9838 var_value = LoadStringLengthAsSmi(CAST(holder_value)Cast(holder_value));
9839 Goto(&done);
9840 }
9841 }
9842
9843 BIND(&done)Bind(&done);
9844 return var_value.value();
9845}
9846
9847void CodeStubAssembler::TryGetOwnProperty(
9848 TNode<Context> context, TNode<Object> receiver, TNode<JSReceiver> object,
9849 TNode<Map> map, TNode<Int32T> instance_type, TNode<Name> unique_name,
9850 Label* if_found_value, TVariable<Object>* var_value, Label* if_not_found,
9851 Label* if_bailout) {
9852 TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
9853 if_found_value, var_value, nullptr, nullptr, if_not_found,
9854 if_bailout, kCallJSGetterUseCachedName);
9855}
9856
9857void CodeStubAssembler::TryGetOwnProperty(
9858 TNode<Context> context, TNode<Object> receiver, TNode<JSReceiver> object,
9859 TNode<Map> map, TNode<Int32T> instance_type, TNode<Name> unique_name,
9860 Label* if_found_value, TVariable<Object>* var_value,
9861 TVariable<Uint32T>* var_details, TVariable<Object>* var_raw_value,
9862 Label* if_not_found, Label* if_bailout, GetOwnPropertyMode mode) {
9863 DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep())((void) 0);
9864 Comment("TryGetOwnProperty");
9865 CSA_DCHECK(this, IsUniqueNameNoCachedIndex(unique_name))((void)0);
9866 TVARIABLE(HeapObject, var_meta_storage)TVariable<HeapObject> var_meta_storage(this);
9867 TVARIABLE(IntPtrT, var_entry)TVariable<IntPtrT> var_entry(this);
9868
9869 Label if_found_fast(this), if_found_dict(this), if_found_global(this);
9870
9871 TVARIABLE(Uint32T, local_var_details)TVariable<Uint32T> local_var_details(this);
9872 if (!var_details) {
9873 var_details = &local_var_details;
9874 }
9875 Label if_found(this);
9876
9877 TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
9878 &if_found_dict, &if_found_global, &var_meta_storage,
9879 &var_entry, if_not_found, if_bailout);
9880 BIND(&if_found_fast)Bind(&if_found_fast);
9881 {
9882 TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value())Cast(var_meta_storage.value());
9883 TNode<IntPtrT> name_index = var_entry.value();
9884
9885 LoadPropertyFromFastObject(object, map, descriptors, name_index,
9886 var_details, var_value);
9887 Goto(&if_found);
9888 }
9889 BIND(&if_found_dict)Bind(&if_found_dict);
9890 {
9891 TNode<PropertyDictionary> dictionary = CAST(var_meta_storage.value())Cast(var_meta_storage.value());
9892 TNode<IntPtrT> entry = var_entry.value();
9893 LoadPropertyFromDictionary(dictionary, entry, var_details, var_value);
9894
9895 Goto(&if_found);
9896 }
9897 BIND(&if_found_global)Bind(&if_found_global);
9898 {
9899 TNode<GlobalDictionary> dictionary = CAST(var_meta_storage.value())Cast(var_meta_storage.value());
9900 TNode<IntPtrT> entry = var_entry.value();
9901
9902 LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
9903 if_not_found);
9904 Goto(&if_found);
9905 }
9906 // Here we have details and value which could be an accessor.
9907 BIND(&if_found)Bind(&if_found);
9908 {
9909 // TODO(ishell): Execute C++ accessor in case of accessor info
9910 if (var_raw_value) {
9911 *var_raw_value = *var_value;
9912 }
9913 TNode<Object> value =
9914 CallGetterIfAccessor(var_value->value(), object, var_details->value(),
9915 context, receiver, unique_name, if_bailout, mode);
9916 *var_value = value;
9917 Goto(if_found_value);
9918 }
9919}
9920
9921void CodeStubAssembler::TryLookupElement(
9922 TNode<HeapObject> object, TNode<Map> map, TNode<Int32T> instance_type,
9923 TNode<IntPtrT> intptr_index, Label* if_found, Label* if_absent,
9924 Label* if_not_found, Label* if_bailout) {
9925 // Handle special objects in runtime.
9926 GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
9927
9928 TNode<Int32T> elements_kind = LoadMapElementsKind(map);
9929
9930 // TODO(verwaest): Support other elements kinds as well.
9931 Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
9932 if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
9933 if_typedarray(this), if_rab_gsab_typedarray(this);
9934 // clang-format off
9935 int32_t values[] = {
9936 // Handled by {if_isobjectorsmi}.
9937 PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS, HOLEY_ELEMENTS,
9938 PACKED_NONEXTENSIBLE_ELEMENTS, PACKED_SEALED_ELEMENTS,
9939 HOLEY_NONEXTENSIBLE_ELEMENTS, HOLEY_SEALED_ELEMENTS,
9940 PACKED_FROZEN_ELEMENTS, HOLEY_FROZEN_ELEMENTS,
9941 // Handled by {if_isdouble}.
9942 PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
9943 // Handled by {if_isdictionary}.
9944 DICTIONARY_ELEMENTS,
9945 // Handled by {if_isfaststringwrapper}.
9946 FAST_STRING_WRAPPER_ELEMENTS,
9947 // Handled by {if_isslowstringwrapper}.
9948 SLOW_STRING_WRAPPER_ELEMENTS,
9949 // Handled by {if_not_found}.
9950 NO_ELEMENTS,
9951 // Handled by {if_typed_array}.
9952 UINT8_ELEMENTS,
9953 INT8_ELEMENTS,
9954 UINT16_ELEMENTS,
9955 INT16_ELEMENTS,
9956 UINT32_ELEMENTS,
9957 INT32_ELEMENTS,
9958 FLOAT32_ELEMENTS,
9959 FLOAT64_ELEMENTS,
9960 UINT8_CLAMPED_ELEMENTS,
9961 BIGUINT64_ELEMENTS,
9962 BIGINT64_ELEMENTS,
9963 RAB_GSAB_UINT8_ELEMENTS,
9964 RAB_GSAB_INT8_ELEMENTS,
9965 RAB_GSAB_UINT16_ELEMENTS,
9966 RAB_GSAB_INT16_ELEMENTS,
9967 RAB_GSAB_UINT32_ELEMENTS,
9968 RAB_GSAB_INT32_ELEMENTS,
9969 RAB_GSAB_FLOAT32_ELEMENTS,
9970 RAB_GSAB_FLOAT64_ELEMENTS,
9971 RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
9972 RAB_GSAB_BIGUINT64_ELEMENTS,
9973 RAB_GSAB_BIGINT64_ELEMENTS,
9974 };
9975 Label* labels[] = {
9976 &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
9977 &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
9978 &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
9979 &if_isobjectorsmi,
9980 &if_isdouble, &if_isdouble,
9981 &if_isdictionary,
9982 &if_isfaststringwrapper,
9983 &if_isslowstringwrapper,
9984 if_not_found,
9985 &if_typedarray,
9986 &if_typedarray,
9987 &if_typedarray,
9988 &if_typedarray,
9989 &if_typedarray,
9990 &if_typedarray,
9991 &if_typedarray,
9992 &if_typedarray,
9993 &if_typedarray,
9994 &if_typedarray,
9995 &if_typedarray,
9996 &if_rab_gsab_typedarray,
9997 &if_rab_gsab_typedarray,
9998 &if_rab_gsab_typedarray,
9999 &if_rab_gsab_typedarray,
10000 &if_rab_gsab_typedarray,
10001 &if_rab_gsab_typedarray,
10002 &if_rab_gsab_typedarray,
10003 &if_rab_gsab_typedarray,
10004 &if_rab_gsab_typedarray,
10005 &if_rab_gsab_typedarray,
10006 &if_rab_gsab_typedarray,
10007 };
10008 // clang-format on
10009 STATIC_ASSERT(arraysize(values) == arraysize(labels))static_assert((sizeof(ArraySizeHelper(values))) == (sizeof(ArraySizeHelper
(labels))), "arraysize(values) == arraysize(labels)")
;
10010 Switch(elements_kind, if_bailout, values, labels, arraysize(values)(sizeof(ArraySizeHelper(values))));
10011
10012 BIND(&if_isobjectorsmi)Bind(&if_isobjectorsmi);
10013 {
10014 TNode<FixedArray> elements = CAST(LoadElements(CAST(object)))Cast(LoadElements(Cast(object)));
10015 TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
10016
10017 GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
10018
10019 TNode<Object> element = UnsafeLoadFixedArrayElement(elements, intptr_index);
10020 TNode<Oddball> the_hole = TheHoleConstant();
10021 Branch(TaggedEqual(element, the_hole), if_not_found, if_found);
10022 }
10023 BIND(&if_isdouble)Bind(&if_isdouble);
10024 {
10025 TNode<FixedArrayBase> elements = LoadElements(CAST(object)Cast(object));
10026 TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
10027
10028 GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
10029
10030 // Check if the element is a double hole, but don't load it.
10031 LoadFixedDoubleArrayElement(CAST(elements)Cast(elements), intptr_index, if_not_found,
10032 MachineType::None());
10033 Goto(if_found);
10034 }
10035 BIND(&if_isdictionary)Bind(&if_isdictionary);
10036 {
10037 // Negative and too-large keys must be converted to property names.
10038 if (Is64()) {
10039 GotoIf(UintPtrLessThan(IntPtrConstant(JSObject::kMaxElementIndex),
10040 intptr_index),
10041 if_bailout);
10042 } else {
10043 GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
10044 }
10045
10046 TVARIABLE(IntPtrT, var_entry)TVariable<IntPtrT> var_entry(this);
10047 TNode<NumberDictionary> elements = CAST(LoadElements(CAST(object)))Cast(LoadElements(Cast(object)));
10048 NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
10049 if_not_found);
10050 }
10051 BIND(&if_isfaststringwrapper)Bind(&if_isfaststringwrapper);
10052 {
10053 TNode<String> string = CAST(LoadJSPrimitiveWrapperValue(CAST(object)))Cast(LoadJSPrimitiveWrapperValue(Cast(object)));
10054 TNode<IntPtrT> length = LoadStringLengthAsWord(string);
10055 GotoIf(UintPtrLessThan(intptr_index, length), if_found);
10056 Goto(&if_isobjectorsmi);
10057 }
10058 BIND(&if_isslowstringwrapper)Bind(&if_isslowstringwrapper);
10059 {
10060 TNode<String> string = CAST(LoadJSPrimitiveWrapperValue(CAST(object)))Cast(LoadJSPrimitiveWrapperValue(Cast(object)));
10061 TNode<IntPtrT> length = LoadStringLengthAsWord(string);
10062 GotoIf(UintPtrLessThan(intptr_index, length), if_found);
10063 Goto(&if_isdictionary);
10064 }
10065 BIND(&if_typedarray)Bind(&if_typedarray);
10066 {
10067 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(CAST(object)Cast(object));
10068 GotoIf(IsDetachedBuffer(buffer), if_absent);
10069
10070 TNode<UintPtrT> length = LoadJSTypedArrayLength(CAST(object)Cast(object));
10071 Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
10072 }
10073 BIND(&if_rab_gsab_typedarray)Bind(&if_rab_gsab_typedarray);
10074 {
10075 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(CAST(object)Cast(object));
10076 TNode<UintPtrT> length =
10077 LoadVariableLengthJSTypedArrayLength(CAST(object)Cast(object), buffer, if_absent);
10078 Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
10079 }
10080 BIND(&if_oob)Bind(&if_oob);
10081 {
10082 // Positive OOB indices mean "not found", negative indices and indices
10083 // out of array index range must be converted to property names.
10084 if (Is64()) {
10085 GotoIf(UintPtrLessThan(IntPtrConstant(JSObject::kMaxElementIndex),
10086 intptr_index),
10087 if_bailout);
10088 } else {
10089 GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
10090 }
10091 Goto(if_not_found);
10092 }
10093}
10094
10095void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
10096 Label* if_maybe_special_index,
10097 Label* if_not_special_index) {
10098 // TODO(cwhan.tunz): Implement fast cases more.
10099
10100 // If a name is empty or too long, it's not a special index
10101 // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
10102 const int kBufferSize = 24;
10103 TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
10104 GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
10105 GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
10106 if_not_special_index);
10107
10108 // If the first character of name is not a digit or '-', or we can't match it
10109 // to Infinity or NaN, then this is not a special index.
10110 TNode<Int32T> first_char = StringCharCodeAt(name_string, UintPtrConstant(0));
10111 // If the name starts with '-', it can be a negative index.
10112 GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
10113 // If the name starts with 'I', it can be "Infinity".
10114 GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
10115 // If the name starts with 'N', it can be "NaN".
10116 GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
10117 // Finally, if the first character is not a digit either, then we are sure
10118 // that the name is not a special index.
10119 GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
10120 GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
10121 Goto(if_maybe_special_index);
10122}
10123
10124void CodeStubAssembler::TryPrototypeChainLookup(
10125 TNode<Object> receiver, TNode<Object> object_arg, TNode<Object> key,
10126 const LookupPropertyInHolder& lookup_property_in_holder,
10127 const LookupElementInHolder& lookup_element_in_holder, Label* if_end,
10128 Label* if_bailout, Label* if_proxy, bool handle_private_names) {
10129 // Ensure receiver is JSReceiver, otherwise bailout.
10130 GotoIf(TaggedIsSmi(receiver), if_bailout);
10131 TNode<HeapObject> object = CAST(object_arg)Cast(object_arg);
10132
10133 TNode<Map> map = LoadMap(object);
10134 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
10135 {
10136 Label if_objectisreceiver(this);
10137 Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
10138 if_bailout);
10139 BIND(&if_objectisreceiver)Bind(&if_objectisreceiver);
10140
10141 GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
10142 }
10143
10144 TVARIABLE(IntPtrT, var_index)TVariable<IntPtrT> var_index(this);
10145 TVARIABLE(Name, var_unique)TVariable<Name> var_unique(this);
10146
10147 Label if_keyisindex(this), if_iskeyunique(this);
10148 TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
10149 if_bailout);
10150
10151 BIND(&if_iskeyunique)Bind(&if_iskeyunique);
10152 {
10153 TVARIABLE(HeapObject, var_holder, object)TVariable<HeapObject> var_holder(object, this);
10154 TVARIABLE(Map, var_holder_map, map)TVariable<Map> var_holder_map(map, this);
10155 TVARIABLE(Int32T, var_holder_instance_type, instance_type)TVariable<Int32T> var_holder_instance_type(instance_type
, this)
;
10156
10157 Label loop(this, {&var_holder, &var_holder_map, &var_holder_instance_type});
10158 Goto(&loop);
10159 BIND(&loop)Bind(&loop);
10160 {
10161 TNode<Map> holder_map = var_holder_map.value();
10162 TNode<Int32T> holder_instance_type = var_holder_instance_type.value();
10163
10164 Label next_proto(this), check_integer_indexed_exotic(this);
10165 lookup_property_in_holder(CAST(receiver)Cast(receiver), var_holder.value(), holder_map,
10166 holder_instance_type, var_unique.value(),
10167 &check_integer_indexed_exotic, if_bailout);
10168
10169 BIND(&check_integer_indexed_exotic)Bind(&check_integer_indexed_exotic);
10170 {
10171 // Bailout if it can be an integer indexed exotic case.
10172 GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
10173 &next_proto);
10174 GotoIfNot(IsString(var_unique.value()), &next_proto);
10175 BranchIfMaybeSpecialIndex(CAST(var_unique.value())Cast(var_unique.value()), if_bailout,
10176 &next_proto);
10177 }
10178
10179 BIND(&next_proto)Bind(&next_proto);
10180
10181 if (handle_private_names) {
10182 // Private name lookup doesn't walk the prototype chain.
10183 GotoIf(IsPrivateSymbol(CAST(key)Cast(key)), if_end);
10184 }
10185
10186 TNode<HeapObject> proto = LoadMapPrototype(holder_map);
10187
10188 GotoIf(IsNull(proto), if_end);
10189
10190 TNode<Map> proto_map = LoadMap(proto);
10191 TNode<Uint16T> proto_instance_type = LoadMapInstanceType(proto_map);
10192
10193 var_holder = proto;
10194 var_holder_map = proto_map;
10195 var_holder_instance_type = proto_instance_type;
10196 Goto(&loop);
10197 }
10198 }
10199 BIND(&if_keyisindex)Bind(&if_keyisindex);
10200 {
10201 TVARIABLE(HeapObject, var_holder, object)TVariable<HeapObject> var_holder(object, this);
10202 TVARIABLE(Map, var_holder_map, map)TVariable<Map> var_holder_map(map, this);
10203 TVARIABLE(Int32T, var_holder_instance_type, instance_type)TVariable<Int32T> var_holder_instance_type(instance_type
, this)
;
10204
10205 Label loop(this, {&var_holder, &var_holder_map, &var_holder_instance_type});
10206 Goto(&loop);
10207 BIND(&loop)Bind(&loop);
10208 {
10209 Label next_proto(this);
10210 lookup_element_in_holder(CAST(receiver)Cast(receiver), var_holder.value(),
10211 var_holder_map.value(),
10212 var_holder_instance_type.value(),
10213 var_index.value(), &next_proto, if_bailout);
10214 BIND(&next_proto)Bind(&next_proto);
10215
10216 TNode<HeapObject> proto = LoadMapPrototype(var_holder_map.value());
10217
10218 GotoIf(IsNull(proto), if_end);
10219
10220 TNode<Map> proto_map = LoadMap(proto);
10221 TNode<Uint16T> proto_instance_type = LoadMapInstanceType(proto_map);
10222
10223 var_holder = proto;
10224 var_holder_map = proto_map;
10225 var_holder_instance_type = proto_instance_type;
10226 Goto(&loop);
10227 }
10228 }
10229}
10230
10231TNode<Oddball> CodeStubAssembler::HasInPrototypeChain(TNode<Context> context,
10232 TNode<HeapObject> object,
10233 TNode<Object> prototype) {
10234 TVARIABLE(Oddball, var_result)TVariable<Oddball> var_result(this);
10235 Label return_false(this), return_true(this),
10236 return_runtime(this, Label::kDeferred), return_result(this);
10237
10238 // Loop through the prototype chain looking for the {prototype}.
10239 TVARIABLE(Map, var_object_map, LoadMap(object))TVariable<Map> var_object_map(LoadMap(object), this);
10240 Label loop(this, &var_object_map);
10241 Goto(&loop);
10242 BIND(&loop)Bind(&loop);
10243 {
10244 // Check if we can determine the prototype directly from the {object_map}.
10245 Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
10246 TNode<Map> object_map = var_object_map.value();
10247 TNode<Uint16T> object_instance_type = LoadMapInstanceType(object_map);
10248 Branch(IsSpecialReceiverInstanceType(object_instance_type),
10249 &if_objectisspecial, &if_objectisdirect);
10250 BIND(&if_objectisspecial)Bind(&if_objectisspecial);
10251 {
10252 // The {object_map} is a special receiver map or a primitive map, check
10253 // if we need to use the if_objectisspecial path in the runtime.
10254 GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
10255 &return_runtime);
10256 TNode<Int32T> object_bitfield = LoadMapBitField(object_map);
10257 int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
10258 Map::Bits1::IsAccessCheckNeededBit::kMask;
10259 Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
10260 &if_objectisdirect);
10261 }
10262 BIND(&if_objectisdirect)Bind(&if_objectisdirect);
10263
10264 // Check the current {object} prototype.
10265 TNode<HeapObject> object_prototype = LoadMapPrototype(object_map);
10266 GotoIf(IsNull(object_prototype), &return_false);
10267 GotoIf(TaggedEqual(object_prototype, prototype), &return_true);
10268
10269 // Continue with the prototype.
10270 CSA_DCHECK(this, TaggedIsNotSmi(object_prototype))((void)0);
10271 var_object_map = LoadMap(object_prototype);
10272 Goto(&loop);
10273 }
10274
10275 BIND(&return_true)Bind(&return_true);
10276 var_result = TrueConstant();
10277 Goto(&return_result);
10278
10279 BIND(&return_false)Bind(&return_false);
10280 var_result = FalseConstant();
10281 Goto(&return_result);
10282
10283 BIND(&return_runtime)Bind(&return_runtime);
10284 {
10285 // Fallback to the runtime implementation.
10286 var_result = CAST(Cast(CallRuntime(Runtime::kHasInPrototypeChain, context, object
, prototype))
10287 CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype))Cast(CallRuntime(Runtime::kHasInPrototypeChain, context, object
, prototype))
;
10288 }
10289 Goto(&return_result);
10290
10291 BIND(&return_result)Bind(&return_result);
10292 return var_result.value();
10293}
10294
10295TNode<Oddball> CodeStubAssembler::OrdinaryHasInstance(
10296 TNode<Context> context, TNode<Object> callable_maybe_smi,
10297 TNode<Object> object_maybe_smi) {
10298 TVARIABLE(Oddball, var_result)TVariable<Oddball> var_result(this);
10299 Label return_runtime(this, Label::kDeferred), return_result(this);
10300
10301 GotoIfForceSlowPath(&return_runtime);
10302
10303 // Goto runtime if {object} is a Smi.
10304 GotoIf(TaggedIsSmi(object_maybe_smi), &return_runtime);
10305
10306 // Goto runtime if {callable} is a Smi.
10307 GotoIf(TaggedIsSmi(callable_maybe_smi), &return_runtime);
10308
10309 {
10310 // Load map of {callable}.
10311 TNode<HeapObject> object = CAST(object_maybe_smi)Cast(object_maybe_smi);
10312 TNode<HeapObject> callable = CAST(callable_maybe_smi)Cast(callable_maybe_smi);
10313 TNode<Map> callable_map = LoadMap(callable);
10314
10315 // Goto runtime if {callable} is not a JSFunction.
10316 TNode<Uint16T> callable_instance_type = LoadMapInstanceType(callable_map);
10317 GotoIfNot(IsJSFunctionInstanceType(callable_instance_type),
10318 &return_runtime);
10319
10320 GotoIfPrototypeRequiresRuntimeLookup(CAST(callable)Cast(callable), callable_map,
10321 &return_runtime);
10322
10323 // Get the "prototype" (or initial map) of the {callable}.
10324 TNode<HeapObject> callable_prototype = LoadObjectField<HeapObject>(
10325 callable, JSFunction::kPrototypeOrInitialMapOffset);
10326 {
10327 Label no_initial_map(this), walk_prototype_chain(this);
10328 TVARIABLE(HeapObject, var_callable_prototype, callable_prototype)TVariable<HeapObject> var_callable_prototype(callable_prototype
, this)
;
10329
10330 // Resolve the "prototype" if the {callable} has an initial map.
10331 GotoIfNot(IsMap(callable_prototype), &no_initial_map);
10332 var_callable_prototype = LoadObjectField<HeapObject>(
10333 callable_prototype, Map::kPrototypeOffset);
10334 Goto(&walk_prototype_chain);
10335
10336 BIND(&no_initial_map)Bind(&no_initial_map);
10337 // {callable_prototype} is the hole if the "prototype" property hasn't
10338 // been requested so far.
10339 Branch(TaggedEqual(callable_prototype, TheHoleConstant()),
10340 &return_runtime, &walk_prototype_chain);
10341
10342 BIND(&walk_prototype_chain)Bind(&walk_prototype_chain);
10343 callable_prototype = var_callable_prototype.value();
10344 }
10345
10346 // Loop through the prototype chain looking for the {callable} prototype.
10347 var_result = HasInPrototypeChain(context, object, callable_prototype);
10348 Goto(&return_result);
10349 }
10350
10351 BIND(&return_runtime)Bind(&return_runtime);
10352 {
10353 // Fallback to the runtime implementation.
10354 var_result = CAST(CallRuntime(Runtime::kOrdinaryHasInstance, context,Cast(CallRuntime(Runtime::kOrdinaryHasInstance, context, callable_maybe_smi
, object_maybe_smi))
10355 callable_maybe_smi, object_maybe_smi))Cast(CallRuntime(Runtime::kOrdinaryHasInstance, context, callable_maybe_smi
, object_maybe_smi))
;
10356 }
10357 Goto(&return_result);
10358
10359 BIND(&return_result)Bind(&return_result);
10360 return var_result.value();
10361}
10362
10363template <typename TIndex>
10364TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(
10365 TNode<TIndex> index_node, ElementsKind kind, int base_size) {
10366 // TODO(v8:9708): Remove IntPtrT variant in favor of UintPtrT.
10367 static_assert(std::is_same<TIndex, Smi>::value ||
10368 std::is_same<TIndex, TaggedIndex>::value ||
10369 std::is_same<TIndex, IntPtrT>::value ||
10370 std::is_same<TIndex, UintPtrT>::value,
10371 "Only Smi, UintPtrT or IntPtrT index nodes are allowed");
10372 int element_size_shift = ElementsKindToShiftSize(kind);
10373 int element_size = 1 << element_size_shift;
10374 intptr_t index = 0;
10375 TNode<IntPtrT> intptr_index_node;
10376 bool constant_index = false;
10377 if (std::is_same<TIndex, Smi>::value) {
10378 TNode<Smi> smi_index_node = ReinterpretCast<Smi>(index_node);
10379 int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
10380 element_size_shift -= kSmiShiftBits;
10381 Smi smi_index;
10382 constant_index = TryToSmiConstant(smi_index_node, &smi_index);
10383 if (constant_index) {
10384 index = smi_index.value();
10385 } else {
10386 if (COMPRESS_POINTERS_BOOLfalse) {
10387 smi_index_node = NormalizeSmiIndex(smi_index_node);
10388 }
10389 }
10390 intptr_index_node = BitcastTaggedToWordForTagAndSmiBits(smi_index_node);
10391 } else if (std::is_same<TIndex, TaggedIndex>::value) {
10392 TNode<TaggedIndex> tagged_index_node =
10393 ReinterpretCast<TaggedIndex>(index_node);
10394 element_size_shift -= kSmiTagSize;
10395 intptr_index_node = BitcastTaggedToWordForTagAndSmiBits(tagged_index_node);
10396 constant_index = TryToIntPtrConstant(intptr_index_node, &index);
10397 } else {
10398 intptr_index_node = ReinterpretCast<IntPtrT>(index_node);
10399 constant_index = TryToIntPtrConstant(intptr_index_node, &index);
10400 }
10401 if (constant_index) {
10402 return IntPtrConstant(base_size + element_size * index);
10403 }
10404
10405 TNode<IntPtrT> shifted_index =
10406 (element_size_shift == 0)
10407 ? intptr_index_node
10408 : ((element_size_shift > 0)
10409 ? WordShl(intptr_index_node,
10410 IntPtrConstant(element_size_shift))
10411 : WordSar(intptr_index_node,
10412 IntPtrConstant(-element_size_shift)));
10413 return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
10414}
10415
10416// Instantiate ElementOffsetFromIndex for Smi and IntPtrT.
10417template V8_EXPORT_PRIVATE TNode<IntPtrT>
10418CodeStubAssembler::ElementOffsetFromIndex<Smi>(TNode<Smi> index_node,
10419 ElementsKind kind,
10420 int base_size);
10421template V8_EXPORT_PRIVATE TNode<IntPtrT>
10422CodeStubAssembler::ElementOffsetFromIndex<TaggedIndex>(
10423 TNode<TaggedIndex> index_node, ElementsKind kind, int base_size);
10424template V8_EXPORT_PRIVATE TNode<IntPtrT>
10425CodeStubAssembler::ElementOffsetFromIndex<IntPtrT>(TNode<IntPtrT> index_node,
10426 ElementsKind kind,
10427 int base_size);
10428
10429TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(TNode<IntPtrT> offset,
10430 TNode<IntPtrT> length,
10431 int header_size,
10432 ElementsKind kind) {
10433 // Make sure we point to the last field.
10434 int element_size = 1 << ElementsKindToShiftSize(kind);
10435 int correction = header_size - kHeapObjectTag - element_size;
10436 TNode<IntPtrT> last_offset = ElementOffsetFromIndex(length, kind, correction);
10437 return IntPtrLessThanOrEqual(offset, last_offset);
10438}
10439
10440TNode<HeapObject> CodeStubAssembler::LoadFeedbackCellValue(
10441 TNode<JSFunction> closure) {
10442 TNode<FeedbackCell> feedback_cell =
10443 LoadObjectField<FeedbackCell>(closure, JSFunction::kFeedbackCellOffset);
10444 return LoadObjectField<HeapObject>(feedback_cell, FeedbackCell::kValueOffset);
10445}
10446
10447TNode<HeapObject> CodeStubAssembler::LoadFeedbackVector(
10448 TNode<JSFunction> closure) {
10449 TVARIABLE(HeapObject, maybe_vector, LoadFeedbackCellValue(closure))TVariable<HeapObject> maybe_vector(LoadFeedbackCellValue
(closure), this)
;
10450 Label done(this);
10451
10452 // If the closure doesn't have a feedback vector allocated yet, return
10453 // undefined. FeedbackCell can contain Undefined / FixedArray (for lazy
10454 // allocations) / FeedbackVector.
10455 GotoIf(IsFeedbackVector(maybe_vector.value()), &done);
10456
10457 // In all other cases return Undefined.
10458 maybe_vector = UndefinedConstant();
10459 Goto(&done);
10460
10461 BIND(&done)Bind(&done);
10462 return maybe_vector.value();
10463}
10464
10465TNode<ClosureFeedbackCellArray> CodeStubAssembler::LoadClosureFeedbackArray(
10466 TNode<JSFunction> closure) {
10467 TVARIABLE(HeapObject, feedback_cell_array, LoadFeedbackCellValue(closure))TVariable<HeapObject> feedback_cell_array(LoadFeedbackCellValue
(closure), this)
;
10468 Label end(this);
10469
10470 // When feedback vectors are not yet allocated feedback cell contains a
10471 // an array of feedback cells used by create closures.
10472 GotoIf(HasInstanceType(feedback_cell_array.value(),
10473 CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
10474 &end);
10475
10476 // Load FeedbackCellArray from feedback vector.
10477 TNode<FeedbackVector> vector = CAST(feedback_cell_array.value())Cast(feedback_cell_array.value());
10478 feedback_cell_array = CAST(Cast(LoadObjectField(vector, FeedbackVector::kClosureFeedbackCellArrayOffset
))
10479 LoadObjectField(vector, FeedbackVector::kClosureFeedbackCellArrayOffset))Cast(LoadObjectField(vector, FeedbackVector::kClosureFeedbackCellArrayOffset
))
;
10480 Goto(&end);
10481
10482 BIND(&end)Bind(&end);
10483 return CAST(feedback_cell_array.value())Cast(feedback_cell_array.value());
10484}
10485
10486TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
10487 TNode<JSFunction> function =
10488 CAST(LoadFromParentFrame(StandardFrameConstants::kFunctionOffset))Cast(LoadFromParentFrame(StandardFrameConstants::kFunctionOffset
))
;
10489 return CAST(LoadFeedbackVector(function))Cast(LoadFeedbackVector(function));
10490}
10491
10492TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorFromBaseline() {
10493 return CAST(Cast(LoadFromParentFrame(InterpreterFrameConstants::kBytecodeOffsetFromFp
))
10494 LoadFromParentFrame(InterpreterFrameConstants::kBytecodeOffsetFromFp))Cast(LoadFromParentFrame(InterpreterFrameConstants::kBytecodeOffsetFromFp
))
;
10495}
10496
10497TNode<Context> CodeStubAssembler::LoadContextFromBaseline() {
10498 return CAST(LoadFromParentFrame(InterpreterFrameConstants::kContextOffset))Cast(LoadFromParentFrame(InterpreterFrameConstants::kContextOffset
))
;
10499}
10500
10501TNode<FeedbackVector>
10502CodeStubAssembler::LoadFeedbackVectorForStubWithTrampoline() {
10503 TNode<RawPtrT> frame_pointer = LoadParentFramePointer();
10504 TNode<RawPtrT> parent_frame_pointer = Load<RawPtrT>(frame_pointer);
10505 TNode<JSFunction> function = CAST(Cast(LoadFullTagged(parent_frame_pointer, IntPtrConstant(StandardFrameConstants
::kFunctionOffset)))
10506 LoadFullTagged(parent_frame_pointer,Cast(LoadFullTagged(parent_frame_pointer, IntPtrConstant(StandardFrameConstants
::kFunctionOffset)))
10507 IntPtrConstant(StandardFrameConstants::kFunctionOffset)))Cast(LoadFullTagged(parent_frame_pointer, IntPtrConstant(StandardFrameConstants
::kFunctionOffset)))
;
10508 return CAST(LoadFeedbackVector(function))Cast(LoadFeedbackVector(function));
10509}
10510
10511void CodeStubAssembler::UpdateFeedback(TNode<Smi> feedback,
10512 TNode<HeapObject> maybe_feedback_vector,
10513 TNode<UintPtrT> slot_id,
10514 UpdateFeedbackMode mode) {
10515 switch (mode) {
10516 case UpdateFeedbackMode::kOptionalFeedback:
10517 MaybeUpdateFeedback(feedback, maybe_feedback_vector, slot_id);
10518 break;
10519 case UpdateFeedbackMode::kGuaranteedFeedback:
10520 CSA_DCHECK(this, IsFeedbackVector(maybe_feedback_vector))((void)0);
10521 UpdateFeedback(feedback, CAST(maybe_feedback_vector)Cast(maybe_feedback_vector), slot_id);
10522 break;
10523 }
10524}
10525
10526void CodeStubAssembler::MaybeUpdateFeedback(TNode<Smi> feedback,
10527 TNode<HeapObject> maybe_vector,
10528 TNode<UintPtrT> slot_id) {
10529 Label end(this);
10530 GotoIf(IsUndefined(maybe_vector), &end);
10531 {
10532 UpdateFeedback(feedback, CAST(maybe_vector)Cast(maybe_vector), slot_id);
10533 Goto(&end);
10534 }
10535 BIND(&end)Bind(&end);
10536}
10537
10538void CodeStubAssembler::UpdateFeedback(TNode<Smi> feedback,
10539 TNode<FeedbackVector> feedback_vector,
10540 TNode<UintPtrT> slot_id) {
10541 Label end(this);
10542
10543 // This method is used for binary op and compare feedback. These
10544 // vector nodes are initialized with a smi 0, so we can simply OR
10545 // our new feedback in place.
10546 TNode<MaybeObject> feedback_element =
10547 LoadFeedbackVectorSlot(feedback_vector, slot_id);
10548 TNode<Smi> previous_feedback = CAST(feedback_element)Cast(feedback_element);
10549 TNode<Smi> combined_feedback = SmiOr(previous_feedback, feedback);
10550
10551 GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
10552 {
10553 StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
10554 SKIP_WRITE_BARRIER);
10555 ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
10556 Goto(&end);
10557 }
10558
10559 BIND(&end)Bind(&end);
10560}
10561
10562void CodeStubAssembler::ReportFeedbackUpdate(
10563 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot_id,
10564 const char* reason) {
10565 // Reset profiler ticks.
10566 StoreObjectFieldNoWriteBarrier(
10567 feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0));
10568
10569#ifdef V8_TRACE_FEEDBACK_UPDATES
10570 // Trace the update.
10571 CallRuntime(Runtime::kTraceUpdateFeedback, NoContextConstant(),
10572 LoadFromParentFrame(StandardFrameConstants::kFunctionOffset),
10573 SmiTag(Signed(slot_id)), StringConstant(reason));
10574#endif // V8_TRACE_FEEDBACK_UPDATES
10575}
10576
10577void CodeStubAssembler::OverwriteFeedback(TVariable<Smi>* existing_feedback,
10578 int new_feedback) {
10579 if (existing_feedback == nullptr) return;
10580 *existing_feedback = SmiConstant(new_feedback);
10581}
10582
10583void CodeStubAssembler::CombineFeedback(TVariable<Smi>* existing_feedback,
10584 int feedback) {
10585 if (existing_feedback == nullptr) return;
10586 *existing_feedback = SmiOr(existing_feedback->value(), SmiConstant(feedback));
10587}
10588
10589void CodeStubAssembler::CombineFeedback(TVariable<Smi>* existing_feedback,
10590 TNode<Smi> feedback) {
10591 if (existing_feedback == nullptr) return;
10592 *existing_feedback = SmiOr(existing_feedback->value(), feedback);
10593}
10594
10595void CodeStubAssembler::CheckForAssociatedProtector(TNode<Name> name,
10596 Label* if_protector) {
10597 // This list must be kept in sync with LookupIterator::UpdateProtector!
10598 // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
10599 GotoIf(TaggedEqual(name, ConstructorStringConstant()), if_protector);
10600 GotoIf(TaggedEqual(name, IteratorSymbolConstant()), if_protector);
10601 GotoIf(TaggedEqual(name, NextStringConstant()), if_protector);
10602 GotoIf(TaggedEqual(name, SpeciesSymbolConstant()), if_protector);
10603 GotoIf(TaggedEqual(name, IsConcatSpreadableSymbolConstant()), if_protector);
10604 GotoIf(TaggedEqual(name, ResolveStringConstant()), if_protector);
10605 GotoIf(TaggedEqual(name, ThenStringConstant()), if_protector);
10606 // Fall through if no case matched.
10607}
10608
10609TNode<Map> CodeStubAssembler::LoadReceiverMap(TNode<Object> receiver) {
10610 return Select<Map>(
10611 TaggedIsSmi(receiver), [=] { return HeapNumberMapConstant(); },
10612 [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
10613}
10614
10615TNode<IntPtrT> CodeStubAssembler::TryToIntptr(
10616 TNode<Object> key, Label* if_not_intptr,
10617 TVariable<Int32T>* var_instance_type) {
10618 TVARIABLE(IntPtrT, var_intptr_key)TVariable<IntPtrT> var_intptr_key(this);
10619 Label done(this, &var_intptr_key), key_is_smi(this), key_is_heapnumber(this);
10620 GotoIf(TaggedIsSmi(key), &key_is_smi);
10621
10622 TNode<Int32T> instance_type = LoadInstanceType(CAST(key)Cast(key));
10623 if (var_instance_type != nullptr) {
10624 *var_instance_type = instance_type;
10625 }
10626
10627 Branch(IsHeapNumberInstanceType(instance_type), &key_is_heapnumber,
10628 if_not_intptr);
10629
10630 BIND(&key_is_smi)Bind(&key_is_smi);
10631 {
10632 var_intptr_key = SmiUntag(CAST(key)Cast(key));
10633 Goto(&done);
10634 }
10635
10636 BIND(&key_is_heapnumber)Bind(&key_is_heapnumber);
10637 {
10638 TNode<Float64T> value = LoadHeapNumberValue(CAST(key)Cast(key));
10639 TNode<IntPtrT> int_value = ChangeFloat64ToIntPtr(value);
10640 GotoIfNot(Float64Equal(value, RoundIntPtrToFloat64(int_value)),
10641 if_not_intptr);
10642#if V8_TARGET_ARCH_64_BIT1
10643 // We can't rely on Is64() alone because 32-bit compilers rightly complain
10644 // about kMaxSafeIntegerUint64 not fitting into an intptr_t.
10645 DCHECK(Is64())((void) 0);
10646 // TODO(jkummerow): Investigate whether we can drop support for
10647 // negative indices.
10648 GotoIfNot(IsInRange(int_value, static_cast<intptr_t>(-kMaxSafeInteger),
10649 static_cast<intptr_t>(kMaxSafeIntegerUint64)),
10650 if_not_intptr);
10651#else
10652 DCHECK(!Is64())((void) 0);
10653#endif
10654 var_intptr_key = int_value;
10655 Goto(&done);
10656 }
10657
10658 BIND(&done)Bind(&done);
10659 return var_intptr_key.value();
10660}
10661
10662TNode<Context> CodeStubAssembler::LoadScriptContext(
10663 TNode<Context> context, TNode<IntPtrT> context_index) {
10664 TNode<NativeContext> native_context = LoadNativeContext(context);
10665 TNode<ScriptContextTable> script_context_table = CAST(Cast(LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX
))
10666 LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX))Cast(LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX
))
;
10667
10668 TNode<Context> script_context = CAST(LoadFixedArrayElement(Cast(LoadFixedArrayElement( script_context_table, context_index
, ScriptContextTable::kFirstContextSlotIndex * kTaggedSize))
10669 script_context_table, context_index,Cast(LoadFixedArrayElement( script_context_table, context_index
, ScriptContextTable::kFirstContextSlotIndex * kTaggedSize))
10670 ScriptContextTable::kFirstContextSlotIndex * kTaggedSize))Cast(LoadFixedArrayElement( script_context_table, context_index
, ScriptContextTable::kFirstContextSlotIndex * kTaggedSize))
;
10671 return script_context;
10672}
10673
10674namespace {
10675
10676// Converts typed array elements kind to a machine representations.
10677MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
10678 switch (kind) {
10679 case UINT8_CLAMPED_ELEMENTS:
10680 case UINT8_ELEMENTS:
10681 case INT8_ELEMENTS:
10682 return MachineRepresentation::kWord8;
10683 case UINT16_ELEMENTS:
10684 case INT16_ELEMENTS:
10685 return MachineRepresentation::kWord16;
10686 case UINT32_ELEMENTS:
10687 case INT32_ELEMENTS:
10688 return MachineRepresentation::kWord32;
10689 case FLOAT32_ELEMENTS:
10690 return MachineRepresentation::kFloat32;
10691 case FLOAT64_ELEMENTS:
10692 return MachineRepresentation::kFloat64;
10693 default:
10694 UNREACHABLE()V8_Fatal("unreachable code");
10695 }
10696}
10697
10698} // namespace
10699
10700// TODO(solanes): Since we can't use `if constexpr` until we enable C++17 we
10701// have to specialize the BigInt and Word32T cases. Since we can't partly
10702// specialize, we have to specialize all used combinations.
10703template <typename TIndex>
10704void CodeStubAssembler::StoreElementTypedArrayBigInt(TNode<RawPtrT> elements,
10705 ElementsKind kind,
10706 TNode<TIndex> index,
10707 TNode<BigInt> value) {
10708 static_assert(std::is_same<TIndex, UintPtrT>::value ||
10709 std::is_same<TIndex, IntPtrT>::value,
10710 "Only UintPtrT or IntPtrT indices is allowed");
10711 DCHECK(kind == BIGINT64_ELEMENTS || kind == BIGUINT64_ELEMENTS)((void) 0);
10712 TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
10713 TVARIABLE(UintPtrT, var_low)TVariable<UintPtrT> var_low(this);
10714 // Only used on 32-bit platforms.
10715 TVARIABLE(UintPtrT, var_high)TVariable<UintPtrT> var_high(this);
10716 BigIntToRawBytes(value, &var_low, &var_high);
10717
10718 MachineRepresentation rep = WordT::kMachineRepresentation;
10719#if defined(V8_TARGET_BIG_ENDIAN)
10720 if (!Is64()) {
10721 StoreNoWriteBarrier(rep, elements, offset, var_high.value());
10722 StoreNoWriteBarrier(rep, elements,
10723 IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
10724 var_low.value());
10725 } else {
10726 StoreNoWriteBarrier(rep, elements, offset, var_low.value());
10727 }
10728#else
10729 StoreNoWriteBarrier(rep, elements, offset, var_low.value());
10730 if (!Is64()) {
10731 StoreNoWriteBarrier(rep, elements,
10732 IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
10733 var_high.value());
10734 }
10735#endif
10736}
10737
10738template <>
10739void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
10740 ElementsKind kind,
10741 TNode<UintPtrT> index,
10742 TNode<BigInt> value) {
10743 StoreElementTypedArrayBigInt(elements, kind, index, value);
10744}
10745
10746template <>
10747void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
10748 ElementsKind kind,
10749 TNode<IntPtrT> index,
10750 TNode<BigInt> value) {
10751 StoreElementTypedArrayBigInt(elements, kind, index, value);
10752}
10753
10754template <typename TIndex>
10755void CodeStubAssembler::StoreElementTypedArrayWord32(TNode<RawPtrT> elements,
10756 ElementsKind kind,
10757 TNode<TIndex> index,
10758 TNode<Word32T> value) {
10759 static_assert(std::is_same<TIndex, UintPtrT>::value ||
10760 std::is_same<TIndex, IntPtrT>::value,
10761 "Only UintPtrT or IntPtrT indices is allowed");
10762 DCHECK(IsTypedArrayElementsKind(kind))((void) 0);
10763 if (kind == UINT8_CLAMPED_ELEMENTS) {
10764 CSA_DCHECK(this, Word32Equal(value, Word32And(Int32Constant(0xFF), value)))((void)0);
10765 }
10766 TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
10767 // TODO(cbruni): Add OOB check once typed.
10768 MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
10769 StoreNoWriteBarrier(rep, elements, offset, value);
10770}
10771
10772template <>
10773void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
10774 ElementsKind kind,
10775 TNode<UintPtrT> index,
10776 TNode<Word32T> value) {
10777 StoreElementTypedArrayWord32(elements, kind, index, value);
10778}
10779
10780template <>
10781void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
10782 ElementsKind kind,
10783 TNode<IntPtrT> index,
10784 TNode<Word32T> value) {
10785 StoreElementTypedArrayWord32(elements, kind, index, value);
10786}
10787
10788template <typename TArray, typename TIndex, typename TValue>
10789void CodeStubAssembler::StoreElementTypedArray(TNode<TArray> elements,
10790 ElementsKind kind,
10791 TNode<TIndex> index,
10792 TNode<TValue> value) {
10793 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
10794 static_assert(std::is_same<TIndex, Smi>::value ||
10795 std::is_same<TIndex, UintPtrT>::value ||
10796 std::is_same<TIndex, IntPtrT>::value,
10797 "Only Smi, UintPtrT or IntPtrT indices is allowed");
10798 static_assert(std::is_same<TArray, RawPtrT>::value ||
10799 std::is_same<TArray, FixedArrayBase>::value,
10800 "Only RawPtrT or FixedArrayBase elements are allowed");
10801 static_assert(std::is_same<TValue, Int32T>::value ||
10802 std::is_same<TValue, Float32T>::value ||
10803 std::is_same<TValue, Float64T>::value ||
10804 std::is_same<TValue, Object>::value,
10805 "Only Int32T, Float32T, Float64T or object value "
10806 "types are allowed");
10807 DCHECK(IsTypedArrayElementsKind(kind))((void) 0);
10808 TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
10809 // TODO(cbruni): Add OOB check once typed.
10810 MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
10811 StoreNoWriteBarrier(rep, elements, offset, value);
10812}
10813
10814template <typename TIndex>
10815void CodeStubAssembler::StoreElement(TNode<FixedArrayBase> elements,
10816 ElementsKind kind, TNode<TIndex> index,
10817 TNode<Object> value) {
10818 static_assert(
10819 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
10820 "Only Smi or IntPtrT indices are allowed");
10821 DCHECK(!IsDoubleElementsKind(kind))((void) 0);
10822 if (IsTypedArrayElementsKind(kind)) {
10823 StoreElementTypedArray(elements, kind, index, value);
10824 } else if (IsSmiElementsKind(kind)) {
10825 TNode<Smi> smi_value = CAST(value)Cast(value);
10826 StoreFixedArrayElement(CAST(elements)Cast(elements), index, smi_value);
10827 } else {
10828 StoreFixedArrayElement(CAST(elements)Cast(elements), index, value);
10829 }
10830}
10831
10832template <typename TIndex>
10833void CodeStubAssembler::StoreElement(TNode<FixedArrayBase> elements,
10834 ElementsKind kind, TNode<TIndex> index,
10835 TNode<Float64T> value) {
10836 static_assert(
10837 std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
10838 "Only Smi or IntPtrT indices are allowed");
10839 DCHECK(IsDoubleElementsKind(kind))((void) 0);
10840 StoreFixedDoubleArrayElement(CAST(elements)Cast(elements), index, value);
10841}
10842
10843template <typename TIndex, typename TValue>
10844void CodeStubAssembler::StoreElement(TNode<RawPtrT> elements, ElementsKind kind,
10845 TNode<TIndex> index, TNode<TValue> value) {
10846 static_assert(std::is_same<TIndex, Smi>::value ||
10847 std::is_same<TIndex, IntPtrT>::value ||
10848 std::is_same<TIndex, UintPtrT>::value,
10849 "Only Smi, IntPtrT or UintPtrT indices are allowed");
10850 static_assert(
10851 std::is_same<TValue, Int32T>::value ||
10852 std::is_same<TValue, Word32T>::value ||
10853 std::is_same<TValue, Float32T>::value ||
10854 std::is_same<TValue, Float64T>::value ||
10855 std::is_same<TValue, BigInt>::value,
10856 "Only Int32T, Word32T, Float32T, Float64T or BigInt value types "
10857 "are allowed");
10858
10859 DCHECK(IsTypedArrayElementsKind(kind))((void) 0);
10860 StoreElementTypedArray(elements, kind, index, value);
10861}
10862template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(TNode<RawPtrT>,
10863 ElementsKind,
10864 TNode<UintPtrT>,
10865 TNode<Int32T>);
10866template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(TNode<RawPtrT>,
10867 ElementsKind,
10868 TNode<UintPtrT>,
10869 TNode<Word32T>);
10870template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(
10871 TNode<RawPtrT>, ElementsKind, TNode<UintPtrT>, TNode<Float32T>);
10872template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(
10873 TNode<RawPtrT>, ElementsKind, TNode<UintPtrT>, TNode<Float64T>);
10874template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(TNode<RawPtrT>,
10875 ElementsKind,
10876 TNode<UintPtrT>,
10877 TNode<BigInt>);
10878
10879TNode<Uint8T> CodeStubAssembler::Int32ToUint8Clamped(
10880 TNode<Int32T> int32_value) {
10881 Label done(this);
10882 TNode<Int32T> int32_zero = Int32Constant(0);
10883 TNode<Int32T> int32_255 = Int32Constant(255);
10884 TVARIABLE(Word32T, var_value, int32_value)TVariable<Word32T> var_value(int32_value, this);
10885 GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
10886 var_value = int32_zero;
10887 GotoIf(Int32LessThan(int32_value, int32_zero), &done);
10888 var_value = int32_255;
10889 Goto(&done);
10890 BIND(&done)Bind(&done);
10891 return UncheckedCast<Uint8T>(var_value.value());
10892}
10893
10894TNode<Uint8T> CodeStubAssembler::Float64ToUint8Clamped(
10895 TNode<Float64T> float64_value) {
10896 Label done(this);
10897 TVARIABLE(Word32T, var_value, Int32Constant(0))TVariable<Word32T> var_value(Int32Constant(0), this);
10898 GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
10899 var_value = Int32Constant(255);
10900 GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
10901 {
10902 TNode<Float64T> rounded_value = Float64RoundToEven(float64_value);
10903 var_value = TruncateFloat64ToWord32(rounded_value);
10904 Goto(&done);
10905 }
10906 BIND(&done)Bind(&done);
10907 return UncheckedCast<Uint8T>(var_value.value());
10908}
10909
10910template <>
10911TNode<Word32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Word32T>(
10912 TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
10913 DCHECK(IsTypedArrayElementsKind(elements_kind))((void) 0);
10914
10915 switch (elements_kind) {
10916 case UINT8_ELEMENTS:
10917 case INT8_ELEMENTS:
10918 case UINT16_ELEMENTS:
10919 case INT16_ELEMENTS:
10920 case UINT32_ELEMENTS:
10921 case INT32_ELEMENTS:
10922 case UINT8_CLAMPED_ELEMENTS:
10923 break;
10924 default:
10925 UNREACHABLE()V8_Fatal("unreachable code");
10926 }
10927
10928 TVARIABLE(Word32T, var_result)TVariable<Word32T> var_result(this);
10929 TVARIABLE(Object, var_input, input)TVariable<Object> var_input(input, this);
10930 Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
10931 convert(this), loop(this, &var_input);
10932 Goto(&loop);
10933 BIND(&loop)Bind(&loop);
10934 GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
10935 // We can handle both HeapNumber and Oddball here, since Oddball has the
10936 // same layout as the HeapNumber for the HeapNumber::value field. This
10937 // way we can also properly optimize stores of oddballs to typed arrays.
10938 TNode<HeapObject> heap_object = CAST(var_input.value())Cast(var_input.value());
10939 GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
10940 STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,static_assert(static_cast<int>(HeapNumber::kValueOffset
) == Oddball::kToNumberRawOffset, "static_cast<int>(HeapNumber::kValueOffset) == Oddball::kToNumberRawOffset"
)
10941 Oddball::kToNumberRawOffset)static_assert(static_cast<int>(HeapNumber::kValueOffset
) == Oddball::kToNumberRawOffset, "static_cast<int>(HeapNumber::kValueOffset) == Oddball::kToNumberRawOffset"
)
;
10942 Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
10943 &convert);
10944
10945 BIND(&if_heapnumber_or_oddball)Bind(&if_heapnumber_or_oddball);
10946 {
10947 TNode<Float64T> value =
10948 LoadObjectField<Float64T>(heap_object, HeapNumber::kValueOffset);
10949 if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10950 var_result = Float64ToUint8Clamped(value);
10951 } else {
10952 var_result = TruncateFloat64ToWord32(value);
10953 }
10954 Goto(&done);
10955 }
10956
10957 BIND(&if_smi)Bind(&if_smi);
10958 {
10959 TNode<Int32T> value = SmiToInt32(CAST(var_input.value())Cast(var_input.value()));
10960 if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
10961 var_result = Int32ToUint8Clamped(value);
10962 } else {
10963 var_result = value;
10964 }
10965 Goto(&done);
10966 }
10967
10968 BIND(&convert)Bind(&convert);
10969 {
10970 var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
10971 Goto(&loop);
10972 }
10973
10974 BIND(&done)Bind(&done);
10975 return var_result.value();
10976}
10977
10978template <>
10979TNode<Float32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float32T>(
10980 TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
10981 DCHECK(IsTypedArrayElementsKind(elements_kind))((void) 0);
10982 CHECK_EQ(elements_kind, FLOAT32_ELEMENTS)do { bool _cmp = ::v8::base::CmpEQImpl< typename ::v8::base
::pass_value_or_ref<decltype(elements_kind)>::type, typename
::v8::base::pass_value_or_ref<decltype(FLOAT32_ELEMENTS)>
::type>((elements_kind), (FLOAT32_ELEMENTS)); do { if ((__builtin_expect
(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s.", "elements_kind"
" " "==" " " "FLOAT32_ELEMENTS"); } } while (false); } while
(false)
;
10983
10984 TVARIABLE(Float32T, var_result)TVariable<Float32T> var_result(this);
10985 TVARIABLE(Object, var_input, input)TVariable<Object> var_input(input, this);
10986 Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
10987 convert(this), loop(this, &var_input);
10988 Goto(&loop);
10989 BIND(&loop)Bind(&loop);
10990 GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
10991 // We can handle both HeapNumber and Oddball here, since Oddball has the
10992 // same layout as the HeapNumber for the HeapNumber::value field. This
10993 // way we can also properly optimize stores of oddballs to typed arrays.
10994 TNode<HeapObject> heap_object = CAST(var_input.value())Cast(var_input.value());
10995 GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
10996 STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,static_assert(static_cast<int>(HeapNumber::kValueOffset
) == Oddball::kToNumberRawOffset, "static_cast<int>(HeapNumber::kValueOffset) == Oddball::kToNumberRawOffset"
)
10997 Oddball::kToNumberRawOffset)static_assert(static_cast<int>(HeapNumber::kValueOffset
) == Oddball::kToNumberRawOffset, "static_cast<int>(HeapNumber::kValueOffset) == Oddball::kToNumberRawOffset"
)
;
10998 Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
10999 &convert);
11000
11001 BIND(&if_heapnumber_or_oddball)Bind(&if_heapnumber_or_oddball);
11002 {
11003 TNode<Float64T> value =
11004 LoadObjectField<Float64T>(heap_object, HeapNumber::kValueOffset);
11005 var_result = TruncateFloat64ToFloat32(value);
11006 Goto(&done);
11007 }
11008
11009 BIND(&if_smi)Bind(&if_smi);
11010 {
11011 TNode<Int32T> value = SmiToInt32(CAST(var_input.value())Cast(var_input.value()));
11012 var_result = RoundInt32ToFloat32(value);
11013 Goto(&done);
11014 }
11015
11016 BIND(&convert)Bind(&convert);
11017 {
11018 var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
11019 Goto(&loop);
11020 }
11021
11022 BIND(&done)Bind(&done);
11023 return var_result.value();
11024}
11025
11026template <>
11027TNode<Float64T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float64T>(
11028 TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
11029 DCHECK(IsTypedArrayElementsKind(elements_kind))((void) 0);
11030 CHECK_EQ(elements_kind, FLOAT64_ELEMENTS)do { bool _cmp = ::v8::base::CmpEQImpl< typename ::v8::base
::pass_value_or_ref<decltype(elements_kind)>::type, typename
::v8::base::pass_value_or_ref<decltype(FLOAT64_ELEMENTS)>
::type>((elements_kind), (FLOAT64_ELEMENTS)); do { if ((__builtin_expect
(!!(!(_cmp)), 0))) { V8_Fatal("Check failed: %s.", "elements_kind"
" " "==" " " "FLOAT64_ELEMENTS"); } } while (false); } while
(false)
;
11031
11032 TVARIABLE(Float64T, var_result)TVariable<Float64T> var_result(this);
11033 TVARIABLE(Object, var_input, input)TVariable<Object> var_input(input, this);
11034 Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
11035 convert(this), loop(this, &var_input);
11036 Goto(&loop);
11037 BIND(&loop)Bind(&loop);
11038 GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
11039 // We can handle both HeapNumber and Oddball here, since Oddball has the
11040 // same layout as the HeapNumber for the HeapNumber::value field. This
11041 // way we can also properly optimize stores of oddballs to typed arrays.
11042 TNode<HeapObject> heap_object = CAST(var_input.value())Cast(var_input.value());
11043 GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
11044 STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,static_assert(static_cast<int>(HeapNumber::kValueOffset
) == Oddball::kToNumberRawOffset, "static_cast<int>(HeapNumber::kValueOffset) == Oddball::kToNumberRawOffset"
)
11045 Oddball::kToNumberRawOffset)static_assert(static_cast<int>(HeapNumber::kValueOffset
) == Oddball::kToNumberRawOffset, "static_cast<int>(HeapNumber::kValueOffset) == Oddball::kToNumberRawOffset"
)
;
11046 Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
11047 &convert);
11048
11049 BIND(&if_heapnumber_or_oddball)Bind(&if_heapnumber_or_oddball);
11050 {
11051 var_result =
11052 LoadObjectField<Float64T>(heap_object, HeapNumber::kValueOffset);
11053 Goto(&done);
11054 }
11055
11056 BIND(&if_smi)Bind(&if_smi);
11057 {
11058 TNode<Int32T> value = SmiToInt32(CAST(var_input.value())Cast(var_input.value()));
11059 var_result = ChangeInt32ToFloat64(value);
11060 Goto(&done);
11061 }
11062
11063 BIND(&convert)Bind(&convert);
11064 {
11065 var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
11066 Goto(&loop);
11067 }
11068
11069 BIND(&done)Bind(&done);
11070 return var_result.value();
11071}
11072
11073template <>
11074TNode<BigInt> CodeStubAssembler::PrepareValueForWriteToTypedArray<BigInt>(
11075 TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
11076 DCHECK(elements_kind == BIGINT64_ELEMENTS ||((void) 0)
11077 elements_kind == BIGUINT64_ELEMENTS)((void) 0);
11078 return ToBigInt(context, input);
11079}
11080
11081void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
11082 TVariable<UintPtrT>* var_low,
11083 TVariable<UintPtrT>* var_high) {
11084 Label done(this);
11085 *var_low = Unsigned(IntPtrConstant(0));
11086 *var_high = Unsigned(IntPtrConstant(0));
11087 TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
11088 TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
11089 TNode<Uint32T> sign = DecodeWord32<BigIntBase::SignBits>(bitfield);
11090 GotoIf(Word32Equal(length, Int32Constant(0)), &done);
11091 *var_low = LoadBigIntDigit(bigint, 0);
11092 if (!Is64()) {
11093 Label load_done(this);
11094 GotoIf(Word32Equal(length, Int32Constant(1)), &load_done);
11095 *var_high = LoadBigIntDigit(bigint, 1);
11096 Goto(&load_done);
11097 BIND(&load_done)Bind(&load_done);
11098 }
11099 GotoIf(Word32Equal(sign, Int32Constant(0)), &done);
11100 // Negative value. Simulate two's complement.
11101 if (!Is64()) {
11102 *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
11103 Label no_carry(this);
11104 GotoIf(IntPtrEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
11105 *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
11106 Goto(&no_carry);
11107 BIND(&no_carry)Bind(&no_carry);
11108 }
11109 *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
11110 Goto(&done);
11111 BIND(&done)Bind(&done);
11112}
11113
11114template <>
11115void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
11116 TNode<Object> value, ElementsKind elements_kind,
11117 TNode<Word32T> converted_value, TVariable<Object>* maybe_converted_value) {
11118 switch (elements_kind) {
11119 case UINT8_ELEMENTS:
11120 case INT8_ELEMENTS:
11121 case UINT16_ELEMENTS:
11122 case INT16_ELEMENTS:
11123 case UINT8_CLAMPED_ELEMENTS:
11124 *maybe_converted_value =
11125 SmiFromInt32(UncheckedCast<Int32T>(converted_value));
11126 break;
11127 case UINT32_ELEMENTS:
11128 *maybe_converted_value =
11129 ChangeUint32ToTagged(UncheckedCast<Uint32T>(converted_value));
11130 break;
11131 case INT32_ELEMENTS:
11132 *maybe_converted_value =
11133 ChangeInt32ToTagged(UncheckedCast<Int32T>(converted_value));
11134 break;
11135 default:
11136 UNREACHABLE()V8_Fatal("unreachable code");
11137 }
11138}
11139
11140template <>
11141void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
11142 TNode<Object> value, ElementsKind elements_kind,
11143 TNode<Float32T> converted_value, TVariable<Object>* maybe_converted_value) {
11144 Label dont_allocate_heap_number(this), end(this);
11145 GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number);
11146 GotoIf(IsHeapNumber(CAST(value)Cast(value)), &dont_allocate_heap_number);
11147 {
11148 *maybe_converted_value =
11149 AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(converted_value));
11150 Goto(&end);
11151 }
11152 BIND(&dont_allocate_heap_number)Bind(&dont_allocate_heap_number);
11153 {
11154 *maybe_converted_value = value;
11155 Goto(&end);
11156 }
11157 BIND(&end)Bind(&end);
11158}
11159
11160template <>
11161void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
11162 TNode<Object> value, ElementsKind elements_kind,
11163 TNode<Float64T> converted_value, TVariable<Object>* maybe_converted_value) {
11164 Label dont_allocate_heap_number(this), end(this);
11165 GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number);
11166 GotoIf(IsHeapNumber(CAST(value)Cast(value)), &dont_allocate_heap_number);
11167 {
11168 *maybe_converted_value = AllocateHeapNumberWithValue(converted_value);
11169 Goto(&end);
11170 }
11171 BIND(&dont_allocate_heap_number)Bind(&dont_allocate_heap_number);
11172 {
11173 *maybe_converted_value = value;
11174 Goto(&end);
11175 }
11176 BIND(&end)Bind(&end);
11177}
11178
11179template <>
11180void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
11181 TNode<Object> value, ElementsKind elements_kind,
11182 TNode<BigInt> converted_value, TVariable<Object>* maybe_converted_value) {
11183 *maybe_converted_value = converted_value;
11184}
11185
11186template <typename TValue>
11187void CodeStubAssembler::EmitElementStoreTypedArray(
11188 TNode<JSTypedArray> typed_array, TNode<IntPtrT> key, TNode<Object> value,
11189 ElementsKind elements_kind, KeyedAccessStoreMode store_mode, Label* bailout,
11190 TNode<Context> context, TVariable<Object>* maybe_converted_value) {
11191 Label done(this), update_value_and_bailout(this, Label::kDeferred);
11192
11193 bool is_rab_gsab = false;
11194 if (IsRabGsabTypedArrayElementsKind(elements_kind)) {
11195 is_rab_gsab = true;
11196 // For the rest of the function, use the corresponding non-RAB/GSAB
11197 // ElementsKind.
11198 elements_kind = GetCorrespondingNonRabGsabElementsKind(elements_kind);
11199 }
11200
11201 TNode<TValue> converted_value =
11202 PrepareValueForWriteToTypedArray<TValue>(value, elements_kind, context);
11203
11204 // There must be no allocations between the buffer load and
11205 // and the actual store to backing store, because GC may decide that
11206 // the buffer is not alive or move the elements.
11207 // TODO(ishell): introduce DisallowGarbageCollectionCode scope here.
11208
11209 // Check if buffer has been detached. (For RAB / GSAB this is part of loading
11210 // the length, so no additional check is needed.)
11211 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(typed_array);
11212 if (!is_rab_gsab) {
11213 GotoIf(IsDetachedBuffer(buffer), &update_value_and_bailout);
11214 }
11215
11216 // Bounds check.
11217 TNode<UintPtrT> length;
11218 if (is_rab_gsab) {
11219 length = LoadVariableLengthJSTypedArrayLength(
11220 typed_array, buffer,
11221 store_mode == STORE_IGNORE_OUT_OF_BOUNDS ? &done
11222 : &update_value_and_bailout);
11223 } else {
11224 length = LoadJSTypedArrayLength(typed_array);
11225 }
11226
11227 if (store_mode == STORE_IGNORE_OUT_OF_BOUNDS) {
11228 // Skip the store if we write beyond the length or
11229 // to a property with a negative integer index.
11230 GotoIfNot(UintPtrLessThan(key, length), &done);
11231 } else {
11232 DCHECK_EQ(store_mode, STANDARD_STORE)((void) 0);
11233 GotoIfNot(UintPtrLessThan(key, length), &update_value_and_bailout);
11234 }
11235
11236 TNode<RawPtrT> data_ptr = LoadJSTypedArrayDataPtr(typed_array);
11237 StoreElement(data_ptr, elements_kind, key, converted_value);
11238 Goto(&done);
11239
11240 if (!is_rab_gsab || store_mode != STORE_IGNORE_OUT_OF_BOUNDS) {
11241 BIND(&update_value_and_bailout)Bind(&update_value_and_bailout);
11242 // We already prepared the incoming value for storing into a typed array.
11243 // This might involve calling ToNumber in some cases. We shouldn't call
11244 // ToNumber again in the runtime so pass the converted value to the runtime.
11245 // The prepared value is an untagged value. Convert it to a tagged value
11246 // to pass it to runtime. It is not possible to do the detached buffer check
11247 // before we prepare the value, since ToNumber can detach the ArrayBuffer.
11248 // The spec specifies the order of these operations.
11249 if (maybe_converted_value != nullptr) {
11250 EmitElementStoreTypedArrayUpdateValue(
11251 value, elements_kind, converted_value, maybe_converted_value);
11252 }
11253 Goto(bailout);
11254 }
11255
11256 BIND(&done)Bind(&done);
11257}
11258
11259void CodeStubAssembler::EmitElementStore(
11260 TNode<JSObject> object, TNode<Object> key, TNode<Object> value,
11261 ElementsKind elements_kind, KeyedAccessStoreMode store_mode, Label* bailout,
11262 TNode<Context> context, TVariable<Object>* maybe_converted_value) {
11263 CSA_DCHECK(this, Word32BinaryNot(IsJSProxy(object)))((void)0);
11264
11265 TNode<FixedArrayBase> elements = LoadElements(object);
11266 if (!(IsSmiOrObjectElementsKind(elements_kind) ||
11267 IsSealedElementsKind(elements_kind) ||
11268 IsNonextensibleElementsKind(elements_kind))) {
11269 CSA_DCHECK(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))))((void)0);
11270 } else if (!IsCOWHandlingStoreMode(store_mode)) {
11271 GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
11272 }
11273
11274 // TODO(ishell): introduce TryToIntPtrOrSmi() and use BInt.
11275 TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);
11276
11277 // TODO(rmcilroy): TNodify the converted value once this funciton and
11278 // StoreElement are templated based on the type elements_kind type.
11279 if (IsTypedArrayOrRabGsabTypedArrayElementsKind(elements_kind)) {
11280 TNode<JSTypedArray> typed_array = CAST(object)Cast(object);
11281 switch (elements_kind) {
11282 case UINT8_ELEMENTS:
11283 case INT8_ELEMENTS:
11284 case UINT16_ELEMENTS:
11285 case INT16_ELEMENTS:
11286 case UINT32_ELEMENTS:
11287 case INT32_ELEMENTS:
11288 case UINT8_CLAMPED_ELEMENTS:
11289 case RAB_GSAB_UINT8_ELEMENTS:
11290 case RAB_GSAB_INT8_ELEMENTS:
11291 case RAB_GSAB_UINT16_ELEMENTS:
11292 case RAB_GSAB_INT16_ELEMENTS:
11293 case RAB_GSAB_UINT32_ELEMENTS:
11294 case RAB_GSAB_INT32_ELEMENTS:
11295 case RAB_GSAB_UINT8_CLAMPED_ELEMENTS:
11296 EmitElementStoreTypedArray<Word32T>(typed_array, intptr_key, value,
11297 elements_kind, store_mode, bailout,
11298 context, maybe_converted_value);
11299 break;
11300 case FLOAT32_ELEMENTS:
11301 case RAB_GSAB_FLOAT32_ELEMENTS:
11302 EmitElementStoreTypedArray<Float32T>(typed_array, intptr_key, value,
11303 elements_kind, store_mode, bailout,
11304 context, maybe_converted_value);
11305 break;
11306 case FLOAT64_ELEMENTS:
11307 case RAB_GSAB_FLOAT64_ELEMENTS:
11308 EmitElementStoreTypedArray<Float64T>(typed_array, intptr_key, value,
11309 elements_kind, store_mode, bailout,
11310 context, maybe_converted_value);
11311 break;
11312 case BIGINT64_ELEMENTS:
11313 case BIGUINT64_ELEMENTS:
11314 case RAB_GSAB_BIGINT64_ELEMENTS:
11315 case RAB_GSAB_BIGUINT64_ELEMENTS:
11316 EmitElementStoreTypedArray<BigInt>(typed_array, intptr_key, value,
11317 elements_kind, store_mode, bailout,
11318 context, maybe_converted_value);
11319 break;
11320 default:
11321 UNREACHABLE()V8_Fatal("unreachable code");
11322 }
11323 return;
11324 }
11325 DCHECK(IsFastElementsKind(elements_kind) ||((void) 0)
11326 IsSealedElementsKind(elements_kind) ||((void) 0)
11327 IsNonextensibleElementsKind(elements_kind))((void) 0);
11328
11329 // In case value is stored into a fast smi array, assure that the value is
11330 // a smi before manipulating the backing store. Otherwise the backing store
11331 // may be left in an invalid state.
11332 base::Optional<TNode<Float64T>> float_value;
11333 if (IsSmiElementsKind(elements_kind)) {
11334 GotoIfNot(TaggedIsSmi(value), bailout);
11335 } else if (IsDoubleElementsKind(elements_kind)) {
11336 float_value = TryTaggedToFloat64(value, bailout);
11337 }
11338
11339 TNode<Smi> smi_length = Select<Smi>(
11340 IsJSArray(object),
11341 [=]() {
11342 // This is casting Number -> Smi which may not actually be safe.
11343 return CAST(LoadJSArrayLength(CAST(object)))Cast(LoadJSArrayLength(Cast(object)));
11344 },
11345 [=]() { return LoadFixedArrayBaseLength(elements); });
11346
11347 TNode<UintPtrT> length = Unsigned(SmiUntag(smi_length));
11348 if (IsGrowStoreMode(store_mode) &&
11349 !(IsSealedElementsKind(elements_kind) ||
11350 IsNonextensibleElementsKind(elements_kind))) {
11351 elements = CheckForCapacityGrow(object, elements, elements_kind, length,
11352 intptr_key, bailout);
11353 } else {
11354 GotoIfNot(UintPtrLessThan(Unsigned(intptr_key), length), bailout);
11355 }
11356
11357 // Cannot store to a hole in holey sealed elements so bailout.
11358 if (elements_kind == HOLEY_SEALED_ELEMENTS ||
11359 elements_kind == HOLEY_NONEXTENSIBLE_ELEMENTS) {
11360 TNode<Object> target_value =
11361 LoadFixedArrayElement(CAST(elements)Cast(elements), intptr_key);
11362 GotoIf(IsTheHole(target_value), bailout);
11363 }
11364
11365 // If we didn't grow {elements}, it might still be COW, in which case we
11366 // copy it now.
11367 if (!(IsSmiOrObjectElementsKind(elements_kind) ||
11368 IsSealedElementsKind(elements_kind) ||
11369 IsNonextensibleElementsKind(elements_kind))) {
11370 CSA_DCHECK(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))))((void)0);
11371 } else if (IsCOWHandlingStoreMode(store_mode)) {
11372 elements = CopyElementsOnWrite(object, elements, elements_kind,
11373 Signed(length), bailout);
11374 }
11375
11376 CSA_DCHECK(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))))((void)0);
11377 if (float_value) {
11378 StoreElement(elements, elements_kind, intptr_key, float_value.value());
11379 } else {
11380 StoreElement(elements, elements_kind, intptr_key, value);
11381 }
11382}
11383
11384TNode<FixedArrayBase> CodeStubAssembler::CheckForCapacityGrow(
11385 TNode<JSObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
11386 TNode<UintPtrT> length, TNode<IntPtrT> key, Label* bailout) {
11387 DCHECK(IsFastElementsKind(kind))((void) 0);
11388 TVARIABLE(FixedArrayBase, checked_elements)TVariable<FixedArrayBase> checked_elements(this);
11389 Label grow_case(this), no_grow_case(this), done(this),
11390 grow_bailout(this, Label::kDeferred);
11391
11392 TNode<BoolT> condition;
11393 if (IsHoleyElementsKind(kind)) {
11394 condition = UintPtrGreaterThanOrEqual(key, length);
11395 } else {
11396 // We don't support growing here unless the value is being appended.
11397 condition = WordEqual(key, length);
11398 }
11399 Branch(condition, &grow_case, &no_grow_case);
11400
11401 BIND(&grow_case)Bind(&grow_case);
11402 {
11403 TNode<IntPtrT> current_capacity =
11404 SmiUntag(LoadFixedArrayBaseLength(elements));
11405 checked_elements = elements;
11406 Label fits_capacity(this);
11407 // If key is negative, we will notice in Runtime::kGrowArrayElements.
11408 GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
11409
11410 {
11411 TNode<FixedArrayBase> new_elements = TryGrowElementsCapacity(
11412 object, elements, kind, key, current_capacity, &grow_bailout);
11413 checked_elements = new_elements;
11414 Goto(&fits_capacity);
11415 }
11416
11417 BIND(&grow_bailout)Bind(&grow_bailout);
11418 {
11419 GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
11420 TNode<Number> tagged_key = ChangeUintPtrToTagged(Unsigned(key));
11421 TNode<Object> maybe_elements = CallRuntime(
11422 Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
11423 GotoIf(TaggedIsSmi(maybe_elements), bailout);
11424 TNode<FixedArrayBase> new_elements = CAST(maybe_elements)Cast(maybe_elements);
11425 CSA_DCHECK(this, IsFixedArrayWithKind(new_elements, kind))((void)0);
11426 checked_elements = new_elements;
11427 Goto(&fits_capacity);
11428 }
11429
11430 BIND(&fits_capacity)Bind(&fits_capacity);
11431 GotoIfNot(IsJSArray(object), &done);
11432
11433 TNode<IntPtrT> new_length = IntPtrAdd(key, IntPtrConstant(1));
11434 StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
11435 SmiTag(new_length));
11436 Goto(&done);
11437 }
11438
11439 BIND(&no_grow_case)Bind(&no_grow_case);
11440 {
11441 GotoIfNot(UintPtrLessThan(key, length), bailout);
11442 checked_elements = elements;
11443 Goto(&done);
11444 }
11445
11446 BIND(&done)Bind(&done);
11447 return checked_elements.value();
11448}
11449
11450TNode<FixedArrayBase> CodeStubAssembler::CopyElementsOnWrite(
11451 TNode<HeapObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
11452 TNode<IntPtrT> length, Label* bailout) {
11453 TVARIABLE(FixedArrayBase, new_elements_var, elements)TVariable<FixedArrayBase> new_elements_var(elements, this
)
;
11454 Label done(this);
11455
11456 GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
11457 {
11458 TNode<IntPtrT> capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
11459 TNode<FixedArrayBase> new_elements = GrowElementsCapacity(
11460 object, elements, kind, kind, length, capacity, bailout);
11461 new_elements_var = new_elements;
11462 Goto(&done);
11463 }
11464
11465 BIND(&done)Bind(&done);
11466 return new_elements_var.value();
11467}
11468
11469void CodeStubAssembler::TransitionElementsKind(TNode<JSObject> object,
11470 TNode<Map> map,
11471 ElementsKind from_kind,
11472 ElementsKind to_kind,
11473 Label* bailout) {
11474 DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind))((void) 0);
11475 if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
11476 TrapAllocationMemento(object, bailout);
11477 }
11478
11479 if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
11480 Comment("Non-simple map transition");
11481 TNode<FixedArrayBase> elements = LoadElements(object);
11482
11483 Label done(this);
11484 GotoIf(TaggedEqual(elements, EmptyFixedArrayConstant()), &done);
11485
11486 // TODO(ishell): Use BInt for elements_length and array_length.
11487 TNode<IntPtrT> elements_length =
11488 SmiUntag(LoadFixedArrayBaseLength(elements));
11489 TNode<IntPtrT> array_length = Select<IntPtrT>(
11490 IsJSArray(object),
11491 [=]() {
11492 CSA_DCHECK(this, IsFastElementsKind(LoadElementsKind(object)))((void)0);
11493 return SmiUntag(LoadFastJSArrayLength(CAST(object)Cast(object)));
11494 },
11495 [=]() { return elements_length; });
11496
11497 CSA_DCHECK(this, WordNotEqual(elements_length, IntPtrConstant(0)))((void)0);
11498
11499 GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
11500 elements_length, bailout);
11501 Goto(&done);
11502 BIND(&done)Bind(&done);
11503 }
11504
11505 StoreMap(object, map);
11506}
11507
11508void CodeStubAssembler::TrapAllocationMemento(TNode<JSObject> object,
11509 Label* memento_found) {
11510 DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL)((void) 0);
11511 Comment("[ TrapAllocationMemento");
11512 Label no_memento_found(this);
11513 Label top_check(this), map_check(this);
11514
11515 TNode<ExternalReference> new_space_top_address = ExternalConstant(
11516 ExternalReference::new_space_allocation_top_address(isolate()));
11517 const int kMementoMapOffset = JSArray::kHeaderSize;
11518 const int kMementoLastWordOffset =
11519 kMementoMapOffset + AllocationMemento::kSize - kTaggedSize;
11520
11521 // Bail out if the object is not in new space.
11522 TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
11523 // TODO(v8:11641): Skip TrapAllocationMemento when allocation-site
11524 // tracking is disabled.
11525 TNode<IntPtrT> object_page = PageFromAddress(object_word);
11526 {
11527 TNode<IntPtrT> page_flags =
11528 Load<IntPtrT>(object_page, IntPtrConstant(Page::kFlagsOffset));
11529 GotoIf(WordEqual(
11530 WordAnd(page_flags,
11531 IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
11532 IntPtrConstant(0)),
11533 &no_memento_found);
11534 // TODO(v8:11799): Support allocation memento for a large object by
11535 // allocating additional word for the memento after the large object.
11536 GotoIf(WordNotEqual(WordAnd(page_flags,
11537 IntPtrConstant(MemoryChunk::kIsLargePageMask)),
11538 IntPtrConstant(0)),
11539 &no_memento_found);
11540 }
11541
11542 TNode<IntPtrT> memento_last_word = IntPtrAdd(
11543 object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
11544 TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);
11545
11546 TNode<IntPtrT> new_space_top = Load<IntPtrT>(new_space_top_address);
11547 TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);
11548
11549 // If the object is in new space, we need to check whether respective
11550 // potential memento object is on the same page as the current top.
11551 GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
11552
11553 // The object is on a different page than allocation top. Bail out if the
11554 // object sits on the page boundary as no memento can follow and we cannot
11555 // touch the memory following it.
11556 Branch(WordEqual(object_page, memento_last_word_page), &map_check,
11557 &no_memento_found);
11558
11559 // If top is on the same page as the current object, we need to check whether
11560 // we are below top.
11561 BIND(&top_check)Bind(&top_check);
11562 {
11563 Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
11564 &no_memento_found, &map_check);
11565 }
11566
11567 // Memento map check.
11568 BIND(&map_check)Bind(&map_check);
11569 {
11570 TNode<AnyTaggedT> maybe_mapword =
11571 LoadObjectField(object, kMementoMapOffset);
11572 TNode<AnyTaggedT> memento_mapword =
11573 LoadRootMapWord(RootIndex::kAllocationMementoMap);
11574 Branch(TaggedEqual(maybe_mapword, memento_mapword), memento_found,
11575 &no_memento_found);
11576 }
11577 BIND(&no_memento_found)Bind(&no_memento_found);
11578 Comment("] TrapAllocationMemento");
11579}
11580
11581TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
11582 DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL)((void) 0);
11583 return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
11584}
11585
11586TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
11587 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot) {
11588 TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
11589 TNode<HeapObject> site = Allocate(size, AllocationFlag::kPretenured);
11590 StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
11591 // Should match AllocationSite::Initialize.
11592 TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
11593 IntPtrConstant(0), UintPtrConstant(GetInitialFastElementsKind()));
11594 StoreObjectFieldNoWriteBarrier(
11595 site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
11596 SmiTag(Signed(field)));
11597
11598 // Unlike literals, constructed arrays don't have nested sites
11599 TNode<Smi> zero = SmiConstant(0);
11600 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
11601
11602 // Pretenuring calculation field.
11603 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
11604 Int32Constant(0));
11605
11606 // Pretenuring memento creation count field.
11607 StoreObjectFieldNoWriteBarrier(
11608 site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0));
11609
11610 // Store an empty fixed array for the code dependency.
11611 StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
11612 DependentCode::kEmptyDependentCode);
11613
11614 // Link the object to the allocation site list
11615 TNode<ExternalReference> site_list = ExternalConstant(
11616 ExternalReference::allocation_sites_list_address(isolate()));
11617 TNode<Object> next_site =
11618 LoadBufferObject(ReinterpretCast<RawPtrT>(site_list), 0);
11619
11620 // TODO(mvstanton): This is a store to a weak pointer, which we may want to
11621 // mark as such in order to skip the write barrier, once we have a unified
11622 // system for weakness. For now we decided to keep it like this because having
11623 // an initial write barrier backed store makes this pointer strong until the
11624 // next GC, and allocation sites are designed to survive several GCs anyway.
11625 StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
11626 StoreFullTaggedNoWriteBarrier(site_list, site);
11627
11628 StoreFeedbackVectorSlot(feedback_vector, slot, site);
11629 return CAST(site)Cast(site);
11630}
11631
11632TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
11633 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
11634 TNode<HeapObject> value, int additional_offset) {
11635 TNode<MaybeObject> weak_value = MakeWeak(value);
11636 StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
11637 UPDATE_WRITE_BARRIER, additional_offset);
11638 return weak_value;
11639}
11640
11641TNode<BoolT> CodeStubAssembler::HasBoilerplate(
11642 TNode<Object> maybe_literal_site) {
11643 return TaggedIsNotSmi(maybe_literal_site);
11644}
11645
11646TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
11647 TNode<AllocationSite> allocation_site) {
11648 TNode<Smi> transition_info = CAST(LoadObjectField(Cast(LoadObjectField( allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset
))
11649 allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset))Cast(LoadObjectField( allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset
))
;
11650 return transition_info;
11651}
11652
11653TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
11654 TNode<AllocationSite> allocation_site) {
11655 TNode<JSObject> boilerplate = CAST(LoadObjectField(Cast(LoadObjectField( allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset
))
11656 allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset))Cast(LoadObjectField( allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset
))
;
11657 return boilerplate;
11658}
11659
11660TNode<Int32T> CodeStubAssembler::LoadElementsKind(
11661 TNode<AllocationSite> allocation_site) {
11662 TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
11663 TNode<Int32T> elements_kind =
11664 Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
11665 SmiToInt32(transition_info)));
11666 CSA_DCHECK(this, IsFastElementsKind(elements_kind))((void)0);
11667 return elements_kind;
11668}
11669
11670template <typename TIndex>
11671TNode<TIndex> CodeStubAssembler::BuildFastLoop(const VariableList& vars,
11672 TNode<TIndex> start_index,
11673 TNode<TIndex> end_index,
11674 const FastLoopBody<TIndex>& body,
11675 int increment,
11676 IndexAdvanceMode advance_mode) {
11677 TVARIABLE(TIndex, var, start_index)TVariable<TIndex> var(start_index, this);
11678 VariableList vars_copy(vars.begin(), vars.end(), zone());
11679 vars_copy.push_back(&var);
11680 Label loop(this, vars_copy);
11681 Label after_loop(this);
11682 // Introduce an explicit second check of the termination condition before the
11683 // loop that helps turbofan generate better code. If there's only a single
11684 // check, then the CodeStubAssembler forces it to be at the beginning of the
11685 // loop requiring a backwards branch at the end of the loop (it's not possible
11686 // to force the loop header check at the end of the loop and branch forward to
11687 // it from the pre-header). The extra branch is slower in the case that the
11688 // loop actually iterates.
11689 TNode<BoolT> first_check = IntPtrOrSmiEqual(var.value(), end_index);
11690 int32_t first_check_val;
11691 if (TryToInt32Constant(first_check, &first_check_val)) {
11692 if (first_check_val) return var.value();
11693 Goto(&loop);
11694 } else {
11695 Branch(first_check, &after_loop, &loop);
11696 }
11697
11698 BIND(&loop)Bind(&loop);
11699 {
11700 if (advance_mode == IndexAdvanceMode::kPre) {
11701 Increment(&var, increment);
11702 }
11703 body(var.value());
11704 if (advance_mode == IndexAdvanceMode::kPost) {
11705 Increment(&var, increment);
11706 }
11707 Branch(IntPtrOrSmiNotEqual(var.value(), end_index), &loop, &after_loop);
11708 }
11709 BIND(&after_loop)Bind(&after_loop);
11710 return var.value();
11711}
11712
11713// Instantiate BuildFastLoop for IntPtrT and UintPtrT.
11714template V8_EXPORT_PRIVATE TNode<IntPtrT>
11715CodeStubAssembler::BuildFastLoop<IntPtrT>(const VariableList& vars,
11716 TNode<IntPtrT> start_index,
11717 TNode<IntPtrT> end_index,
11718 const FastLoopBody<IntPtrT>& body,
11719 int increment,
11720 IndexAdvanceMode advance_mode);
11721template V8_EXPORT_PRIVATE TNode<UintPtrT>
11722CodeStubAssembler::BuildFastLoop<UintPtrT>(const VariableList& vars,
11723 TNode<UintPtrT> start_index,
11724 TNode<UintPtrT> end_index,
11725 const FastLoopBody<UintPtrT>& body,
11726 int increment,
11727 IndexAdvanceMode advance_mode);
11728
11729template <typename TIndex>
11730void CodeStubAssembler::BuildFastArrayForEach(
11731 TNode<UnionT<UnionT<FixedArray, PropertyArray>, HeapObject>> array,
11732 ElementsKind kind, TNode<TIndex> first_element_inclusive,
11733 TNode<TIndex> last_element_exclusive, const FastArrayForEachBody& body,
11734 ForEachDirection direction) {
11735 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize)static_assert(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize
, "FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize")
;
11736 CSA_SLOW_DCHECK(this, Word32Or(IsFixedArrayWithKind(array, kind),((void)0)
11737 IsPropertyArray(array)))((void)0);
11738
11739 intptr_t first_val;
11740 bool constant_first =
11741 TryToIntPtrConstant(first_element_inclusive, &first_val);
11742 intptr_t last_val;
11743 bool constent_last = TryToIntPtrConstant(last_element_exclusive, &last_val);
11744 if (constant_first && constent_last) {
11745 intptr_t delta = last_val - first_val;
11746 DCHECK_GE(delta, 0)((void) 0);
11747 if (delta <= kElementLoopUnrollThreshold) {
11748 if (direction == ForEachDirection::kForward) {
11749 for (intptr_t i = first_val; i < last_val; ++i) {
11750 TNode<IntPtrT> index = IntPtrConstant(i);
11751 TNode<IntPtrT> offset = ElementOffsetFromIndex(
11752 index, kind, FixedArray::kHeaderSize - kHeapObjectTag);
11753 body(array, offset);
11754 }
11755 } else {
11756 for (intptr_t i = last_val - 1; i >= first_val; --i) {
11757 TNode<IntPtrT> index = IntPtrConstant(i);
11758 TNode<IntPtrT> offset = ElementOffsetFromIndex(
11759 index, kind, FixedArray::kHeaderSize - kHeapObjectTag);
11760 body(array, offset);
11761 }
11762 }
11763 return;
11764 }
11765 }
11766
11767 TNode<IntPtrT> start = ElementOffsetFromIndex(
11768 first_element_inclusive, kind, FixedArray::kHeaderSize - kHeapObjectTag);
11769 TNode<IntPtrT> limit = ElementOffsetFromIndex(
11770 last_element_exclusive, kind, FixedArray::kHeaderSize - kHeapObjectTag);
11771 if (direction == ForEachDirection::kReverse) std::swap(start, limit);
11772
11773 int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kTaggedSize;
11774 BuildFastLoop<IntPtrT>(
11775 start, limit, [&](TNode<IntPtrT> offset) { body(array, offset); },
11776 direction == ForEachDirection::kReverse ? -increment : increment,
11777 direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
11778 : IndexAdvanceMode::kPost);
11779}
11780
11781template <typename TIndex>
11782void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
11783 TNode<TIndex> element_count, Label* doesnt_fit, int base_size) {
11784 GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size),
11785 doesnt_fit);
11786}
11787
11788void CodeStubAssembler::InitializeFieldsWithRoot(TNode<HeapObject> object,
11789 TNode<IntPtrT> start_offset,
11790 TNode<IntPtrT> end_offset,
11791 RootIndex root_index) {
11792 CSA_SLOW_DCHECK(this, TaggedIsNotSmi(object))((void)0);
11793 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
11794 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
11795 TNode<AnyTaggedT> root_value;
11796 if (root_index == RootIndex::kOnePointerFillerMap) {
11797 root_value = LoadRootMapWord(root_index);
11798 } else {
11799 root_value = LoadRoot(root_index);
11800 }
11801 BuildFastLoop<IntPtrT>(
11802 end_offset, start_offset,
11803 [=](TNode<IntPtrT> current) {
11804 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, current,
11805 root_value);
11806 },
11807 -kTaggedSize, CodeStubAssembler::IndexAdvanceMode::kPre);
11808}
11809
11810void CodeStubAssembler::BranchIfNumberRelationalComparison(Operation op,
11811 TNode<Number> left,
11812 TNode<Number> right,
11813 Label* if_true,
11814 Label* if_false) {
11815 Label do_float_comparison(this);
11816 TVARIABLE(Float64T, var_left_float)TVariable<Float64T> var_left_float(this);
11817 TVARIABLE(Float64T, var_right_float)TVariable<Float64T> var_right_float(this);
11818
11819 Branch(
11820 TaggedIsSmi(left),
11821 [&] {
11822 TNode<Smi> smi_left = CAST(left)Cast(left);
11823
11824 Branch(
11825 TaggedIsSmi(right),
11826 [&] {
11827 TNode<Smi> smi_right = CAST(right)Cast(right);
11828
11829 // Both {left} and {right} are Smi, so just perform a fast
11830 // Smi comparison.
11831 switch (op) {
11832 case Operation::kEqual:
11833 BranchIfSmiEqual(smi_left, smi_right, if_true, if_false);
11834 break;
11835 case Operation::kLessThan:
11836 BranchIfSmiLessThan(smi_left, smi_right, if_true, if_false);
11837 break;
11838 case Operation::kLessThanOrEqual:
11839 BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
11840 if_false);
11841 break;
11842 case Operation::kGreaterThan:
11843 BranchIfSmiLessThan(smi_right, smi_left, if_true, if_false);
11844 break;
11845 case Operation::kGreaterThanOrEqual:
11846 BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
11847 if_false);
11848 break;
11849 default:
11850 UNREACHABLE()V8_Fatal("unreachable code");
11851 }
11852 },
11853 [&] {
11854 var_left_float = SmiToFloat64(smi_left);
11855 var_right_float = LoadHeapNumberValue(CAST(right)Cast(right));
11856 Goto(&do_float_comparison);
11857 });
11858 },
11859 [&] {
11860 var_left_float = LoadHeapNumberValue(CAST(left)Cast(left));
11861
11862 Branch(
11863 TaggedIsSmi(right),
11864 [&] {
11865 var_right_float = SmiToFloat64(CAST(right)Cast(right));
11866 Goto(&do_float_comparison);
11867 },
11868 [&] {
11869 var_right_float = LoadHeapNumberValue(CAST(right)Cast(right));
11870 Goto(&do_float_comparison);
11871 });
11872 });
11873
11874 BIND(&do_float_comparison)Bind(&do_float_comparison);
11875 {
11876 switch (op) {
11877 case Operation::kEqual:
11878 Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
11879 if_true, if_false);
11880 break;
11881 case Operation::kLessThan:
11882 Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11883 if_true, if_false);
11884 break;
11885 case Operation::kLessThanOrEqual:
11886 Branch(Float64LessThanOrEqual(var_left_float.value(),
11887 var_right_float.value()),
11888 if_true, if_false);
11889 break;
11890 case Operation::kGreaterThan:
11891 Branch(
11892 Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11893 if_true, if_false);
11894 break;
11895 case Operation::kGreaterThanOrEqual:
11896 Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11897 var_right_float.value()),
11898 if_true, if_false);
11899 break;
11900 default:
11901 UNREACHABLE()V8_Fatal("unreachable code");
11902 }
11903 }
11904}
11905
11906void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(TNode<Number> left,
11907 TNode<Number> right,
11908 Label* if_true) {
11909 Label if_false(this);
11910 BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
11911 right, if_true, &if_false);
11912 BIND(&if_false)Bind(&if_false);
11913}
11914
11915namespace {
11916Operation Reverse(Operation op) {
11917 switch (op) {
11918 case Operation::kLessThan:
11919 return Operation::kGreaterThan;
11920 case Operation::kLessThanOrEqual:
11921 return Operation::kGreaterThanOrEqual;
11922 case Operation::kGreaterThan:
11923 return Operation::kLessThan;
11924 case Operation::kGreaterThanOrEqual:
11925 return Operation::kLessThanOrEqual;
11926 default:
11927 break;
11928 }
11929 UNREACHABLE()V8_Fatal("unreachable code");
11930}
11931} // anonymous namespace
11932
11933TNode<Context> CodeStubAssembler::GotoIfHasContextExtensionUpToDepth(
11934 TNode<Context> context, TNode<Uint32T> depth, Label* target) {
11935 TVARIABLE(Context, cur_context, context)TVariable<Context> cur_context(context, this);
11936 TVARIABLE(Uint32T, cur_depth, depth)TVariable<Uint32T> cur_depth(depth, this);
11937
11938 Label context_search(this, {&cur_depth, &cur_context});
11939 Label exit_loop(this);
11940 Label no_extension(this);
11941
11942 // Loop until the depth is 0.
11943 CSA_DCHECK(this, Word32NotEqual(cur_depth.value(), Int32Constant(0)))((void)0);
11944 Goto(&context_search);
11945 BIND(&context_search)Bind(&context_search);
11946 {
11947 // Check if context has an extension slot.
11948 TNode<BoolT> has_extension =
11949 LoadScopeInfoHasExtensionField(LoadScopeInfo(cur_context.value()));
11950 GotoIfNot(has_extension, &no_extension);
11951
11952 // Jump to the target if the extension slot is not an undefined value.
11953 TNode<Object> extension_slot =
11954 LoadContextElement(cur_context.value(), Context::EXTENSION_INDEX);
11955 Branch(TaggedNotEqual(extension_slot, UndefinedConstant()), target,
11956 &no_extension);
11957
11958 BIND(&no_extension)Bind(&no_extension);
11959 {
11960 cur_depth = Unsigned(Int32Sub(cur_depth.value(), Int32Constant(1)));
11961 cur_context = CAST(Cast(LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX
))
11962 LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX))Cast(LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX
))
;
11963
11964 Branch(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
11965 &context_search, &exit_loop);
11966 }
11967 }
11968 BIND(&exit_loop)Bind(&exit_loop);
11969 return cur_context.value();
11970}
11971
11972TNode<Oddball> CodeStubAssembler::RelationalComparison(
11973 Operation op, TNode<Object> left, TNode<Object> right,
11974 const LazyNode<Context>& context, TVariable<Smi>* var_type_feedback) {
11975 Label return_true(this), return_false(this), do_float_comparison(this),
11976 end(this);
11977 TVARIABLE(Oddball, var_result)TVariable<Oddball> var_result(this); // Actually only "true" or "false".
11978 TVARIABLE(Float64T, var_left_float)TVariable<Float64T> var_left_float(this);
11979 TVARIABLE(Float64T, var_right_float)TVariable<Float64T> var_right_float(this);
11980
11981 // We might need to loop several times due to ToPrimitive and/or ToNumeric
11982 // conversions.
11983 TVARIABLE(Object, var_left, left)TVariable<Object> var_left(left, this);
11984 TVARIABLE(Object, var_right, right)TVariable<Object> var_right(right, this);
11985 VariableList loop_variable_list({&var_left, &var_right}, zone());
11986 if (var_type_feedback != nullptr) {
11987 // Initialize the type feedback to None. The current feedback is combined
11988 // with the previous feedback.
11989 *var_type_feedback = SmiConstant(CompareOperationFeedback::kNone);
11990 loop_variable_list.push_back(var_type_feedback);
11991 }
11992 Label loop(this, loop_variable_list);
11993 Goto(&loop);
11994 BIND(&loop)Bind(&loop);
11995 {
11996 left = var_left.value();
11997 right = var_right.value();
11998
11999 Label if_left_smi(this), if_left_not_smi(this);
12000 Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
12001
12002 BIND(&if_left_smi)Bind(&if_left_smi);
12003 {
12004 TNode<Smi> smi_left = CAST(left)Cast(left);
12005 Label if_right_smi(this), if_right_heapnumber(this),
12006 if_right_bigint(this, Label::kDeferred),
12007 if_right_not_numeric(this, Label::kDeferred);
12008 GotoIf(TaggedIsSmi(right), &if_right_smi);
12009 TNode<Map> right_map = LoadMap(CAST(right)Cast(right));
12010 GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
12011 TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
12012 Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
12013 &if_right_not_numeric);
12014
12015 BIND(&if_right_smi)Bind(&if_right_smi);
12016 {
12017 TNode<Smi> smi_right = CAST(right)Cast(right);
12018 CombineFeedback(var_type_feedback,
12019 CompareOperationFeedback::kSignedSmall);
12020 switch (op) {
12021 case Operation::kLessThan:
12022 BranchIfSmiLessThan(smi_left, smi_right, &return_true,
12023 &return_false);
12024 break;
12025 case Operation::kLessThanOrEqual:
12026 BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
12027 &return_false);
12028 break;
12029 case Operation::kGreaterThan:
12030 BranchIfSmiLessThan(smi_right, smi_left, &return_true,
12031 &return_false);
12032 break;
12033 case Operation::kGreaterThanOrEqual:
12034 BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
12035 &return_false);
12036 break;
12037 default:
12038 UNREACHABLE()V8_Fatal("unreachable code");
12039 }
12040 }
12041
12042 BIND(&if_right_heapnumber)Bind(&if_right_heapnumber);
12043 {
12044 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
12045 var_left_float = SmiToFloat64(smi_left);
12046 var_right_float = LoadHeapNumberValue(CAST(right)Cast(right));
12047 Goto(&do_float_comparison);
12048 }
12049
12050 BIND(&if_right_bigint)Bind(&if_right_bigint);
12051 {
12052 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
12053 var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
12054 NoContextConstant(),Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
12055 SmiConstant(Reverse(op)), right, left))Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
;
12056 Goto(&end);
12057 }
12058
12059 BIND(&if_right_not_numeric)Bind(&if_right_not_numeric);
12060 {
12061 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
12062 // Convert {right} to a Numeric; we don't need to perform the
12063 // dedicated ToPrimitive(right, hint Number) operation, as the
12064 // ToNumeric(right) will by itself already invoke ToPrimitive with
12065 // a Number hint.
12066 var_right = CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
12067 Goto(&loop);
12068 }
12069 }
12070
12071 BIND(&if_left_not_smi)Bind(&if_left_not_smi);
12072 {
12073 TNode<Map> left_map = LoadMap(CAST(left)Cast(left));
12074
12075 Label if_right_smi(this), if_right_not_smi(this);
12076 Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
12077
12078 BIND(&if_right_smi)Bind(&if_right_smi);
12079 {
12080 Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
12081 if_left_not_numeric(this, Label::kDeferred);
12082 GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
12083 TNode<Uint16T> left_instance_type = LoadMapInstanceType(left_map);
12084 Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
12085 &if_left_not_numeric);
12086
12087 BIND(&if_left_heapnumber)Bind(&if_left_heapnumber);
12088 {
12089 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
12090 var_left_float = LoadHeapNumberValue(CAST(left)Cast(left));
12091 var_right_float = SmiToFloat64(CAST(right)Cast(right));
12092 Goto(&do_float_comparison);
12093 }
12094
12095 BIND(&if_left_bigint)Bind(&if_left_bigint);
12096 {
12097 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
12098 var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(op), left, right))
12099 NoContextConstant(), SmiConstant(op),Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(op), left, right))
12100 left, right))Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(op), left, right))
;
12101 Goto(&end);
12102 }
12103
12104 BIND(&if_left_not_numeric)Bind(&if_left_not_numeric);
12105 {
12106 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
12107 // Convert {left} to a Numeric; we don't need to perform the
12108 // dedicated ToPrimitive(left, hint Number) operation, as the
12109 // ToNumeric(left) will by itself already invoke ToPrimitive with
12110 // a Number hint.
12111 var_left = CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
12112 Goto(&loop);
12113 }
12114 }
12115
12116 BIND(&if_right_not_smi)Bind(&if_right_not_smi);
12117 {
12118 TNode<Map> right_map = LoadMap(CAST(right)Cast(right));
12119
12120 Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
12121 if_left_string(this, Label::kDeferred),
12122 if_left_other(this, Label::kDeferred);
12123 GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
12124 TNode<Uint16T> left_instance_type = LoadMapInstanceType(left_map);
12125 GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
12126 Branch(IsStringInstanceType(left_instance_type), &if_left_string,
12127 &if_left_other);
12128
12129 BIND(&if_left_heapnumber)Bind(&if_left_heapnumber);
12130 {
12131 Label if_right_heapnumber(this),
12132 if_right_bigint(this, Label::kDeferred),
12133 if_right_not_numeric(this, Label::kDeferred);
12134 GotoIf(TaggedEqual(right_map, left_map), &if_right_heapnumber);
12135 TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
12136 Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
12137 &if_right_not_numeric);
12138
12139 BIND(&if_right_heapnumber)Bind(&if_right_heapnumber);
12140 {
12141 CombineFeedback(var_type_feedback,
12142 CompareOperationFeedback::kNumber);
12143 var_left_float = LoadHeapNumberValue(CAST(left)Cast(left));
12144 var_right_float = LoadHeapNumberValue(CAST(right)Cast(right));
12145 Goto(&do_float_comparison);
12146 }
12147
12148 BIND(&if_right_bigint)Bind(&if_right_bigint);
12149 {
12150 OverwriteFeedback(var_type_feedback,
12151 CompareOperationFeedback::kAny);
12152 var_result = CAST(CallRuntime(Cast(CallRuntime( Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
12153 Runtime::kBigIntCompareToNumber, NoContextConstant(),Cast(CallRuntime( Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
12154 SmiConstant(Reverse(op)), right, left))Cast(CallRuntime( Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
;
12155 Goto(&end);
12156 }
12157
12158 BIND(&if_right_not_numeric)Bind(&if_right_not_numeric);
12159 {
12160 OverwriteFeedback(var_type_feedback,
12161 CompareOperationFeedback::kAny);
12162 // Convert {right} to a Numeric; we don't need to perform
12163 // dedicated ToPrimitive(right, hint Number) operation, as the
12164 // ToNumeric(right) will by itself already invoke ToPrimitive with
12165 // a Number hint.
12166 var_right =
12167 CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
12168 Goto(&loop);
12169 }
12170 }
12171
12172 BIND(&if_left_bigint)Bind(&if_left_bigint);
12173 {
12174 Label if_right_heapnumber(this), if_right_bigint(this),
12175 if_right_string(this), if_right_other(this);
12176 GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
12177 TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
12178 GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
12179 Branch(IsStringInstanceType(right_instance_type), &if_right_string,
12180 &if_right_other);
12181
12182 BIND(&if_right_heapnumber)Bind(&if_right_heapnumber);
12183 {
12184 OverwriteFeedback(var_type_feedback,
12185 CompareOperationFeedback::kAny);
12186 var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(op), left, right))
12187 NoContextConstant(), SmiConstant(op),Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(op), left, right))
12188 left, right))Cast(CallRuntime(Runtime::kBigIntCompareToNumber, NoContextConstant
(), SmiConstant(op), left, right))
;
12189 Goto(&end);
12190 }
12191
12192 BIND(&if_right_bigint)Bind(&if_right_bigint);
12193 {
12194 CombineFeedback(var_type_feedback,
12195 CompareOperationFeedback::kBigInt);
12196 var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,Cast(CallRuntime(Runtime::kBigIntCompareToBigInt, NoContextConstant
(), SmiConstant(op), left, right))
12197 NoContextConstant(), SmiConstant(op),Cast(CallRuntime(Runtime::kBigIntCompareToBigInt, NoContextConstant
(), SmiConstant(op), left, right))
12198 left, right))Cast(CallRuntime(Runtime::kBigIntCompareToBigInt, NoContextConstant
(), SmiConstant(op), left, right))
;
12199 Goto(&end);
12200 }
12201
12202 BIND(&if_right_string)Bind(&if_right_string);
12203 {
12204 OverwriteFeedback(var_type_feedback,
12205 CompareOperationFeedback::kAny);
12206 var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,Cast(CallRuntime(Runtime::kBigIntCompareToString, NoContextConstant
(), SmiConstant(op), left, right))
12207 NoContextConstant(), SmiConstant(op),Cast(CallRuntime(Runtime::kBigIntCompareToString, NoContextConstant
(), SmiConstant(op), left, right))
12208 left, right))Cast(CallRuntime(Runtime::kBigIntCompareToString, NoContextConstant
(), SmiConstant(op), left, right))
;
12209 Goto(&end);
12210 }
12211
12212 // {right} is not a Number, BigInt, or String.
12213 BIND(&if_right_other)Bind(&if_right_other);
12214 {
12215 OverwriteFeedback(var_type_feedback,
12216 CompareOperationFeedback::kAny);
12217 // Convert {right} to a Numeric; we don't need to perform
12218 // dedicated ToPrimitive(right, hint Number) operation, as the
12219 // ToNumeric(right) will by itself already invoke ToPrimitive with
12220 // a Number hint.
12221 var_right =
12222 CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
12223 Goto(&loop);
12224 }
12225 }
12226
12227 BIND(&if_left_string)Bind(&if_left_string);
12228 {
12229 TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
12230
12231 Label if_right_not_string(this, Label::kDeferred);
12232 GotoIfNot(IsStringInstanceType(right_instance_type),
12233 &if_right_not_string);
12234
12235 // Both {left} and {right} are strings.
12236 CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
12237 Builtin builtin;
12238 switch (op) {
12239 case Operation::kLessThan:
12240 builtin = Builtin::kStringLessThan;
12241 break;
12242 case Operation::kLessThanOrEqual:
12243 builtin = Builtin::kStringLessThanOrEqual;
12244 break;
12245 case Operation::kGreaterThan:
12246 builtin = Builtin::kStringGreaterThan;
12247 break;
12248 case Operation::kGreaterThanOrEqual:
12249 builtin = Builtin::kStringGreaterThanOrEqual;
12250 break;
12251 default:
12252 UNREACHABLE()V8_Fatal("unreachable code");
12253 }
12254 var_result = CAST(CallBuiltin(builtin, context(), left, right))Cast(CallBuiltin(builtin, context(), left, right));
12255 Goto(&end);
12256
12257 BIND(&if_right_not_string)Bind(&if_right_not_string);
12258 {
12259 OverwriteFeedback(var_type_feedback,
12260 CompareOperationFeedback::kAny);
12261 // {left} is a String, while {right} isn't. Check if {right} is
12262 // a BigInt, otherwise call ToPrimitive(right, hint Number) if
12263 // {right} is a receiver, or ToNumeric(left) and then
12264 // ToNumeric(right) in the other cases.
12265 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE)static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE, "LAST_JS_RECEIVER_TYPE == LAST_TYPE"
)
;
12266 Label if_right_bigint(this),
12267 if_right_receiver(this, Label::kDeferred);
12268 GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
12269 GotoIf(IsJSReceiverInstanceType(right_instance_type),
12270 &if_right_receiver);
12271
12272 var_left =
12273 CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
12274 var_right = CallBuiltin(Builtin::kToNumeric, context(), right);
12275 Goto(&loop);
12276
12277 BIND(&if_right_bigint)Bind(&if_right_bigint);
12278 {
12279 var_result = CAST(CallRuntime(Cast(CallRuntime( Runtime::kBigIntCompareToString, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
12280 Runtime::kBigIntCompareToString, NoContextConstant(),Cast(CallRuntime( Runtime::kBigIntCompareToString, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
12281 SmiConstant(Reverse(op)), right, left))Cast(CallRuntime( Runtime::kBigIntCompareToString, NoContextConstant
(), SmiConstant(Reverse(op)), right, left))
;
12282 Goto(&end);
12283 }
12284
12285 BIND(&if_right_receiver)Bind(&if_right_receiver);
12286 {
12287 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
12288 isolate(), ToPrimitiveHint::kNumber);
12289 var_right = CallStub(callable, context(), right);
12290 Goto(&loop);
12291 }
12292 }
12293 }
12294
12295 BIND(&if_left_other)Bind(&if_left_other);
12296 {
12297 // {left} is neither a Numeric nor a String, and {right} is not a Smi.
12298 if (var_type_feedback != nullptr) {
12299 // Collect NumberOrOddball feedback if {left} is an Oddball
12300 // and {right} is either a HeapNumber or Oddball. Otherwise collect
12301 // Any feedback.
12302 Label collect_any_feedback(this), collect_oddball_feedback(this),
12303 collect_feedback_done(this);
12304 GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
12305 &collect_any_feedback);
12306
12307 GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
12308 TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
12309 Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
12310 &collect_oddball_feedback, &collect_any_feedback);
12311
12312 BIND(&collect_oddball_feedback)Bind(&collect_oddball_feedback);
12313 {
12314 CombineFeedback(var_type_feedback,
12315 CompareOperationFeedback::kNumberOrOddball);
12316 Goto(&collect_feedback_done);
12317 }
12318
12319 BIND(&collect_any_feedback)Bind(&collect_any_feedback);
12320 {
12321 OverwriteFeedback(var_type_feedback,
12322 CompareOperationFeedback::kAny);
12323 Goto(&collect_feedback_done);
12324 }
12325
12326 BIND(&collect_feedback_done)Bind(&collect_feedback_done);
12327 }
12328
12329 // If {left} is a receiver, call ToPrimitive(left, hint Number).
12330 // Otherwise call ToNumeric(right) and then ToNumeric(left), the
12331 // order here is important as it's observable by user code.
12332 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE)static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE, "LAST_JS_RECEIVER_TYPE == LAST_TYPE"
)
;
12333 Label if_left_receiver(this, Label::kDeferred);
12334 GotoIf(IsJSReceiverInstanceType(left_instance_type),
12335 &if_left_receiver);
12336
12337 var_right = CallBuiltin(Builtin::kToNumeric, context(), right);
12338 var_left = CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
12339 Goto(&loop);
12340
12341 BIND(&if_left_receiver)Bind(&if_left_receiver);
12342 {
12343 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
12344 isolate(), ToPrimitiveHint::kNumber);
12345 var_left = CallStub(callable, context(), left);
12346 Goto(&loop);
12347 }
12348 }
12349 }
12350 }
12351 }
12352
12353 BIND(&do_float_comparison)Bind(&do_float_comparison);
12354 {
12355 switch (op) {
12356 case Operation::kLessThan:
12357 Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
12358 &return_true, &return_false);
12359 break;
12360 case Operation::kLessThanOrEqual:
12361 Branch(Float64LessThanOrEqual(var_left_float.value(),
12362 var_right_float.value()),
12363 &return_true, &return_false);
12364 break;
12365 case Operation::kGreaterThan:
12366 Branch(
12367 Float64GreaterThan(var_left_float.value(), var_right_float.value()),
12368 &return_true, &return_false);
12369 break;
12370 case Operation::kGreaterThanOrEqual:
12371 Branch(Float64GreaterThanOrEqual(var_left_float.value(),
12372 var_right_float.value()),
12373 &return_true, &return_false);
12374 break;
12375 default:
12376 UNREACHABLE()V8_Fatal("unreachable code");
12377 }
12378 }
12379
12380 BIND(&return_true)Bind(&return_true);
12381 {
12382 var_result = TrueConstant();
12383 Goto(&end);
12384 }
12385
12386 BIND(&return_false)Bind(&return_false);
12387 {
12388 var_result = FalseConstant();
12389 Goto(&end);
12390 }
12391
12392 BIND(&end)Bind(&end);
12393 return var_result.value();
12394}
12395
12396TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
12397 TNode<Int32T> instance_type) {
12398 TNode<Smi> feedback = SelectSmiConstant(
12399 Word32Equal(
12400 Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
12401 Int32Constant(kInternalizedTag)),
12402 CompareOperationFeedback::kInternalizedString,
12403 CompareOperationFeedback::kString);
12404 return feedback;
12405}
12406
12407void CodeStubAssembler::GenerateEqual_Same(TNode<Object> value, Label* if_equal,
12408 Label* if_notequal,
12409 TVariable<Smi>* var_type_feedback) {
12410 // In case of abstract or strict equality checks, we need additional checks
12411 // for NaN values because they are not considered equal, even if both the
12412 // left and the right hand side reference exactly the same value.
12413
12414 Label if_smi(this), if_heapnumber(this);
12415 GotoIf(TaggedIsSmi(value), &if_smi);
12416
12417 TNode<HeapObject> value_heapobject = CAST(value)Cast(value);
12418 TNode<Map> value_map = LoadMap(value_heapobject);
12419 GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
12420
12421 // For non-HeapNumbers, all we do is collect type feedback.
12422 if (var_type_feedback != nullptr) {
12423 TNode<Uint16T> instance_type = LoadMapInstanceType(value_map);
12424
12425 Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
12426 if_bigint(this);
12427 GotoIf(IsStringInstanceType(instance_type), &if_string);
12428 GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
12429 GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
12430 Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);
12431
12432 BIND(&if_string)Bind(&if_string);
12433 {
12434 CSA_DCHECK(this, IsString(value_heapobject))((void)0);
12435 CombineFeedback(var_type_feedback,
12436 CollectFeedbackForString(instance_type));
12437 Goto(if_equal);
12438 }
12439
12440 BIND(&if_symbol)Bind(&if_symbol);
12441 {
12442 CSA_DCHECK(this, IsSymbol(value_heapobject))((void)0);
12443 CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
12444 Goto(if_equal);
12445 }
12446
12447 BIND(&if_receiver)Bind(&if_receiver);
12448 {
12449 CSA_DCHECK(this, IsJSReceiver(value_heapobject))((void)0);
12450 CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
12451 Goto(if_equal);
12452 }
12453
12454 BIND(&if_bigint)Bind(&if_bigint);
12455 {
12456 CSA_DCHECK(this, IsBigInt(value_heapobject))((void)0);
12457 CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
12458 Goto(if_equal);
12459 }
12460
12461 BIND(&if_oddball)Bind(&if_oddball);
12462 {
12463 CSA_DCHECK(this, IsOddball(value_heapobject))((void)0);
12464 Label if_boolean(this), if_not_boolean(this);
12465 Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);
12466
12467 BIND(&if_boolean)Bind(&if_boolean);
12468 {
12469 CombineFeedback(var_type_feedback, CompareOperationFeedback::kBoolean);
12470 Goto(if_equal);
12471 }
12472
12473 BIND(&if_not_boolean)Bind(&if_not_boolean);
12474 {
12475 CSA_DCHECK(this, IsNullOrUndefined(value_heapobject))((void)0);
12476 CombineFeedback(var_type_feedback,
12477 CompareOperationFeedback::kReceiverOrNullOrUndefined);
12478 Goto(if_equal);
12479 }
12480 }
12481 } else {
12482 Goto(if_equal);
12483 }
12484
12485 BIND(&if_heapnumber)Bind(&if_heapnumber);
12486 {
12487 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
12488 TNode<Float64T> number_value = LoadHeapNumberValue(value_heapobject);
12489 BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
12490 }
12491
12492 BIND(&if_smi)Bind(&if_smi);
12493 {
12494 CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
12495 Goto(if_equal);
12496 }
12497}
12498
12499// ES6 section 7.2.12 Abstract Equality Comparison
12500TNode<Oddball> CodeStubAssembler::Equal(TNode<Object> left, TNode<Object> right,
12501 const LazyNode<Context>& context,
12502 TVariable<Smi>* var_type_feedback) {
12503 // This is a slightly optimized version of Object::Equals. Whenever you
12504 // change something functionality wise in here, remember to update the
12505 // Object::Equals method as well.
12506
12507 Label if_equal(this), if_notequal(this), do_float_comparison(this),
12508 do_right_stringtonumber(this, Label::kDeferred), end(this);
12509 TVARIABLE(Oddball, result)TVariable<Oddball> result(this);
12510 TVARIABLE(Float64T, var_left_float)TVariable<Float64T> var_left_float(this);
12511 TVARIABLE(Float64T, var_right_float)TVariable<Float64T> var_right_float(this);
12512
12513 // We can avoid code duplication by exploiting the fact that abstract equality
12514 // is symmetric.
12515 Label use_symmetry(this);
12516
12517 // We might need to loop several times due to ToPrimitive and/or ToNumber
12518 // conversions.
12519 TVARIABLE(Object, var_left, left)TVariable<Object> var_left(left, this);
12520 TVARIABLE(Object, var_right, right)TVariable<Object> var_right(right, this);
12521 VariableList loop_variable_list({&var_left, &var_right}, zone());
12522 if (var_type_feedback != nullptr) {
12523 // Initialize the type feedback to None. The current feedback will be
12524 // combined with the previous feedback.
12525 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
12526 loop_variable_list.push_back(var_type_feedback);
12527 }
12528 Label loop(this, loop_variable_list);
12529 Goto(&loop);
12530 BIND(&loop)Bind(&loop);
12531 {
12532 left = var_left.value();
12533 right = var_right.value();
12534
12535 Label if_notsame(this);
12536 GotoIf(TaggedNotEqual(left, right), &if_notsame);
12537 {
12538 // {left} and {right} reference the exact same value, yet we need special
12539 // treatment for HeapNumber, as NaN is not equal to NaN.
12540 GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
12541 }
12542
12543 BIND(&if_notsame)Bind(&if_notsame);
12544 Label if_left_smi(this), if_left_not_smi(this);
12545 Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
12546
12547 BIND(&if_left_smi)Bind(&if_left_smi);
12548 {
12549 Label if_right_smi(this), if_right_not_smi(this);
12550 CombineFeedback(var_type_feedback,
12551 CompareOperationFeedback::kSignedSmall);
12552 Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
12553
12554 BIND(&if_right_smi)Bind(&if_right_smi);
12555 {
12556 // We have already checked for {left} and {right} being the same value,
12557 // so when we get here they must be different Smis.
12558 Goto(&if_notequal);
12559 }
12560
12561 BIND(&if_right_not_smi)Bind(&if_right_not_smi);
12562 {
12563 TNode<Map> right_map = LoadMap(CAST(right)Cast(right));
12564 Label if_right_heapnumber(this), if_right_oddball(this),
12565 if_right_bigint(this, Label::kDeferred),
12566 if_right_receiver(this, Label::kDeferred);
12567 GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
12568
12569 // {left} is Smi and {right} is not HeapNumber or Smi.
12570 TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
12571 GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
12572 GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
12573 GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
12574 GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
12575 CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
12576 Goto(&if_notequal);
12577
12578 BIND(&if_right_heapnumber)Bind(&if_right_heapnumber);
12579 {
12580 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
12581 var_left_float = SmiToFloat64(CAST(left)Cast(left));
12582 var_right_float = LoadHeapNumberValue(CAST(right)Cast(right));
12583 Goto(&do_float_comparison);
12584 }
12585
12586 BIND(&if_right_oddball)Bind(&if_right_oddball);
12587 {
12588 Label if_right_boolean(this);
12589 GotoIf(IsBooleanMap(right_map), &if_right_boolean);
12590 CombineFeedback(var_type_feedback,
12591 CompareOperationFeedback::kOddball);
12592 Goto(&if_notequal);
12593
12594 BIND(&if_right_boolean)Bind(&if_right_boolean);
12595 {
12596 CombineFeedback(var_type_feedback,
12597 CompareOperationFeedback::kBoolean);
12598 var_right = LoadObjectField(CAST(right)Cast(right), Oddball::kToNumberOffset);
12599 Goto(&loop);
12600 }
12601 }
12602
12603 BIND(&if_right_bigint)Bind(&if_right_bigint);
12604 {
12605 CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
12606 result = CAST(CallRuntime(Runtime::kBigIntEqualToNumber,Cast(CallRuntime(Runtime::kBigIntEqualToNumber, NoContextConstant
(), right, left))
12607 NoContextConstant(), right, left))Cast(CallRuntime(Runtime::kBigIntEqualToNumber, NoContextConstant
(), right, left))
;
12608 Goto(&end);
12609 }
12610
12611 BIND(&if_right_receiver)Bind(&if_right_receiver);
12612 {
12613 CombineFeedback(var_type_feedback,
12614 CompareOperationFeedback::kReceiver);
12615 Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
12616 var_right = CallStub(callable, context(), right);
12617 Goto(&loop);
12618 }
12619 }
12620 }
12621
12622 BIND(&if_left_not_smi)Bind(&if_left_not_smi);
12623 {
12624 GotoIf(TaggedIsSmi(right), &use_symmetry);
12625
12626 Label if_left_symbol(this), if_left_number(this),
12627 if_left_string(this, Label::kDeferred),
12628 if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
12629 if_left_receiver(this);
12630
12631 TNode<Map> left_map = LoadMap(CAST(left)Cast(left));
12632 TNode<Map> right_map = LoadMap(CAST(right)Cast(right));
12633 TNode<Uint16T> left_type = LoadMapInstanceType(left_map);
12634 TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
12635
12636 GotoIf(IsStringInstanceType(left_type), &if_left_string);
12637 GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
12638 GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
12639 GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
12640 Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
12641 &if_left_receiver);
12642
12643 BIND(&if_left_string)Bind(&if_left_string);
12644 {
12645 GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
12646 result =
12647 CAST(CallBuiltin(Builtin::kStringEqual, context(), left, right))Cast(CallBuiltin(Builtin::kStringEqual, context(), left, right
))
;
12648 CombineFeedback(var_type_feedback,
12649 SmiOr(CollectFeedbackForString(left_type),
12650 CollectFeedbackForString(right_type)));
12651 Goto(&end);
12652 }
12653
12654 BIND(&if_left_number)Bind(&if_left_number);
12655 {
12656 Label if_right_not_number(this);
12657
12658 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
12659 GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
12660
12661 var_left_float = LoadHeapNumberValue(CAST(left)Cast(left));
12662 var_right_float = LoadHeapNumberValue(CAST(right)Cast(right));
12663 Goto(&do_float_comparison);
12664
12665 BIND(&if_right_not_number)Bind(&if_right_not_number);
12666 {
12667 Label if_right_oddball(this);
12668
12669 GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
12670 GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
12671 GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
12672 GotoIf(IsJSReceiverInstanceType(right_type), &use_symmetry);
12673 CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
12674 Goto(&if_notequal);
12675
12676 BIND(&if_right_oddball)Bind(&if_right_oddball);
12677 {
12678 Label if_right_boolean(this);
12679 GotoIf(IsBooleanMap(right_map), &if_right_boolean);
12680 CombineFeedback(var_type_feedback,
12681 CompareOperationFeedback::kOddball);
12682 Goto(&if_notequal);
12683
12684 BIND(&if_right_boolean)Bind(&if_right_boolean);
12685 {
12686 CombineFeedback(var_type_feedback,
12687 CompareOperationFeedback::kBoolean);
12688 var_right =
12689 LoadObjectField(CAST(right)Cast(right), Oddball::kToNumberOffset);
12690 Goto(&loop);
12691 }
12692 }
12693 }
12694 }
12695
12696 BIND(&if_left_bigint)Bind(&if_left_bigint);
12697 {
12698 Label if_right_heapnumber(this), if_right_bigint(this),
12699 if_right_string(this), if_right_boolean(this);
12700 CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
12701
12702 GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
12703 GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
12704 GotoIf(IsStringInstanceType(right_type), &if_right_string);
12705 GotoIf(IsBooleanMap(right_map), &if_right_boolean);
12706 Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
12707 &if_notequal);
12708
12709 BIND(&if_right_heapnumber)Bind(&if_right_heapnumber);
12710 {
12711 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
12712 result = CAST(CallRuntime(Runtime::kBigIntEqualToNumber,Cast(CallRuntime(Runtime::kBigIntEqualToNumber, NoContextConstant
(), left, right))
12713 NoContextConstant(), left, right))Cast(CallRuntime(Runtime::kBigIntEqualToNumber, NoContextConstant
(), left, right))
;
12714 Goto(&end);
12715 }
12716
12717 BIND(&if_right_bigint)Bind(&if_right_bigint);
12718 {
12719 // We already have BigInt feedback.
12720 result = CAST(CallRuntime(Runtime::kBigIntEqualToBigInt,Cast(CallRuntime(Runtime::kBigIntEqualToBigInt, NoContextConstant
(), left, right))
12721 NoContextConstant(), left, right))Cast(CallRuntime(Runtime::kBigIntEqualToBigInt, NoContextConstant
(), left, right))
;
12722 Goto(&end);
12723 }
12724
12725 BIND(&if_right_string)Bind(&if_right_string);
12726 {
12727 CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
12728 result = CAST(CallRuntime(Runtime::kBigIntEqualToString,Cast(CallRuntime(Runtime::kBigIntEqualToString, NoContextConstant
(), left, right))
12729 NoContextConstant(), left, right))Cast(CallRuntime(Runtime::kBigIntEqualToString, NoContextConstant
(), left, right))
;
12730 Goto(&end);
12731 }
12732
12733 BIND(&if_right_boolean)Bind(&if_right_boolean);
12734 {
12735 CombineFeedback(var_type_feedback,
12736 CompareOperationFeedback::kBoolean);
12737 var_right = LoadObjectField(CAST(right)Cast(right), Oddball::kToNumberOffset);
12738 Goto(&loop);
12739 }
12740 }
12741
12742 BIND(&if_left_oddball)Bind(&if_left_oddball);
12743 {
12744 Label if_left_boolean(this), if_left_not_boolean(this);
12745 GotoIf(IsBooleanMap(left_map), &if_left_boolean);
12746 if (var_type_feedback != nullptr) {
12747 CombineFeedback(var_type_feedback,
12748 CompareOperationFeedback::kNullOrUndefined);
12749 GotoIf(IsUndetectableMap(left_map), &if_left_not_boolean);
12750 }
12751 Goto(&if_left_not_boolean);
12752
12753 BIND(&if_left_not_boolean)Bind(&if_left_not_boolean);
12754 {
12755 // {left} is either Null or Undefined. Check if {right} is
12756 // undetectable (which includes Null and Undefined).
12757 Label if_right_undetectable(this), if_right_number(this),
12758 if_right_oddball(this),
12759 if_right_not_number_or_oddball_or_undetectable(this);
12760 GotoIf(IsUndetectableMap(right_map), &if_right_undetectable);
12761 GotoIf(IsHeapNumberInstanceType(right_type), &if_right_number);
12762 GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
12763 Goto(&if_right_not_number_or_oddball_or_undetectable);
12764
12765 BIND(&if_right_undetectable)Bind(&if_right_undetectable);
12766 {
12767 // If {right} is undetectable, it must be either also
12768 // Null or Undefined, or a Receiver (aka document.all).
12769 CombineFeedback(
12770 var_type_feedback,
12771 CompareOperationFeedback::kReceiverOrNullOrUndefined);
12772 Goto(&if_equal);
12773 }
12774
12775 BIND(&if_right_number)Bind(&if_right_number);
12776 {
12777 CombineFeedback(var_type_feedback,
12778 CompareOperationFeedback::kNumber);
12779 Goto(&if_notequal);
12780 }
12781
12782 BIND(&if_right_oddball)Bind(&if_right_oddball);
12783 {
12784 CombineFeedback(var_type_feedback,
12785 CompareOperationFeedback::kOddball);
12786 Goto(&if_notequal);
12787 }
12788
12789 BIND(&if_right_not_number_or_oddball_or_undetectable)Bind(&if_right_not_number_or_oddball_or_undetectable);
12790 {
12791 if (var_type_feedback != nullptr) {
12792 // Track whether {right} is Null, Undefined or Receiver.
12793 CombineFeedback(
12794 var_type_feedback,
12795 CompareOperationFeedback::kReceiverOrNullOrUndefined);
12796 GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
12797 CombineFeedback(var_type_feedback,
12798 CompareOperationFeedback::kAny);
12799 }
12800 Goto(&if_notequal);
12801 }
12802 }
12803
12804 BIND(&if_left_boolean)Bind(&if_left_boolean);
12805 {
12806 CombineFeedback(var_type_feedback,
12807 CompareOperationFeedback::kBoolean);
12808
12809 // If {right} is a Boolean too, it must be a different Boolean.
12810 GotoIf(TaggedEqual(right_map, left_map), &if_notequal);
12811
12812 // Otherwise, convert {left} to number and try again.
12813 var_left = LoadObjectField(CAST(left)Cast(left), Oddball::kToNumberOffset);
12814 Goto(&loop);
12815 }
12816 }
12817
12818 BIND(&if_left_symbol)Bind(&if_left_symbol);
12819 {
12820 Label if_right_receiver(this);
12821 GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
12822 // {right} is not a JSReceiver and also not the same Symbol as {left},
12823 // so the result is "not equal".
12824 if (var_type_feedback != nullptr) {
12825 Label if_right_symbol(this);
12826 GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
12827 *var_type_feedback = SmiConstant(CompareOperationFeedback::kAny);
12828 Goto(&if_notequal);
12829
12830 BIND(&if_right_symbol)Bind(&if_right_symbol);
12831 {
12832 CombineFeedback(var_type_feedback,
12833 CompareOperationFeedback::kSymbol);
12834 Goto(&if_notequal);
12835 }
12836 } else {
12837 Goto(&if_notequal);
12838 }
12839
12840 BIND(&if_right_receiver)Bind(&if_right_receiver);
12841 {
12842 // {left} is a Primitive and {right} is a JSReceiver, so swapping
12843 // the order is not observable.
12844 if (var_type_feedback != nullptr) {
12845 *var_type_feedback = SmiConstant(CompareOperationFeedback::kAny);
12846 }
12847 Goto(&use_symmetry);
12848 }
12849 }
12850
12851 BIND(&if_left_receiver)Bind(&if_left_receiver);
12852 {
12853 CSA_DCHECK(this, IsJSReceiverInstanceType(left_type))((void)0);
12854 Label if_right_receiver(this), if_right_not_receiver(this);
12855 Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
12856 &if_right_not_receiver);
12857
12858 BIND(&if_right_receiver)Bind(&if_right_receiver);
12859 {
12860 // {left} and {right} are different JSReceiver references.
12861 CombineFeedback(var_type_feedback,
12862 CompareOperationFeedback::kReceiver);
12863 Goto(&if_notequal);
12864 }
12865
12866 BIND(&if_right_not_receiver)Bind(&if_right_not_receiver);
12867 {
12868 // Check if {right} is undetectable, which means it must be Null
12869 // or Undefined, since we already ruled out Receiver for {right}.
12870 Label if_right_undetectable(this),
12871 if_right_not_undetectable(this, Label::kDeferred);
12872 Branch(IsUndetectableMap(right_map), &if_right_undetectable,
12873 &if_right_not_undetectable);
12874
12875 BIND(&if_right_undetectable)Bind(&if_right_undetectable);
12876 {
12877 // When we get here, {right} must be either Null or Undefined.
12878 CSA_DCHECK(this, IsNullOrUndefined(right))((void)0);
12879 if (var_type_feedback != nullptr) {
12880 *var_type_feedback = SmiConstant(
12881 CompareOperationFeedback::kReceiverOrNullOrUndefined);
12882 }
12883 Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
12884 }
12885
12886 BIND(&if_right_not_undetectable)Bind(&if_right_not_undetectable);
12887 {
12888 // {right} is a Primitive, and neither Null or Undefined;
12889 // convert {left} to Primitive too.
12890 CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
12891 Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
12892 var_left = CallStub(callable, context(), left);
12893 Goto(&loop);
12894 }
12895 }
12896 }
12897 }
12898
12899 BIND(&do_right_stringtonumber)Bind(&do_right_stringtonumber);
12900 {
12901 if (var_type_feedback != nullptr) {
12902 TNode<Map> right_map = LoadMap(CAST(right)Cast(right));
12903 TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
12904 CombineFeedback(var_type_feedback,
12905 CollectFeedbackForString(right_type));
12906 }
12907 var_right = CallBuiltin(Builtin::kStringToNumber, context(), right);
12908 Goto(&loop);
12909 }
12910
12911 BIND(&use_symmetry)Bind(&use_symmetry);
12912 {
12913 var_left = right;
12914 var_right = left;
12915 Goto(&loop);
12916 }
12917 }
12918
12919 BIND(&do_float_comparison)Bind(&do_float_comparison);
12920 {
12921 Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
12922 &if_equal, &if_notequal);
12923 }
12924
12925 BIND(&if_equal)Bind(&if_equal);
12926 {
12927 result = TrueConstant();
12928 Goto(&end);
12929 }
12930
12931 BIND(&if_notequal)Bind(&if_notequal);
12932 {
12933 result = FalseConstant();
12934 Goto(&end);
12935 }
12936
12937 BIND(&end)Bind(&end);
12938 return result.value();
12939}
12940
12941TNode<Oddball> CodeStubAssembler::StrictEqual(
12942 TNode<Object> lhs, TNode<Object> rhs, TVariable<Smi>* var_type_feedback) {
12943 // Pseudo-code for the algorithm below:
12944 //
12945 // if (lhs == rhs) {
12946 // if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
12947 // return true;
12948 // }
12949 // if (!lhs->IsSmi()) {
12950 // if (lhs->IsHeapNumber()) {
12951 // if (rhs->IsSmi()) {
12952 // return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
12953 // } else if (rhs->IsHeapNumber()) {
12954 // return HeapNumber::cast(rhs)->value() ==
12955 // HeapNumber::cast(lhs)->value();
12956 // } else {
12957 // return false;
12958 // }
12959 // } else {
12960 // if (rhs->IsSmi()) {
12961 // return false;
12962 // } else {
12963 // if (lhs->IsString()) {
12964 // if (rhs->IsString()) {
12965 // return %StringEqual(lhs, rhs);
12966 // } else {
12967 // return false;
12968 // }
12969 // } else if (lhs->IsBigInt()) {
12970 // if (rhs->IsBigInt()) {
12971 // return %BigIntEqualToBigInt(lhs, rhs);
12972 // } else {
12973 // return false;
12974 // }
12975 // } else {
12976 // return false;
12977 // }
12978 // }
12979 // }
12980 // } else {
12981 // if (rhs->IsSmi()) {
12982 // return false;
12983 // } else {
12984 // if (rhs->IsHeapNumber()) {
12985 // return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
12986 // } else {
12987 // return false;
12988 // }
12989 // }
12990 // }
12991
12992 Label if_equal(this), if_notequal(this), if_not_equivalent_types(this),
12993 end(this);
12994 TVARIABLE(Oddball, result)TVariable<Oddball> result(this);
12995
12996 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
12997
12998 // Check if {lhs} and {rhs} refer to the same object.
12999 Label if_same(this), if_notsame(this);
13000 Branch(TaggedEqual(lhs, rhs), &if_same, &if_notsame);
13001
13002 BIND(&if_same)Bind(&if_same);
13003 {
13004 // The {lhs} and {rhs} reference the exact same value, yet we need special
13005 // treatment for HeapNumber, as NaN is not equal to NaN.
13006 GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
13007 }
13008
13009 BIND(&if_notsame)Bind(&if_notsame);
13010 {
13011 // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
13012 // BigInt and String they can still be considered equal.
13013
13014 // Check if {lhs} is a Smi or a HeapObject.
13015 Label if_lhsissmi(this), if_lhsisnotsmi(this);
13016 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
13017
13018 BIND(&if_lhsisnotsmi)Bind(&if_lhsisnotsmi);
13019 {
13020 // Load the map of {lhs}.
13021 TNode<Map> lhs_map = LoadMap(CAST(lhs)Cast(lhs));
13022
13023 // Check if {lhs} is a HeapNumber.
13024 Label if_lhsisnumber(this), if_lhsisnotnumber(this);
13025 Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
13026
13027 BIND(&if_lhsisnumber)Bind(&if_lhsisnumber);
13028 {
13029 // Check if {rhs} is a Smi or a HeapObject.
13030 Label if_rhsissmi(this), if_rhsisnotsmi(this);
13031 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
13032
13033 BIND(&if_rhsissmi)Bind(&if_rhsissmi);
13034 {
13035 // Convert {lhs} and {rhs} to floating point values.
13036 TNode<Float64T> lhs_value = LoadHeapNumberValue(CAST(lhs)Cast(lhs));
13037 TNode<Float64T> rhs_value = SmiToFloat64(CAST(rhs)Cast(rhs));
13038
13039 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
13040
13041 // Perform a floating point comparison of {lhs} and {rhs}.
13042 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
13043 }
13044
13045 BIND(&if_rhsisnotsmi)Bind(&if_rhsisnotsmi);
13046 {
13047 TNode<HeapObject> rhs_ho = CAST(rhs)Cast(rhs);
13048 // Load the map of {rhs}.
13049 TNode<Map> rhs_map = LoadMap(rhs_ho);
13050
13051 // Check if {rhs} is also a HeapNumber.
13052 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
13053 Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
13054
13055 BIND(&if_rhsisnumber)Bind(&if_rhsisnumber);
13056 {
13057 // Convert {lhs} and {rhs} to floating point values.
13058 TNode<Float64T> lhs_value = LoadHeapNumberValue(CAST(lhs)Cast(lhs));
13059 TNode<Float64T> rhs_value = LoadHeapNumberValue(CAST(rhs)Cast(rhs));
13060
13061 CombineFeedback(var_type_feedback,
13062 CompareOperationFeedback::kNumber);
13063
13064 // Perform a floating point comparison of {lhs} and {rhs}.
13065 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
13066 }
13067
13068 BIND(&if_rhsisnotnumber)Bind(&if_rhsisnotnumber);
13069 Goto(&if_not_equivalent_types);
13070 }
13071 }
13072
13073 BIND(&if_lhsisnotnumber)Bind(&if_lhsisnotnumber);
13074 {
13075 // Check if {rhs} is a Smi or a HeapObject.
13076 Label if_rhsissmi(this), if_rhsisnotsmi(this);
13077 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
13078
13079 BIND(&if_rhsissmi)Bind(&if_rhsissmi);
13080 Goto(&if_not_equivalent_types);
13081
13082 BIND(&if_rhsisnotsmi)Bind(&if_rhsisnotsmi);
13083 {
13084 // Load the instance type of {lhs}.
13085 TNode<Uint16T> lhs_instance_type = LoadMapInstanceType(lhs_map);
13086
13087 // Check if {lhs} is a String.
13088 Label if_lhsisstring(this, Label::kDeferred), if_lhsisnotstring(this);
13089 Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
13090 &if_lhsisnotstring);
13091
13092 BIND(&if_lhsisstring)Bind(&if_lhsisstring);
13093 {
13094 // Load the instance type of {rhs}.
13095 TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs)Cast(rhs));
13096
13097 // Check if {rhs} is also a String.
13098 Label if_rhsisstring(this, Label::kDeferred),
13099 if_rhsisnotstring(this);
13100 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
13101 &if_rhsisnotstring);
13102
13103 BIND(&if_rhsisstring)Bind(&if_rhsisstring);
13104 {
13105 if (var_type_feedback != nullptr) {
13106 TNode<Smi> lhs_feedback =
13107 CollectFeedbackForString(lhs_instance_type);
13108 TNode<Smi> rhs_feedback =
13109 CollectFeedbackForString(rhs_instance_type);
13110 *var_type_feedback = SmiOr(lhs_feedback, rhs_feedback);
13111 }
13112 result = CAST(CallBuiltin(Builtin::kStringEqual,Cast(CallBuiltin(Builtin::kStringEqual, NoContextConstant(), lhs
, rhs))
13113 NoContextConstant(), lhs, rhs))Cast(CallBuiltin(Builtin::kStringEqual, NoContextConstant(), lhs
, rhs))
;
13114 Goto(&end);
13115 }
13116
13117 BIND(&if_rhsisnotstring)Bind(&if_rhsisnotstring);
13118 Goto(&if_not_equivalent_types);
13119 }
13120
13121 BIND(&if_lhsisnotstring)Bind(&if_lhsisnotstring);
13122 {
13123 // Check if {lhs} is a BigInt.
13124 Label if_lhsisbigint(this), if_lhsisnotbigint(this);
13125 Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
13126 &if_lhsisnotbigint);
13127
13128 BIND(&if_lhsisbigint)Bind(&if_lhsisbigint);
13129 {
13130 // Load the instance type of {rhs}.
13131 TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs)Cast(rhs));
13132
13133 // Check if {rhs} is also a BigInt.
13134 Label if_rhsisbigint(this, Label::kDeferred),
13135 if_rhsisnotbigint(this);
13136 Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
13137 &if_rhsisnotbigint);
13138
13139 BIND(&if_rhsisbigint)Bind(&if_rhsisbigint);
13140 {
13141 CombineFeedback(var_type_feedback,
13142 CompareOperationFeedback::kBigInt);
13143 result = CAST(CallRuntime(Runtime::kBigIntEqualToBigInt,Cast(CallRuntime(Runtime::kBigIntEqualToBigInt, NoContextConstant
(), lhs, rhs))
13144 NoContextConstant(), lhs, rhs))Cast(CallRuntime(Runtime::kBigIntEqualToBigInt, NoContextConstant
(), lhs, rhs))
;
13145 Goto(&end);
13146 }
13147
13148 BIND(&if_rhsisnotbigint)Bind(&if_rhsisnotbigint);
13149 Goto(&if_not_equivalent_types);
13150 }
13151
13152 BIND(&if_lhsisnotbigint)Bind(&if_lhsisnotbigint);
13153 if (var_type_feedback != nullptr) {
13154 // Load the instance type of {rhs}.
13155 TNode<Map> rhs_map = LoadMap(CAST(rhs)Cast(rhs));
13156 TNode<Uint16T> rhs_instance_type = LoadMapInstanceType(rhs_map);
13157
13158 Label if_lhsissymbol(this), if_lhsisreceiver(this),
13159 if_lhsisoddball(this);
13160 GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
13161 &if_lhsisreceiver);
13162 GotoIf(IsBooleanMap(lhs_map), &if_not_equivalent_types);
13163 GotoIf(IsOddballInstanceType(lhs_instance_type),
13164 &if_lhsisoddball);
13165 Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
13166 &if_not_equivalent_types);
13167
13168 BIND(&if_lhsisreceiver)Bind(&if_lhsisreceiver);
13169 {
13170 GotoIf(IsBooleanMap(rhs_map), &if_not_equivalent_types);
13171 OverwriteFeedback(var_type_feedback,
13172 CompareOperationFeedback::kReceiver);
13173 GotoIf(IsJSReceiverInstanceType(rhs_instance_type),
13174 &if_notequal);
13175 OverwriteFeedback(
13176 var_type_feedback,
13177 CompareOperationFeedback::kReceiverOrNullOrUndefined);
13178 GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
13179 Goto(&if_not_equivalent_types);
13180 }
13181
13182 BIND(&if_lhsisoddball)Bind(&if_lhsisoddball);
13183 {
13184 Label if_lhsisboolean(this), if_lhsisnotboolean(this);
13185 Branch(IsBooleanMap(lhs_map), &if_lhsisboolean,
13186 &if_lhsisnotboolean);
13187
13188 BIND(&if_lhsisboolean)Bind(&if_lhsisboolean);
13189 {
13190 OverwriteFeedback(var_type_feedback,
13191 CompareOperationFeedback::kNumberOrOddball);
13192 GotoIf(IsBooleanMap(rhs_map), &if_notequal);
13193 Goto(&if_not_equivalent_types);
13194 }
13195
13196 BIND(&if_lhsisnotboolean)Bind(&if_lhsisnotboolean);
13197 {
13198 Label if_rhsisheapnumber(this), if_rhsisnotheapnumber(this);
13199
13200 STATIC_ASSERT(LAST_PRIMITIVE_HEAP_OBJECT_TYPE ==static_assert(LAST_PRIMITIVE_HEAP_OBJECT_TYPE == ODDBALL_TYPE
, "LAST_PRIMITIVE_HEAP_OBJECT_TYPE == ODDBALL_TYPE")
13201 ODDBALL_TYPE)static_assert(LAST_PRIMITIVE_HEAP_OBJECT_TYPE == ODDBALL_TYPE
, "LAST_PRIMITIVE_HEAP_OBJECT_TYPE == ODDBALL_TYPE")
;
13202 GotoIf(Int32LessThan(rhs_instance_type,
13203 Int32Constant(ODDBALL_TYPE)),
13204 &if_not_equivalent_types);
13205
13206 Branch(IsHeapNumberMap(rhs_map), &if_rhsisheapnumber,
13207 &if_rhsisnotheapnumber);
13208
13209 BIND(&if_rhsisheapnumber)Bind(&if_rhsisheapnumber);
13210 {
13211 OverwriteFeedback(
13212 var_type_feedback,
13213 CompareOperationFeedback::kNumberOrOddball);
13214 Goto(&if_not_equivalent_types);
13215 }
13216
13217 BIND(&if_rhsisnotheapnumber)Bind(&if_rhsisnotheapnumber);
13218 {
13219 OverwriteFeedback(
13220 var_type_feedback,
13221 CompareOperationFeedback::kReceiverOrNullOrUndefined);
13222 Goto(&if_notequal);
13223 }
13224 }
13225 }
13226
13227 BIND(&if_lhsissymbol)Bind(&if_lhsissymbol);
13228 {
13229 GotoIfNot(IsSymbolInstanceType(rhs_instance_type),
13230 &if_not_equivalent_types);
13231 OverwriteFeedback(var_type_feedback,
13232 CompareOperationFeedback::kSymbol);
13233 Goto(&if_notequal);
13234 }
13235 } else {
13236 Goto(&if_notequal);
13237 }
13238 }
13239 }
13240 }
13241 }
13242
13243 BIND(&if_lhsissmi)Bind(&if_lhsissmi);
13244 {
13245 // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
13246 // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
13247 // HeapNumber with an equal floating point value.
13248
13249 // Check if {rhs} is a Smi or a HeapObject.
13250 Label if_rhsissmi(this), if_rhsisnotsmi(this);
13251 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
13252
13253 BIND(&if_rhsissmi)Bind(&if_rhsissmi);
13254 CombineFeedback(var_type_feedback,
13255 CompareOperationFeedback::kSignedSmall);
13256 Goto(&if_notequal);
13257
13258 BIND(&if_rhsisnotsmi)Bind(&if_rhsisnotsmi);
13259 {
13260 // Load the map of the {rhs}.
13261 TNode<Map> rhs_map = LoadMap(CAST(rhs)Cast(rhs));
13262
13263 // The {rhs} could be a HeapNumber with the same value as {lhs}.
13264 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
13265 Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
13266
13267 BIND(&if_rhsisnumber)Bind(&if_rhsisnumber);
13268 {
13269 // Convert {lhs} and {rhs} to floating point values.
13270 TNode<Float64T> lhs_value = SmiToFloat64(CAST(lhs)Cast(lhs));
13271 TNode<Float64T> rhs_value = LoadHeapNumberValue(CAST(rhs)Cast(rhs));
13272
13273 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
13274
13275 // Perform a floating point comparison of {lhs} and {rhs}.
13276 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
13277 }
13278
13279 BIND(&if_rhsisnotnumber)Bind(&if_rhsisnotnumber);
13280 {
13281 TNode<Uint16T> rhs_instance_type = LoadMapInstanceType(rhs_map);
13282 GotoIfNot(IsOddballInstanceType(rhs_instance_type),
13283 &if_not_equivalent_types);
13284 OverwriteFeedback(var_type_feedback,
13285 CompareOperationFeedback::kNumberOrOddball);
13286 Goto(&if_notequal);
13287 }
13288 }
13289 }
13290 }
13291
13292 BIND(&if_equal)Bind(&if_equal);
13293 {
13294 result = TrueConstant();
13295 Goto(&end);
13296 }
13297
13298 BIND(&if_not_equivalent_types)Bind(&if_not_equivalent_types);
13299 {
13300 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
13301 Goto(&if_notequal);
13302 }
13303
13304 BIND(&if_notequal)Bind(&if_notequal);
13305 {
13306 result = FalseConstant();
13307 Goto(&end);
13308 }
13309
13310 BIND(&end)Bind(&end);
13311 return result.value();
13312}
13313
13314// ECMA#sec-samevalue
13315// This algorithm differs from the Strict Equality Comparison Algorithm in its
13316// treatment of signed zeroes and NaNs.
13317void CodeStubAssembler::BranchIfSameValue(TNode<Object> lhs, TNode<Object> rhs,
13318 Label* if_true, Label* if_false,
13319 SameValueMode mode) {
13320 TVARIABLE(Float64T, var_lhs_value)TVariable<Float64T> var_lhs_value(this);
13321 TVARIABLE(Float64T, var_rhs_value)TVariable<Float64T> var_rhs_value(this);
13322 Label do_fcmp(this);
13323
13324 // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
13325 // StrictEqual - SameValue considers two NaNs to be equal.
13326 GotoIf(TaggedEqual(lhs, rhs), if_true);
13327
13328 // Check if the {lhs} is a Smi.
13329 Label if_lhsissmi(this), if_lhsisheapobject(this);
13330 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
13331
13332 BIND(&if_lhsissmi)Bind(&if_lhsissmi);
13333 {
13334 // Since {lhs} is a Smi, the comparison can only yield true
13335 // iff the {rhs} is a HeapNumber with the same float64 value.
13336 Branch(TaggedIsSmi(rhs), if_false, [&] {
13337 GotoIfNot(IsHeapNumber(CAST(rhs)Cast(rhs)), if_false);
13338 var_lhs_value = SmiToFloat64(CAST(lhs)Cast(lhs));
13339 var_rhs_value = LoadHeapNumberValue(CAST(rhs)Cast(rhs));
13340 Goto(&do_fcmp);
13341 });
13342 }
13343
13344 BIND(&if_lhsisheapobject)Bind(&if_lhsisheapobject);
13345 {
13346 // Check if the {rhs} is a Smi.
13347 Branch(
13348 TaggedIsSmi(rhs),
13349 [&] {
13350 // Since {rhs} is a Smi, the comparison can only yield true
13351 // iff the {lhs} is a HeapNumber with the same float64 value.
13352 GotoIfNot(IsHeapNumber(CAST(lhs)Cast(lhs)), if_false);
13353 var_lhs_value = LoadHeapNumberValue(CAST(lhs)Cast(lhs));
13354 var_rhs_value = SmiToFloat64(CAST(rhs)Cast(rhs));
13355 Goto(&do_fcmp);
13356 },
13357 [&] {
13358 // Now this can only yield true if either both {lhs} and {rhs} are
13359 // HeapNumbers with the same value, or both are Strings with the
13360 // same character sequence, or both are BigInts with the same
13361 // value.
13362 Label if_lhsisheapnumber(this), if_lhsisstring(this),
13363 if_lhsisbigint(this);
13364 const TNode<Map> lhs_map = LoadMap(CAST(lhs)Cast(lhs));
13365 GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
13366 if (mode != SameValueMode::kNumbersOnly) {
13367 const TNode<Uint16T> lhs_instance_type =
13368 LoadMapInstanceType(lhs_map);
13369 GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
13370 GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint);
13371 }
13372 Goto(if_false);
13373
13374 BIND(&if_lhsisheapnumber)Bind(&if_lhsisheapnumber);
13375 {
13376 GotoIfNot(IsHeapNumber(CAST(rhs)Cast(rhs)), if_false);
13377 var_lhs_value = LoadHeapNumberValue(CAST(lhs)Cast(lhs));
13378 var_rhs_value = LoadHeapNumberValue(CAST(rhs)Cast(rhs));
13379 Goto(&do_fcmp);
13380 }
13381
13382 if (mode != SameValueMode::kNumbersOnly) {
13383 BIND(&if_lhsisstring)Bind(&if_lhsisstring);
13384 {
13385 // Now we can only yield true if {rhs} is also a String
13386 // with the same sequence of characters.
13387 GotoIfNot(IsString(CAST(rhs)Cast(rhs)), if_false);
13388 const TNode<Object> result = CallBuiltin(
13389 Builtin::kStringEqual, NoContextConstant(), lhs, rhs);
13390 Branch(IsTrue(result), if_true, if_false);
13391 }
13392
13393 BIND(&if_lhsisbigint)Bind(&if_lhsisbigint);
13394 {
13395 GotoIfNot(IsBigInt(CAST(rhs)Cast(rhs)), if_false);
13396 const TNode<Object> result = CallRuntime(
13397 Runtime::kBigIntEqualToBigInt, NoContextConstant(), lhs, rhs);
13398 Branch(IsTrue(result), if_true, if_false);
13399 }
13400 }
13401 });
13402 }
13403
13404 BIND(&do_fcmp)Bind(&do_fcmp);
13405 {
13406 TNode<Float64T> lhs_value = UncheckedCast<Float64T>(var_lhs_value.value());
13407 TNode<Float64T> rhs_value = UncheckedCast<Float64T>(var_rhs_value.value());
13408 BranchIfSameNumberValue(lhs_value, rhs_value, if_true, if_false);
13409 }
13410}
13411
13412void CodeStubAssembler::BranchIfSameNumberValue(TNode<Float64T> lhs_value,
13413 TNode<Float64T> rhs_value,
13414 Label* if_true,
13415 Label* if_false) {
13416 Label if_equal(this), if_notequal(this);
13417 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
13418
13419 BIND(&if_equal)Bind(&if_equal);
13420 {
13421 // We still need to handle the case when {lhs} and {rhs} are -0.0 and
13422 // 0.0 (or vice versa). Compare the high word to
13423 // distinguish between the two.
13424 const TNode<Uint32T> lhs_hi_word = Float64ExtractHighWord32(lhs_value);
13425 const TNode<Uint32T> rhs_hi_word = Float64ExtractHighWord32(rhs_value);
13426
13427 // If x is +0 and y is -0, return false.
13428 // If x is -0 and y is +0, return false.
13429 Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
13430 }
13431
13432 BIND(&if_notequal)Bind(&if_notequal);
13433 {
13434 // Return true iff both {rhs} and {lhs} are NaN.
13435 GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
13436 Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
13437 }
13438}
13439
13440TNode<Oddball> CodeStubAssembler::HasProperty(TNode<Context> context,
13441 TNode<Object> object,
13442 TNode<Object> key,
13443 HasPropertyLookupMode mode) {
13444 Label call_runtime(this, Label::kDeferred), return_true(this),
13445 return_false(this), end(this), if_proxy(this, Label::kDeferred);
13446
13447 CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
13448 [this, &return_true](
13449 TNode<HeapObject> receiver, TNode<HeapObject> holder,
13450 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
13451 TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
13452 TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
13453 &return_true, next_holder, if_bailout);
13454 };
13455
13456 CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
13457 [this, &return_true, &return_false](
13458 TNode<HeapObject> receiver, TNode<HeapObject> holder,
13459 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
13460 TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
13461 TryLookupElement(holder, holder_map, holder_instance_type, index,
13462 &return_true, &return_false, next_holder, if_bailout);
13463 };
13464
13465 const bool kHandlePrivateNames = mode == HasPropertyLookupMode::kHasProperty;
13466 TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
13467 lookup_element_in_holder, &return_false,
13468 &call_runtime, &if_proxy, kHandlePrivateNames);
13469
13470 TVARIABLE(Oddball, result)TVariable<Oddball> result(this);
13471
13472 BIND(&if_proxy)Bind(&if_proxy);
13473 {
13474 TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key))Cast(CallBuiltin(Builtin::kToName, context, key));
13475 switch (mode) {
13476 case kHasProperty:
13477 GotoIf(IsPrivateSymbol(name), &call_runtime);
13478
13479 result = CAST(Cast(CallBuiltin(Builtin::kProxyHasProperty, context, object,
name))
13480 CallBuiltin(Builtin::kProxyHasProperty, context, object, name))Cast(CallBuiltin(Builtin::kProxyHasProperty, context, object,
name))
;
13481 Goto(&end);
13482 break;
13483 case kForInHasProperty:
13484 Goto(&call_runtime);
13485 break;
13486 }
13487 }
13488
13489 BIND(&return_true)Bind(&return_true);
13490 {
13491 result = TrueConstant();
13492 Goto(&end);
13493 }
13494
13495 BIND(&return_false)Bind(&return_false);
13496 {
13497 result = FalseConstant();
13498 Goto(&end);
13499 }
13500
13501 BIND(&call_runtime)Bind(&call_runtime);
13502 {
13503 Runtime::FunctionId fallback_runtime_function_id;
13504 switch (mode) {
13505 case kHasProperty:
13506 fallback_runtime_function_id = Runtime::kHasProperty;
13507 break;
13508 case kForInHasProperty:
13509 fallback_runtime_function_id = Runtime::kForInHasProperty;
13510 break;
13511 }
13512
13513 result =
13514 CAST(CallRuntime(fallback_runtime_function_id, context, object, key))Cast(CallRuntime(fallback_runtime_function_id, context, object
, key))
;
13515 Goto(&end);
13516 }
13517
13518 BIND(&end)Bind(&end);
13519 CSA_DCHECK(this, IsBoolean(result.value()))((void)0);
13520 return result.value();
13521}
13522
13523void CodeStubAssembler::ForInPrepare(TNode<HeapObject> enumerator,
13524 TNode<UintPtrT> slot,
13525 TNode<HeapObject> maybe_feedback_vector,
13526 TNode<FixedArray>* cache_array_out,
13527 TNode<Smi>* cache_length_out,
13528 UpdateFeedbackMode update_feedback_mode) {
13529 // Check if we're using an enum cache.
13530 TVARIABLE(FixedArray, cache_array)TVariable<FixedArray> cache_array(this);
13531 TVARIABLE(Smi, cache_length)TVariable<Smi> cache_length(this);
13532 Label if_fast(this), if_slow(this, Label::kDeferred), out(this);
13533 Branch(IsMap(enumerator), &if_fast, &if_slow);
13534
13535 BIND(&if_fast)Bind(&if_fast);
13536 {
13537 // Load the enumeration length and cache from the {enumerator}.
13538 TNode<Map> map_enumerator = CAST(enumerator)Cast(enumerator);
13539 TNode<WordT> enum_length = LoadMapEnumLength(map_enumerator);
13540 CSA_DCHECK(this, WordNotEqual(enum_length,((void)0)
13541 IntPtrConstant(kInvalidEnumCacheSentinel)))((void)0);
13542 TNode<DescriptorArray> descriptors = LoadMapDescriptors(map_enumerator);
13543 TNode<EnumCache> enum_cache = LoadObjectField<EnumCache>(
13544 descriptors, DescriptorArray::kEnumCacheOffset);
13545 TNode<FixedArray> enum_keys =
13546 LoadObjectField<FixedArray>(enum_cache, EnumCache::kKeysOffset);
13547
13548 // Check if we have enum indices available.
13549 TNode<FixedArray> enum_indices =
13550 LoadObjectField<FixedArray>(enum_cache, EnumCache::kIndicesOffset);
13551 TNode<IntPtrT> enum_indices_length =
13552 LoadAndUntagFixedArrayBaseLength(enum_indices);
13553 TNode<Smi> feedback = SelectSmiConstant(
13554 IntPtrLessThanOrEqual(enum_length, enum_indices_length),
13555 static_cast<int>(ForInFeedback::kEnumCacheKeysAndIndices),
13556 static_cast<int>(ForInFeedback::kEnumCacheKeys));
13557 UpdateFeedback(feedback, maybe_feedback_vector, slot, update_feedback_mode);
13558
13559 cache_array = enum_keys;
13560 cache_length = SmiTag(Signed(enum_length));
13561 Goto(&out);
13562 }
13563
13564 BIND(&if_slow)Bind(&if_slow);
13565 {
13566 // The {enumerator} is a FixedArray with all the keys to iterate.
13567 TNode<FixedArray> array_enumerator = CAST(enumerator)Cast(enumerator);
13568
13569 // Record the fact that we hit the for-in slow-path.
13570 UpdateFeedback(SmiConstant(ForInFeedback::kAny), maybe_feedback_vector,
13571 slot, update_feedback_mode);
13572
13573 cache_array = array_enumerator;
13574 cache_length = LoadFixedArrayBaseLength(array_enumerator);
13575 Goto(&out);
13576 }
13577
13578 BIND(&out)Bind(&out);
13579 *cache_array_out = cache_array.value();
13580 *cache_length_out = cache_length.value();
13581}
13582
13583TNode<String> CodeStubAssembler::Typeof(TNode<Object> value) {
13584 TVARIABLE(String, result_var)TVariable<String> result_var(this);
13585
13586 Label return_number(this, Label::kDeferred), if_oddball(this),
13587 return_function(this), return_undefined(this), return_object(this),
13588 return_string(this), return_bigint(this), return_result(this);
13589
13590 GotoIf(TaggedIsSmi(value), &return_number);
13591
13592 TNode<HeapObject> value_heap_object = CAST(value)Cast(value);
13593 TNode<Map> map = LoadMap(value_heap_object);
13594
13595 GotoIf(IsHeapNumberMap(map), &return_number);
13596
13597 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
13598
13599 GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);
13600
13601 TNode<Int32T> callable_or_undetectable_mask =
13602 Word32And(LoadMapBitField(map),
13603 Int32Constant(Map::Bits1::IsCallableBit::kMask |
13604 Map::Bits1::IsUndetectableBit::kMask));
13605
13606 GotoIf(Word32Equal(callable_or_undetectable_mask,
13607 Int32Constant(Map::Bits1::IsCallableBit::kMask)),
13608 &return_function);
13609
13610 GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
13611 &return_undefined);
13612
13613 GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
13614
13615 GotoIf(IsStringInstanceType(instance_type), &return_string);
13616
13617 GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);
13618
13619 CSA_DCHECK(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE))((void)0);
13620 result_var = HeapConstant(isolate()->factory()->symbol_string());
13621 Goto(&return_result);
13622
13623 BIND(&return_number)Bind(&return_number);
13624 {
13625 result_var = HeapConstant(isolate()->factory()->number_string());
13626 Goto(&return_result);
13627 }
13628
13629 BIND(&if_oddball)Bind(&if_oddball);
13630 {
13631 TNode<String> type =
13632 CAST(LoadObjectField(value_heap_object, Oddball::kTypeOfOffset))Cast(LoadObjectField(value_heap_object, Oddball::kTypeOfOffset
))
;
13633 result_var = type;
13634 Goto(&return_result);
13635 }
13636
13637 BIND(&return_function)Bind(&return_function);
13638 {
13639 result_var = HeapConstant(isolate()->factory()->function_string());
13640 Goto(&return_result);
13641 }
13642
13643 BIND(&return_undefined)Bind(&return_undefined);
13644 {
13645 result_var = HeapConstant(isolate()->factory()->undefined_string());
13646 Goto(&return_result);
13647 }
13648
13649 BIND(&return_object)Bind(&return_object);
13650 {
13651 result_var = HeapConstant(isolate()->factory()->object_string());
13652 Goto(&return_result);
13653 }
13654
13655 BIND(&return_string)Bind(&return_string);
13656 {
13657 result_var = HeapConstant(isolate()->factory()->string_string());
13658 Goto(&return_result);
13659 }
13660
13661 BIND(&return_bigint)Bind(&return_bigint);
13662 {
13663 result_var = HeapConstant(isolate()->factory()->bigint_string());
13664 Goto(&return_result);
13665 }
13666
13667 BIND(&return_result)Bind(&return_result);
13668 return result_var.value();
13669}
13670
13671TNode<HeapObject> CodeStubAssembler::GetSuperConstructor(
13672 TNode<JSFunction> active_function) {
13673 TNode<Map> map = LoadMap(active_function);
13674 return LoadMapPrototype(map);
13675}
13676
13677TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
13678 TNode<Context> context, TNode<Object> object,
13679 TNode<JSReceiver> default_constructor) {
13680 Isolate* isolate = this->isolate();
13681 TVARIABLE(JSReceiver, var_result, default_constructor)TVariable<JSReceiver> var_result(default_constructor, this
)
;
13682
13683 // 2. Let C be ? Get(O, "constructor").
13684 TNode<Object> constructor =
13685 GetProperty(context, object, isolate->factory()->constructor_string());
13686
13687 // 3. If C is undefined, return defaultConstructor.
13688 Label out(this);
13689 GotoIf(IsUndefined(constructor), &out);
13690
13691 // 4. If Type(C) is not Object, throw a TypeError exception.
13692 ThrowIfNotJSReceiver(context, constructor,
13693 MessageTemplate::kConstructorNotReceiver, "");
13694
13695 // 5. Let S be ? Get(C, @@species).
13696 TNode<Object> species =
13697 GetProperty(context, constructor, isolate->factory()->species_symbol());
13698
13699 // 6. If S is either undefined or null, return defaultConstructor.
13700 GotoIf(IsNullOrUndefined(species), &out);
13701
13702 // 7. If IsConstructor(S) is true, return S.
13703 Label throw_error(this);
13704 GotoIf(TaggedIsSmi(species), &throw_error);
13705 GotoIfNot(IsConstructorMap(LoadMap(CAST(species)Cast(species))), &throw_error);
13706 var_result = CAST(species)Cast(species);
13707 Goto(&out);
13708
13709 // 8. Throw a TypeError exception.
13710 BIND(&throw_error)Bind(&throw_error);
13711 ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);
13712
13713 BIND(&out)Bind(&out);
13714 return var_result.value();
13715}
13716
13717TNode<Oddball> CodeStubAssembler::InstanceOf(TNode<Object> object,
13718 TNode<Object> callable,
13719 TNode<Context> context) {
13720 TVARIABLE(Oddball, var_result)TVariable<Oddball> var_result(this);
13721 Label if_notcallable(this, Label::kDeferred),
13722 if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
13723 if_nohandler(this, Label::kDeferred), return_true(this),
13724 return_false(this), return_result(this, &var_result);
13725
13726 // Ensure that the {callable} is actually a JSReceiver.
13727 GotoIf(TaggedIsSmi(callable), &if_notreceiver);
13728 GotoIfNot(IsJSReceiver(CAST(callable)Cast(callable)), &if_notreceiver);
13729
13730 // Load the @@hasInstance property from {callable}.
13731 TNode<Object> inst_of_handler =
13732 GetProperty(context, callable, HasInstanceSymbolConstant());
13733
13734 // Optimize for the likely case where {inst_of_handler} is the builtin
13735 // Function.prototype[@@hasInstance] method, and emit a direct call in
13736 // that case without any additional checking.
13737 TNode<NativeContext> native_context = LoadNativeContext(context);
13738 TNode<Object> function_has_instance =
13739 LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
13740 GotoIfNot(TaggedEqual(inst_of_handler, function_has_instance),
13741 &if_otherhandler);
13742 {
13743 // Call to Function.prototype[@@hasInstance] directly.
13744 Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance)(isolate())->builtins()->code_handle(i::Builtin::kFunctionPrototypeHasInstance
)
,
13745 CallTrampolineDescriptor{});
13746 var_result =
13747 CAST(CallJS(builtin, context, inst_of_handler, callable, object))Cast(CallJS(builtin, context, inst_of_handler, callable, object
))
;
13748 Goto(&return_result);
13749 }
13750
13751 BIND(&if_otherhandler)Bind(&if_otherhandler);
13752 {
13753 // Check if there's actually an {inst_of_handler}.
13754 GotoIf(IsNull(inst_of_handler), &if_nohandler);
13755 GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
13756
13757 // Call the {inst_of_handler} for {callable} and {object}.
13758 TNode<Object> result = Call(context, inst_of_handler, callable, object);
13759
13760 // Convert the {result} to a Boolean.
13761 BranchIfToBooleanIsTrue(result, &return_true, &return_false);
13762 }
13763
13764 BIND(&if_nohandler)Bind(&if_nohandler);
13765 {
13766 // Ensure that the {callable} is actually Callable.
13767 GotoIfNot(IsCallable(CAST(callable)Cast(callable)), &if_notcallable);
13768
13769 // Use the OrdinaryHasInstance algorithm.
13770 var_result = CAST(Cast(CallBuiltin(Builtin::kOrdinaryHasInstance, context, callable
, object))
13771 CallBuiltin(Builtin::kOrdinaryHasInstance, context, callable, object))Cast(CallBuiltin(Builtin::kOrdinaryHasInstance, context, callable
, object))
;
13772 Goto(&return_result);
13773 }
13774
13775 BIND(&if_notcallable)Bind(&if_notcallable);
13776 { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }
13777
13778 BIND(&if_notreceiver)Bind(&if_notreceiver);
13779 { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }
13780
13781 BIND(&return_true)Bind(&return_true);
13782 var_result = TrueConstant();
13783 Goto(&return_result);
13784
13785 BIND(&return_false)Bind(&return_false);
13786 var_result = FalseConstant();
13787 Goto(&return_result);
13788
13789 BIND(&return_result)Bind(&return_result);
13790 return var_result.value();
13791}
13792
13793TNode<Number> CodeStubAssembler::NumberInc(TNode<Number> value) {
13794 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
13795 TVARIABLE(Float64T, var_finc_value)TVariable<Float64T> var_finc_value(this);
13796 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
13797 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
13798
13799 BIND(&if_issmi)Bind(&if_issmi);
13800 {
13801 Label if_overflow(this);
13802 TNode<Smi> smi_value = CAST(value)Cast(value);
13803 TNode<Smi> one = SmiConstant(1);
13804 var_result = TrySmiAdd(smi_value, one, &if_overflow);
13805 Goto(&end);
13806
13807 BIND(&if_overflow)Bind(&if_overflow);
13808 {
13809 var_finc_value = SmiToFloat64(smi_value);
13810 Goto(&do_finc);
13811 }
13812 }
13813
13814 BIND(&if_isnotsmi)Bind(&if_isnotsmi);
13815 {
13816 TNode<HeapNumber> heap_number_value = CAST(value)Cast(value);
13817
13818 // Load the HeapNumber value.
13819 var_finc_value = LoadHeapNumberValue(heap_number_value);
13820 Goto(&do_finc);
13821 }
13822
13823 BIND(&do_finc)Bind(&do_finc);
13824 {
13825 TNode<Float64T> finc_value = var_finc_value.value();
13826 TNode<Float64T> one = Float64Constant(1.0);
13827 TNode<Float64T> finc_result = Float64Add(finc_value, one);
13828 var_result = AllocateHeapNumberWithValue(finc_result);
13829 Goto(&end);
13830 }
13831
13832 BIND(&end)Bind(&end);
13833 return var_result.value();
13834}
13835
13836TNode<Number> CodeStubAssembler::NumberDec(TNode<Number> value) {
13837 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
13838 TVARIABLE(Float64T, var_fdec_value)TVariable<Float64T> var_fdec_value(this);
13839 Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
13840 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
13841
13842 BIND(&if_issmi)Bind(&if_issmi);
13843 {
13844 TNode<Smi> smi_value = CAST(value)Cast(value);
13845 TNode<Smi> one = SmiConstant(1);
13846 Label if_overflow(this);
13847 var_result = TrySmiSub(smi_value, one, &if_overflow);
13848 Goto(&end);
13849
13850 BIND(&if_overflow)Bind(&if_overflow);
13851 {
13852 var_fdec_value = SmiToFloat64(smi_value);
13853 Goto(&do_fdec);
13854 }
13855 }
13856
13857 BIND(&if_isnotsmi)Bind(&if_isnotsmi);
13858 {
13859 TNode<HeapNumber> heap_number_value = CAST(value)Cast(value);
13860
13861 // Load the HeapNumber value.
13862 var_fdec_value = LoadHeapNumberValue(heap_number_value);
13863 Goto(&do_fdec);
13864 }
13865
13866 BIND(&do_fdec)Bind(&do_fdec);
13867 {
13868 TNode<Float64T> fdec_value = var_fdec_value.value();
13869 TNode<Float64T> minus_one = Float64Constant(-1.0);
13870 TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
13871 var_result = AllocateHeapNumberWithValue(fdec_result);
13872 Goto(&end);
13873 }
13874
13875 BIND(&end)Bind(&end);
13876 return var_result.value();
13877}
13878
13879TNode<Number> CodeStubAssembler::NumberAdd(TNode<Number> a, TNode<Number> b) {
13880 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
13881 Label float_add(this, Label::kDeferred), end(this);
13882 GotoIf(TaggedIsNotSmi(a), &float_add);
13883 GotoIf(TaggedIsNotSmi(b), &float_add);
13884
13885 // Try fast Smi addition first.
13886 var_result = TrySmiAdd(CAST(a)Cast(a), CAST(b)Cast(b), &float_add);
13887 Goto(&end);
13888
13889 BIND(&float_add)Bind(&float_add);
13890 {
13891 var_result = ChangeFloat64ToTagged(
13892 Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
13893 Goto(&end);
13894 }
13895
13896 BIND(&end)Bind(&end);
13897 return var_result.value();
13898}
13899
13900TNode<Number> CodeStubAssembler::NumberSub(TNode<Number> a, TNode<Number> b) {
13901 TVARIABLE(Number, var_result)TVariable<Number> var_result(this);
13902 Label float_sub(this, Label::kDeferred), end(this);
13903 GotoIf(TaggedIsNotSmi(a), &float_sub);
13904 GotoIf(TaggedIsNotSmi(b), &float_sub);
13905
13906 // Try fast Smi subtraction first.
13907 var_result = TrySmiSub(CAST(a)Cast(a), CAST(b)Cast(b), &float_sub);
13908 Goto(&end);
13909
13910 BIND(&float_sub)Bind(&float_sub);
13911 {
13912 var_result = ChangeFloat64ToTagged(
13913 Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
13914 Goto(&end);
13915 }
13916
13917 BIND(&end)Bind(&end);
13918 return var_result.value();
13919}
13920
13921void CodeStubAssembler::GotoIfNotNumber(TNode<Object> input,
13922 Label* is_not_number) {
13923 Label is_number(this);
13924 GotoIf(TaggedIsSmi(input), &is_number);
13925 Branch(IsHeapNumber(CAST(input)Cast(input)), &is_number, is_not_number);
13926 BIND(&is_number)Bind(&is_number);
13927}
13928
13929void CodeStubAssembler::GotoIfNumber(TNode<Object> input, Label* is_number) {
13930 GotoIf(TaggedIsSmi(input), is_number);
13931 GotoIf(IsHeapNumber(CAST(input)Cast(input)), is_number);
13932}
13933
13934TNode<Number> CodeStubAssembler::BitwiseOp(TNode<Word32T> left32,
13935 TNode<Word32T> right32,
13936 Operation bitwise_op) {
13937 switch (bitwise_op) {
13938 case Operation::kBitwiseAnd:
13939 return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
13940 case Operation::kBitwiseOr:
13941 return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
13942 case Operation::kBitwiseXor:
13943 return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
13944 case Operation::kShiftLeft:
13945 if (!Word32ShiftIsSafe()) {
13946 right32 = Word32And(right32, Int32Constant(0x1F));
13947 }
13948 return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
13949 case Operation::kShiftRight:
13950 if (!Word32ShiftIsSafe()) {
13951 right32 = Word32And(right32, Int32Constant(0x1F));
13952 }
13953 return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
13954 case Operation::kShiftRightLogical:
13955 if (!Word32ShiftIsSafe()) {
13956 right32 = Word32And(right32, Int32Constant(0x1F));
13957 }
13958 return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
13959 default:
13960 break;
13961 }
13962 UNREACHABLE()V8_Fatal("unreachable code");
13963}
13964
13965TNode<Number> CodeStubAssembler::BitwiseSmiOp(TNode<Smi> left, TNode<Smi> right,
13966 Operation bitwise_op) {
13967 switch (bitwise_op) {
13968 case Operation::kBitwiseAnd:
13969 return SmiAnd(left, right);
13970 case Operation::kBitwiseOr:
13971 return SmiOr(left, right);
13972 case Operation::kBitwiseXor:
13973 return SmiXor(left, right);
13974 // Smi shift left and logical shift rihgt can have (Heap)Number output, so
13975 // perform int32 operation.
13976 case Operation::kShiftLeft:
13977 case Operation::kShiftRightLogical:
13978 return BitwiseOp(SmiToInt32(left), SmiToInt32(right), bitwise_op);
13979 // Arithmetic shift right of a Smi can't overflow to the heap number, so
13980 // perform int32 operation but don't check for overflow.
13981 case Operation::kShiftRight: {
13982 TNode<Int32T> left32 = SmiToInt32(left);
13983 TNode<Int32T> right32 = SmiToInt32(right);
13984 if (!Word32ShiftIsSafe()) {
13985 right32 = Word32And(right32, Int32Constant(0x1F));
13986 }
13987 return ChangeInt32ToTaggedNoOverflow(Word32Sar(left32, right32));
13988 }
13989 default:
13990 break;
13991 }
13992 UNREACHABLE()V8_Fatal("unreachable code");
13993}
13994
13995TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResult(
13996 TNode<Context> context, TNode<Object> value, TNode<Oddball> done) {
13997 CSA_DCHECK(this, IsBoolean(done))((void)0);
13998 TNode<NativeContext> native_context = LoadNativeContext(context);
13999 TNode<Map> map = CAST(Cast(LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX
))
14000 LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX))Cast(LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX
))
;
14001 TNode<HeapObject> result = Allocate(JSIteratorResult::kSize);
14002 StoreMapNoWriteBarrier(result, map);
14003 StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
14004 RootIndex::kEmptyFixedArray);
14005 StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
14006 RootIndex::kEmptyFixedArray);
14007 StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
14008 StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
14009 return CAST(result)Cast(result);
14010}
14011
14012TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResultForEntry(
14013 TNode<Context> context, TNode<Object> key, TNode<Object> value) {
14014 TNode<NativeContext> native_context = LoadNativeContext(context);
14015 TNode<Smi> length = SmiConstant(2);
14016 int const elements_size = FixedArray::SizeFor(2);
14017 TNode<FixedArray> elements =
14018 UncheckedCast<FixedArray>(Allocate(elements_size));
14019 StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
14020 RootIndex::kFixedArrayMap);
14021 StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
14022 StoreFixedArrayElement(elements, 0, key);
14023 StoreFixedArrayElement(elements, 1, value);
14024 TNode<Map> array_map = CAST(LoadContextElement(Cast(LoadContextElement( native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX
))
14025 native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX))Cast(LoadContextElement( native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX
))
;
14026 TNode<HeapObject> array = Allocate(JSArray::kHeaderSize);
14027 StoreMapNoWriteBarrier(array, array_map);
14028 StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
14029 RootIndex::kEmptyFixedArray);
14030 StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
14031 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
14032 TNode<Map> iterator_map = CAST(Cast(LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX
))
14033 LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX))Cast(LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX
))
;
14034 TNode<HeapObject> result = Allocate(JSIteratorResult::kSize);
14035 StoreMapNoWriteBarrier(result, iterator_map);
14036 StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
14037 RootIndex::kEmptyFixedArray);
14038 StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
14039 RootIndex::kEmptyFixedArray);
14040 StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
14041 StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
14042 RootIndex::kFalseValue);
14043 return CAST(result)Cast(result);
14044}
14045
14046TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
14047 TNode<Object> o,
14048 TNode<Number> len) {
14049 TNode<JSReceiver> constructor =
14050 CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o))Cast(CallRuntime(Runtime::kArraySpeciesConstructor, context, o
))
;
14051 return Construct(context, constructor, len);
14052}
14053
14054void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
14055 TNode<Context> context, TNode<JSArrayBuffer> array_buffer,
14056 const char* method_name) {
14057 Label if_detached(this, Label::kDeferred), if_not_detached(this);
14058 Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
14059 BIND(&if_detached)Bind(&if_detached);
14060 ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
14061 BIND(&if_not_detached)Bind(&if_not_detached);
14062}
14063
14064void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
14065 TNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
14066 const char* method_name) {
14067 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
14068 ThrowIfArrayBufferIsDetached(context, buffer, method_name);
14069}
14070
14071TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStorePtr(
14072 TNode<JSArrayBuffer> array_buffer) {
14073 return LoadSandboxedPointerFromObject(array_buffer,
14074 JSArrayBuffer::kBackingStoreOffset);
14075}
14076
14077TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
14078 TNode<JSArrayBufferView> array_buffer_view) {
14079 return LoadObjectField<JSArrayBuffer>(array_buffer_view,
14080 JSArrayBufferView::kBufferOffset);
14081}
14082
14083TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
14084 TNode<JSArrayBufferView> array_buffer_view) {
14085 return LoadObjectField<UintPtrT>(array_buffer_view,
14086 JSArrayBufferView::kByteLengthOffset);
14087}
14088
14089TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
14090 TNode<JSArrayBufferView> array_buffer_view) {
14091 return LoadObjectField<UintPtrT>(array_buffer_view,
14092 JSArrayBufferView::kByteOffsetOffset);
14093}
14094
14095TNode<UintPtrT> CodeStubAssembler::LoadJSTypedArrayLengthAndCheckDetached(
14096 TNode<JSTypedArray> typed_array, Label* detached) {
14097 TVARIABLE(UintPtrT, result)TVariable<UintPtrT> result(this);
14098 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(typed_array);
14099
14100 Label variable_length(this), fixed_length(this), end(this);
14101 Branch(IsVariableLengthJSArrayBufferView(typed_array), &variable_length,
14102 &fixed_length);
14103 BIND(&variable_length)Bind(&variable_length);
14104 {
14105 result =
14106 LoadVariableLengthJSTypedArrayLength(typed_array, buffer, detached);
14107 Goto(&end);
14108 }
14109
14110 BIND(&fixed_length)Bind(&fixed_length);
14111 {
14112 Label not_detached(this);
14113 Branch(IsDetachedBuffer(buffer), detached, &not_detached);
14114 BIND(&not_detached)Bind(&not_detached);
14115 result = LoadJSTypedArrayLength(typed_array);
14116 Goto(&end);
14117 }
14118 BIND(&end)Bind(&end);
14119 return result.value();
14120}
14121
14122// ES #sec-integerindexedobjectlength
14123TNode<UintPtrT> CodeStubAssembler::LoadVariableLengthJSTypedArrayLength(
14124 TNode<JSTypedArray> array, TNode<JSArrayBuffer> buffer,
14125 Label* detached_or_out_of_bounds) {
14126 // byte_length already takes array's offset into account.
14127 TNode<UintPtrT> byte_length = LoadVariableLengthJSArrayBufferViewByteLength(
14128 array, buffer, detached_or_out_of_bounds);
14129 TNode<IntPtrT> element_size =
14130 RabGsabElementsKindToElementByteSize(LoadElementsKind(array));
14131 return Unsigned(IntPtrDiv(Signed(byte_length), element_size));
14132}
14133
14134TNode<UintPtrT>
14135CodeStubAssembler::LoadVariableLengthJSArrayBufferViewByteLength(
14136 TNode<JSArrayBufferView> array, TNode<JSArrayBuffer> buffer,
14137 Label* detached_or_out_of_bounds) {
14138 Label is_gsab(this), is_rab(this), end(this);
14139 TVARIABLE(UintPtrT, result)TVariable<UintPtrT> result(this);
14140 TNode<UintPtrT> array_byte_offset = LoadJSArrayBufferViewByteOffset(array);
14141
14142 Branch(IsSharedArrayBuffer(buffer), &is_gsab, &is_rab);
14143 BIND(&is_gsab)Bind(&is_gsab);
14144 {
14145 // Non-length-tracking GSAB-backed ArrayBufferViews shouldn't end up here.
14146 CSA_DCHECK(this, IsLengthTrackingJSArrayBufferView(array))((void)0);
14147 // Read the byte length from the BackingStore.
14148 const TNode<ExternalReference> byte_length_function =
14149 ExternalConstant(ExternalReference::gsab_byte_length());
14150 TNode<ExternalReference> isolate_ptr =
14151 ExternalConstant(ExternalReference::isolate_address(isolate()));
14152 TNode<UintPtrT> buffer_byte_length = UncheckedCast<UintPtrT>(
14153 CallCFunction(byte_length_function, MachineType::UintPtr(),
14154 std::make_pair(MachineType::Pointer(), isolate_ptr),
14155 std::make_pair(MachineType::AnyTagged(), buffer)));
14156 // Since the SharedArrayBuffer can't shrink, and we've managed to create
14157 // this JSArrayBufferDataView without throwing an exception, we know that
14158 // buffer_byte_length >= array_byte_offset.
14159 CSA_CHECK(this,(this)->FastCheck(UintPtrGreaterThanOrEqual(buffer_byte_length
, array_byte_offset))
14160 UintPtrGreaterThanOrEqual(buffer_byte_length, array_byte_offset))(this)->FastCheck(UintPtrGreaterThanOrEqual(buffer_byte_length
, array_byte_offset))
;
14161 result = UintPtrSub(buffer_byte_length, array_byte_offset);
14162 Goto(&end);
14163 }
14164
14165 BIND(&is_rab)Bind(&is_rab);
14166 {
14167 GotoIf(IsDetachedBuffer(buffer), detached_or_out_of_bounds);
14168
14169 TNode<UintPtrT> buffer_byte_length = LoadJSArrayBufferByteLength(buffer);
14170
14171 Label is_length_tracking(this), not_length_tracking(this);
14172 Branch(IsLengthTrackingJSArrayBufferView(array), &is_length_tracking,
14173 &not_length_tracking);
14174
14175 BIND(&is_length_tracking)Bind(&is_length_tracking);
14176 {
14177 // The backing RAB might have been shrunk so that the start of the
14178 // TypedArray is already out of bounds.
14179 GotoIfNot(UintPtrLessThanOrEqual(array_byte_offset, buffer_byte_length),
14180 detached_or_out_of_bounds);
14181 result = UintPtrSub(buffer_byte_length, array_byte_offset);
14182 Goto(&end);
14183 }
14184
14185 BIND(&not_length_tracking)Bind(&not_length_tracking);
14186 {
14187 // Check if the backing RAB has shrunk so that the buffer is out of
14188 // bounds.
14189 TNode<UintPtrT> array_byte_length =
14190 LoadJSArrayBufferViewByteLength(array);
14191 GotoIfNot(UintPtrGreaterThanOrEqual(
14192 buffer_byte_length,
14193 UintPtrAdd(array_byte_offset, array_byte_length)),
14194 detached_or_out_of_bounds);
14195 result = array_byte_length;
14196 Goto(&end);
14197 }
14198 }
14199 BIND(&end)Bind(&end);
14200 return result.value();
14201}
14202
14203void CodeStubAssembler::IsJSArrayBufferViewDetachedOrOutOfBounds(
14204 TNode<JSArrayBufferView> array_buffer_view, Label* detached_or_oob,
14205 Label* not_detached_nor_oob) {
14206 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
14207
14208 GotoIf(IsDetachedBuffer(buffer), detached_or_oob);
14209 GotoIfNot(IsVariableLengthJSArrayBufferView(array_buffer_view),
14210 not_detached_nor_oob);
14211 GotoIf(IsSharedArrayBuffer(buffer), not_detached_nor_oob);
14212
14213 {
14214 TNode<UintPtrT> buffer_byte_length = LoadJSArrayBufferByteLength(buffer);
14215 TNode<UintPtrT> array_byte_offset =
14216 LoadJSArrayBufferViewByteOffset(array_buffer_view);
14217
14218 Label length_tracking(this), not_length_tracking(this);
14219 Branch(IsLengthTrackingJSArrayBufferView(array_buffer_view),
14220 &length_tracking, &not_length_tracking);
14221
14222 BIND(&length_tracking)Bind(&length_tracking);
14223 {
14224 // The backing RAB might have been shrunk so that the start of the
14225 // TypedArray is already out of bounds.
14226 Branch(UintPtrLessThanOrEqual(array_byte_offset, buffer_byte_length),
14227 not_detached_nor_oob, detached_or_oob);
14228 }
14229
14230 BIND(&not_length_tracking)Bind(&not_length_tracking);
14231 {
14232 // Check if the backing RAB has shrunk so that the buffer is out of
14233 // bounds.
14234 TNode<UintPtrT> array_byte_length =
14235 LoadJSArrayBufferViewByteLength(array_buffer_view);
14236 Branch(UintPtrGreaterThanOrEqual(
14237 buffer_byte_length,
14238 UintPtrAdd(array_byte_offset, array_byte_length)),
14239 not_detached_nor_oob, detached_or_oob);
14240 }
14241 }
14242}
14243
14244TNode<BoolT> CodeStubAssembler::IsJSArrayBufferViewDetachedOrOutOfBoundsBoolean(
14245 TNode<JSArrayBufferView> array_buffer_view) {
14246 Label is_detached_or_out_of_bounds(this),
14247 not_detached_nor_out_of_bounds(this), end(this);
14248 TVARIABLE(BoolT, result)TVariable<BoolT> result(this);
14249
14250 IsJSArrayBufferViewDetachedOrOutOfBounds(array_buffer_view,
14251 &is_detached_or_out_of_bounds,
14252 &not_detached_nor_out_of_bounds);
14253 BIND(&is_detached_or_out_of_bounds)Bind(&is_detached_or_out_of_bounds);
14254 {
14255 result = BoolConstant(true);
14256 Goto(&end);
14257 }
14258 BIND(&not_detached_nor_out_of_bounds)Bind(&not_detached_nor_out_of_bounds);
14259 {
14260 result = BoolConstant(false);
14261 Goto(&end);
14262 }
14263 BIND(&end)Bind(&end);
14264 return result.value();
14265}
14266
14267void CodeStubAssembler::CheckJSTypedArrayIndex(
14268 TNode<UintPtrT> index, TNode<JSTypedArray> typed_array,
14269 Label* detached_or_out_of_bounds) {
14270 TNode<UintPtrT> len = LoadJSTypedArrayLengthAndCheckDetached(
14271 typed_array, detached_or_out_of_bounds);
14272
14273 GotoIf(UintPtrGreaterThanOrEqual(index, len), detached_or_out_of_bounds);
14274}
14275
14276// ES #sec-integerindexedobjectbytelength
14277TNode<UintPtrT> CodeStubAssembler::LoadVariableLengthJSTypedArrayByteLength(
14278 TNode<Context> context, TNode<JSTypedArray> array,
14279 TNode<JSArrayBuffer> buffer) {
14280 Label miss(this), end(this);
14281 TVARIABLE(UintPtrT, result)TVariable<UintPtrT> result(this);
14282
14283 TNode<UintPtrT> length =
14284 LoadVariableLengthJSTypedArrayLength(array, buffer, &miss);
14285 TNode<IntPtrT> element_size =
14286 RabGsabElementsKindToElementByteSize(LoadElementsKind(array));
14287 // Conversion to signed is OK since length < JSArrayBuffer::kMaxByteLength.
14288 TNode<IntPtrT> byte_length = IntPtrMul(Signed(length), element_size);
14289 result = Unsigned(byte_length);
14290 Goto(&end);
14291 BIND(&miss)Bind(&miss);
14292 {
14293 result = UintPtrConstant(0);
14294 Goto(&end);
14295 }
14296 BIND(&end)Bind(&end);
14297 return result.value();
14298}
14299
14300TNode<IntPtrT> CodeStubAssembler::RabGsabElementsKindToElementByteSize(
14301 TNode<Int32T> elements_kind) {
14302 TVARIABLE(IntPtrT, result)TVariable<IntPtrT> result(this);
14303 Label elements_8(this), elements_16(this), elements_32(this),
14304 elements_64(this), not_found(this), end(this);
14305 int32_t elements_kinds[] = {
14306 RAB_GSAB_UINT8_ELEMENTS, RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
14307 RAB_GSAB_INT8_ELEMENTS, RAB_GSAB_UINT16_ELEMENTS,
14308 RAB_GSAB_INT16_ELEMENTS, RAB_GSAB_UINT32_ELEMENTS,
14309 RAB_GSAB_INT32_ELEMENTS, RAB_GSAB_FLOAT32_ELEMENTS,
14310 RAB_GSAB_FLOAT64_ELEMENTS, RAB_GSAB_BIGINT64_ELEMENTS,
14311 RAB_GSAB_BIGUINT64_ELEMENTS};
14312 Label* elements_kind_labels[] = {&elements_8, &elements_8, &elements_8,
14313 &elements_16, &elements_16, &elements_32,
14314 &elements_32, &elements_32, &elements_64,
14315 &elements_64, &elements_64};
14316 const size_t kTypedElementsKindCount =
14317 LAST_RAB_GSAB_FIXED_TYPED_ARRAY_ELEMENTS_KIND -
14318 FIRST_RAB_GSAB_FIXED_TYPED_ARRAY_ELEMENTS_KIND + 1;
14319 DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kinds))((void) 0);
14320 DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kind_labels))((void) 0);
14321 Switch(elements_kind, &not_found, elements_kinds, elements_kind_labels,
14322 kTypedElementsKindCount);
14323 BIND(&elements_8)Bind(&elements_8);
14324 {
14325 result = IntPtrConstant(1);
14326 Goto(&end);
14327 }
14328 BIND(&elements_16)Bind(&elements_16);
14329 {
14330 result = IntPtrConstant(2);
14331 Goto(&end);
14332 }
14333 BIND(&elements_32)Bind(&elements_32);
14334 {
14335 result = IntPtrConstant(4);
14336 Goto(&end);
14337 }
14338 BIND(&elements_64)Bind(&elements_64);
14339 {
14340 result = IntPtrConstant(8);
14341 Goto(&end);
14342 }
14343 BIND(&not_found)Bind(&not_found);
14344 { Unreachable(); }
14345 BIND(&end)Bind(&end);
14346 return result.value();
14347}
14348
14349TNode<JSArrayBuffer> CodeStubAssembler::GetTypedArrayBuffer(
14350 TNode<Context> context, TNode<JSTypedArray> array) {
14351 Label call_runtime(this), done(this);
14352 TVARIABLE(Object, var_result)TVariable<Object> var_result(this);
14353
14354 GotoIf(IsOnHeapTypedArray(array), &call_runtime);
14355
14356 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array);
14357 GotoIf(IsDetachedBuffer(buffer), &call_runtime);
14358 var_result = buffer;
14359 Goto(&done);
14360
14361 BIND(&call_runtime)Bind(&call_runtime);
14362 {
14363 var_result = CallRuntime(Runtime::kTypedArrayGetBuffer, context, array);
14364 Goto(&done);
14365 }
14366
14367 BIND(&done)Bind(&done);
14368 return CAST(var_result.value())Cast(var_result.value());
14369}
14370
14371CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler,
14372 TNode<IntPtrT> argc, TNode<RawPtrT> fp)
14373 : assembler_(assembler),
14374 argc_(argc),
14375 base_(),
14376 fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
14377 TNode<IntPtrT> offset = assembler_->IntPtrConstant(
14378 (StandardFrameConstants::kFixedSlotCountAboveFp + 1) *
14379 kSystemPointerSize);
14380 DCHECK_NOT_NULL(argc_)((void) 0);
14381 // base_ points to the first argument, not the receiver
14382 // whether present or not.
14383 base_ = assembler_->RawPtrAdd(fp_, offset);
14384}
14385
14386TNode<Object> CodeStubArguments::GetReceiver() const {
14387 intptr_t offset = -kSystemPointerSize;
14388 return assembler_->LoadFullTagged(base_, assembler_->IntPtrConstant(offset));
14389}
14390
14391void CodeStubArguments::SetReceiver(TNode<Object> object) const {
14392 intptr_t offset = -kSystemPointerSize;
14393 assembler_->StoreFullTaggedNoWriteBarrier(
14394 base_, assembler_->IntPtrConstant(offset), object);
14395}
14396
14397TNode<RawPtrT> CodeStubArguments::AtIndexPtr(TNode<IntPtrT> index) const {
14398 TNode<IntPtrT> offset =
14399 assembler_->ElementOffsetFromIndex(index, SYSTEM_POINTER_ELEMENTS, 0);
14400 return assembler_->RawPtrAdd(base_, offset);
14401}
14402
14403TNode<Object> CodeStubArguments::AtIndex(TNode<IntPtrT> index) const {
14404 CSA_DCHECK(assembler_, assembler_->UintPtrOrSmiLessThan(((void)0)
14405 index, GetLengthWithoutReceiver()))((void)0);
14406 return assembler_->LoadFullTagged(AtIndexPtr(index));
14407}
14408
14409TNode<Object> CodeStubArguments::AtIndex(int index) const {
14410 return AtIndex(assembler_->IntPtrConstant(index));
14411}
14412
14413TNode<IntPtrT> CodeStubArguments::GetLengthWithoutReceiver() const {
14414 return assembler_->IntPtrSub(
14415 argc_, assembler_->IntPtrConstant(kJSArgcReceiverSlots));
14416}
14417
14418TNode<IntPtrT> CodeStubArguments::GetLengthWithReceiver() const {
14419 return argc_;
14420}
14421
14422TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
14423 TNode<IntPtrT> index, TNode<Object> default_value) {
14424 CodeStubAssembler::TVariable<Object> result(assembler_);
14425 CodeStubAssembler::Label argument_missing(assembler_),
14426 argument_done(assembler_, &result);
14427
14428 assembler_->GotoIf(
14429 assembler_->UintPtrGreaterThanOrEqual(index, GetLengthWithoutReceiver()),
14430 &argument_missing);
14431 result = AtIndex(index);
14432 assembler_->Goto(&argument_done);
14433
14434 assembler_->BIND(&argument_missing)Bind(&argument_missing);
14435 result = default_value;
14436 assembler_->Goto(&argument_done);
14437
14438 assembler_->BIND(&argument_done)Bind(&argument_done);
14439 return result.value();
14440}
14441
14442void CodeStubArguments::ForEach(
14443 const CodeStubAssembler::VariableList& vars,
14444 const CodeStubArguments::ForEachBodyFunction& body, TNode<IntPtrT> first,
14445 TNode<IntPtrT> last) const {
14446 assembler_->Comment("CodeStubArguments::ForEach");
14447 if (first == nullptr) {
14448 first = assembler_->IntPtrConstant(0);
14449 }
14450 if (last == nullptr) {
14451 last = GetLengthWithoutReceiver();
14452 }
14453 TNode<RawPtrT> start = AtIndexPtr(first);
14454 TNode<RawPtrT> end = AtIndexPtr(last);
14455 const int increment = kSystemPointerSize;
14456 assembler_->BuildFastLoop<RawPtrT>(
14457 vars, start, end,
14458 [&](TNode<RawPtrT> current) {
14459 TNode<Object> arg = assembler_->LoadFullTagged(current);
14460 body(arg);
14461 },
14462 increment, CodeStubAssembler::IndexAdvanceMode::kPost);
14463}
14464
14465void CodeStubArguments::PopAndReturn(TNode<Object> value) {
14466 TNode<IntPtrT> pop_count = GetLengthWithReceiver();
14467 assembler_->PopAndReturn(pop_count, value);
14468}
14469
14470TNode<BoolT> CodeStubAssembler::IsFastElementsKind(
14471 TNode<Int32T> elements_kind) {
14472 STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND)static_assert(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND
, "FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND")
;
14473 return Uint32LessThanOrEqual(elements_kind,
14474 Int32Constant(LAST_FAST_ELEMENTS_KIND));
14475}
14476
14477TNode<BoolT> CodeStubAssembler::IsFastOrNonExtensibleOrSealedElementsKind(
14478 TNode<Int32T> elements_kind) {
14479 STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND)static_assert(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND
, "FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND")
;
14480 STATIC_ASSERT(LAST_FAST_ELEMENTS_KIND + 1 == PACKED_NONEXTENSIBLE_ELEMENTS)static_assert(LAST_FAST_ELEMENTS_KIND + 1 == PACKED_NONEXTENSIBLE_ELEMENTS
, "LAST_FAST_ELEMENTS_KIND + 1 == PACKED_NONEXTENSIBLE_ELEMENTS"
)
;
14481 STATIC_ASSERT(PACKED_NONEXTENSIBLE_ELEMENTS + 1 ==static_assert(PACKED_NONEXTENSIBLE_ELEMENTS + 1 == HOLEY_NONEXTENSIBLE_ELEMENTS
, "PACKED_NONEXTENSIBLE_ELEMENTS + 1 == HOLEY_NONEXTENSIBLE_ELEMENTS"
)
14482 HOLEY_NONEXTENSIBLE_ELEMENTS)static_assert(PACKED_NONEXTENSIBLE_ELEMENTS + 1 == HOLEY_NONEXTENSIBLE_ELEMENTS
, "PACKED_NONEXTENSIBLE_ELEMENTS + 1 == HOLEY_NONEXTENSIBLE_ELEMENTS"
)
;
14483 STATIC_ASSERT(HOLEY_NONEXTENSIBLE_ELEMENTS + 1 == PACKED_SEALED_ELEMENTS)static_assert(HOLEY_NONEXTENSIBLE_ELEMENTS + 1 == PACKED_SEALED_ELEMENTS
, "HOLEY_NONEXTENSIBLE_ELEMENTS + 1 == PACKED_SEALED_ELEMENTS"
)
;
14484 STATIC_ASSERT(PACKED_SEALED_ELEMENTS + 1 == HOLEY_SEALED_ELEMENTS)static_assert(PACKED_SEALED_ELEMENTS + 1 == HOLEY_SEALED_ELEMENTS
, "PACKED_SEALED_ELEMENTS + 1 == HOLEY_SEALED_ELEMENTS")
;
14485 return Uint32LessThanOrEqual(elements_kind,
14486 Int32Constant(HOLEY_SEALED_ELEMENTS));
14487}
14488
14489TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
14490 TNode<Int32T> elements_kind) {
14491 STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND)static_assert(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND
, "FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND")
;
14492 STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0)static_assert((PACKED_DOUBLE_ELEMENTS & 1) == 0, "(PACKED_DOUBLE_ELEMENTS & 1) == 0"
)
;
14493 STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS)static_assert(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS
, "PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS")
;
14494 return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
14495 Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
14496}
14497
14498TNode<BoolT> CodeStubAssembler::IsFastSmiOrTaggedElementsKind(
14499 TNode<Int32T> elements_kind) {
14500 STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND)static_assert(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND
, "FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND")
;
14501 STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND)static_assert(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND
, "PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND")
;
14502 STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND)static_assert(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND
, "HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND")
;
14503 return Uint32LessThanOrEqual(elements_kind,
14504 Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
14505}
14506
14507TNode<BoolT> CodeStubAssembler::IsFastSmiElementsKind(
14508 TNode<Int32T> elements_kind) {
14509 return Uint32LessThanOrEqual(elements_kind,
14510 Int32Constant(HOLEY_SMI_ELEMENTS));
14511}
14512
14513TNode<BoolT> CodeStubAssembler::IsHoleyFastElementsKind(
14514 TNode<Int32T> elements_kind) {
14515 CSA_DCHECK(this, IsFastElementsKind(elements_kind))((void)0);
14516
14517 STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1))static_assert(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1)
, "HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1)")
;
14518 STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1))static_assert(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1), "HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1)"
)
;
14519 STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1))static_assert(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS
| 1), "HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1)"
)
;
14520 return IsSetWord32(elements_kind, 1);
14521}
14522
14523TNode<BoolT> CodeStubAssembler::IsHoleyFastElementsKindForRead(
14524 TNode<Int32T> elements_kind) {
14525 CSA_DCHECK(this, Uint32LessThanOrEqual(((void)0)
14526 elements_kind,((void)0)
14527 Int32Constant(LAST_ANY_NONEXTENSIBLE_ELEMENTS_KIND)))((void)0);
14528
14529 STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1))static_assert(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1)
, "HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1)")
;
14530 STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1))static_assert(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1), "HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1)"
)
;
14531 STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1))static_assert(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS
| 1), "HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1)"
)
;
14532 STATIC_ASSERT(HOLEY_NONEXTENSIBLE_ELEMENTS ==static_assert(HOLEY_NONEXTENSIBLE_ELEMENTS == (PACKED_NONEXTENSIBLE_ELEMENTS
| 1), "HOLEY_NONEXTENSIBLE_ELEMENTS == (PACKED_NONEXTENSIBLE_ELEMENTS | 1)"
)
14533 (PACKED_NONEXTENSIBLE_ELEMENTS | 1))static_assert(HOLEY_NONEXTENSIBLE_ELEMENTS == (PACKED_NONEXTENSIBLE_ELEMENTS
| 1), "HOLEY_NONEXTENSIBLE_ELEMENTS == (PACKED_NONEXTENSIBLE_ELEMENTS | 1)"
)
;
14534 STATIC_ASSERT(HOLEY_SEALED_ELEMENTS == (PACKED_SEALED_ELEMENTS | 1))static_assert(HOLEY_SEALED_ELEMENTS == (PACKED_SEALED_ELEMENTS
| 1), "HOLEY_SEALED_ELEMENTS == (PACKED_SEALED_ELEMENTS | 1)"
)
;
14535 STATIC_ASSERT(HOLEY_FROZEN_ELEMENTS == (PACKED_FROZEN_ELEMENTS | 1))static_assert(HOLEY_FROZEN_ELEMENTS == (PACKED_FROZEN_ELEMENTS
| 1), "HOLEY_FROZEN_ELEMENTS == (PACKED_FROZEN_ELEMENTS | 1)"
)
;
14536 return IsSetWord32(elements_kind, 1);
14537}
14538
14539TNode<BoolT> CodeStubAssembler::IsElementsKindGreaterThan(
14540 TNode<Int32T> target_kind, ElementsKind reference_kind) {
14541 return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
14542}
14543
14544TNode<BoolT> CodeStubAssembler::IsElementsKindGreaterThanOrEqual(
14545 TNode<Int32T> target_kind, ElementsKind reference_kind) {
14546 return Int32GreaterThanOrEqual(target_kind, Int32Constant(reference_kind));
14547}
14548
14549TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
14550 TNode<Int32T> target_kind, ElementsKind reference_kind) {
14551 return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
14552}
14553
14554TNode<BoolT> CodeStubAssembler::IsDebugActive() {
14555 TNode<Uint8T> is_debug_active = Load<Uint8T>(
14556 ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
14557 return Word32NotEqual(is_debug_active, Int32Constant(0));
14558}
14559
14560TNode<BoolT> CodeStubAssembler::IsSideEffectFreeDebuggingActive() {
14561 TNode<Uint8T> debug_execution_mode = Load<Uint8T>(ExternalConstant(
14562 ExternalReference::debug_execution_mode_address(isolate())));
14563
14564 TNode<BoolT> is_active =
14565 Word32Equal(debug_execution_mode,
14566 Int32Constant(DebugInfo::ExecutionMode::kSideEffects));
14567
14568 return is_active;
14569}
14570
14571TNode<BoolT> CodeStubAssembler::HasAsyncEventDelegate() {
14572 const TNode<RawPtrT> async_event_delegate = Load<RawPtrT>(ExternalConstant(
14573 ExternalReference::async_event_delegate_address(isolate())));
14574 return WordNotEqual(async_event_delegate, IntPtrConstant(0));
14575}
14576
14577TNode<Uint32T> CodeStubAssembler::PromiseHookFlags() {
14578 return Load<Uint32T>(ExternalConstant(
14579 ExternalReference::promise_hook_flags_address(isolate())));
14580}
14581
14582TNode<BoolT> CodeStubAssembler::IsAnyPromiseHookEnabled(TNode<Uint32T> flags) {
14583 uint32_t mask = Isolate::PromiseHookFields::HasContextPromiseHook::kMask |
14584 Isolate::PromiseHookFields::HasIsolatePromiseHook::kMask;
14585 return IsSetWord32(flags, mask);
14586}
14587
14588TNode<BoolT> CodeStubAssembler::IsIsolatePromiseHookEnabled(
14589 TNode<Uint32T> flags) {
14590 return IsSetWord32<Isolate::PromiseHookFields::HasIsolatePromiseHook>(flags);
14591}
14592
14593#ifdef V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS1
14594TNode<BoolT> CodeStubAssembler::IsContextPromiseHookEnabled(
14595 TNode<Uint32T> flags) {
14596 return IsSetWord32<Isolate::PromiseHookFields::HasContextPromiseHook>(flags);
14597}
14598#endif
14599
14600TNode<BoolT> CodeStubAssembler::
14601 IsIsolatePromiseHookEnabledOrHasAsyncEventDelegate(TNode<Uint32T> flags) {
14602 uint32_t mask = Isolate::PromiseHookFields::HasIsolatePromiseHook::kMask |
14603 Isolate::PromiseHookFields::HasAsyncEventDelegate::kMask;
14604 return IsSetWord32(flags, mask);
14605}
14606
14607TNode<BoolT> CodeStubAssembler::
14608 IsIsolatePromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate(
14609 TNode<Uint32T> flags) {
14610 uint32_t mask = Isolate::PromiseHookFields::HasIsolatePromiseHook::kMask |
14611 Isolate::PromiseHookFields::HasAsyncEventDelegate::kMask |
14612 Isolate::PromiseHookFields::IsDebugActive::kMask;
14613 return IsSetWord32(flags, mask);
14614}
14615
14616TNode<BoolT> CodeStubAssembler::NeedsAnyPromiseHooks(TNode<Uint32T> flags) {
14617 return Word32NotEqual(flags, Int32Constant(0));
14618}
14619
14620TNode<CodeT> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
14621 CSA_DCHECK(this, SmiBelow(builtin_id, SmiConstant(Builtins::kBuiltinCount)))((void)0);
14622
14623 TNode<IntPtrT> offset =
14624 ElementOffsetFromIndex(SmiToBInt(builtin_id), SYSTEM_POINTER_ELEMENTS);
14625
14626 TNode<ExternalReference> table =
14627 ExternalConstant(ExternalReference::builtins_table(isolate()));
14628
14629 return CAST(BitcastWordToTagged(Load<RawPtrT>(table, offset)))Cast(BitcastWordToTagged(Load<RawPtrT>(table, offset)));
14630}
14631
14632TNode<CodeT> CodeStubAssembler::GetSharedFunctionInfoCode(
14633 TNode<SharedFunctionInfo> shared_info, TVariable<Uint16T>* data_type_out,
14634 Label* if_compile_lazy) {
14635 TNode<Object> sfi_data =
14636 LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);
14637
14638 TVARIABLE(CodeT, sfi_code)TVariable<CodeT> sfi_code(this);
14639
14640 Label done(this);
14641 Label check_instance_type(this);
14642
14643 // IsSmi: Is builtin
14644 GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
14645 if (data_type_out) {
14646 *data_type_out = Uint16Constant(0);
14647 }
14648 if (if_compile_lazy) {
14649 GotoIf(SmiEqual(CAST(sfi_data)Cast(sfi_data), SmiConstant(Builtin::kCompileLazy)),
14650 if_compile_lazy);
14651 }
14652 sfi_code = LoadBuiltin(CAST(sfi_data)Cast(sfi_data));
14653 Goto(&done);
14654
14655 // Switch on data's instance type.
14656 BIND(&check_instance_type)Bind(&check_instance_type);
14657 TNode<Uint16T> data_type = LoadInstanceType(CAST(sfi_data)Cast(sfi_data));
14658 if (data_type_out) {
14659 *data_type_out = data_type;
14660 }
14661
14662 int32_t case_values[] = {
14663 BYTECODE_ARRAY_TYPE,
14664 CODET_TYPE,
14665 UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
14666 UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
14667 UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_WITH_JOB_TYPE,
14668 UNCOMPILED_DATA_WITH_PREPARSE_DATA_AND_JOB_TYPE,
14669 FUNCTION_TEMPLATE_INFO_TYPE,
14670#if V8_ENABLE_WEBASSEMBLY1
14671 WASM_CAPI_FUNCTION_DATA_TYPE,
14672 WASM_EXPORTED_FUNCTION_DATA_TYPE,
14673 WASM_JS_FUNCTION_DATA_TYPE,
14674 ASM_WASM_DATA_TYPE,
14675 WASM_ON_FULFILLED_DATA_TYPE,
14676#endif // V8_ENABLE_WEBASSEMBLY
14677 };
14678 Label check_is_bytecode_array(this);
14679 Label check_is_baseline_data(this);
14680 Label check_is_asm_wasm_data(this);
14681 Label check_is_uncompiled_data(this);
14682 Label check_is_function_template_info(this);
14683 Label check_is_interpreter_data(this);
14684 Label check_is_wasm_function_data(this);
14685 Label check_is_wasm_on_fulfilled(this);
14686 Label* case_labels[] = {
14687 &check_is_bytecode_array,
14688 &check_is_baseline_data,
14689 &check_is_uncompiled_data,
14690 &check_is_uncompiled_data,
14691 &check_is_uncompiled_data,
14692 &check_is_uncompiled_data,
14693 &check_is_function_template_info,
14694#if V8_ENABLE_WEBASSEMBLY1
14695 &check_is_wasm_function_data,
14696 &check_is_wasm_function_data,
14697 &check_is_wasm_function_data,
14698 &check_is_asm_wasm_data,
14699 &check_is_wasm_on_fulfilled,
14700#endif // V8_ENABLE_WEBASSEMBLY
14701 };
14702 STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels))static_assert((sizeof(ArraySizeHelper(case_values))) == (sizeof
(ArraySizeHelper(case_labels))), "arraysize(case_values) == arraysize(case_labels)"
)
;
14703 Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
14704 arraysize(case_labels)(sizeof(ArraySizeHelper(case_labels))));
14705
14706 // IsBytecodeArray: Interpret bytecode
14707 BIND(&check_is_bytecode_array)Bind(&check_is_bytecode_array);
14708 sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline)(isolate())->builtins()->code_handle(i::Builtin::kInterpreterEntryTrampoline
)
);
14709 Goto(&done);
14710
14711 // IsBaselineData: Execute baseline code
14712 BIND(&check_is_baseline_data)Bind(&check_is_baseline_data);
14713 {
14714 TNode<CodeT> baseline_code = CAST(sfi_data)Cast(sfi_data);
14715 sfi_code = baseline_code;
14716 Goto(&done);
14717 }
14718
14719 // IsUncompiledDataWithPreparseData | IsUncompiledDataWithoutPreparseData:
14720 // Compile lazy
14721 BIND(&check_is_uncompiled_data)Bind(&check_is_uncompiled_data);
14722 sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy)(isolate())->builtins()->code_handle(i::Builtin::kCompileLazy
)
);
14723 Goto(if_compile_lazy ? if_compile_lazy : &done);
14724
14725 // IsFunctionTemplateInfo: API call
14726 BIND(&check_is_function_template_info)Bind(&check_is_function_template_info);
14727 sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall)(isolate())->builtins()->code_handle(i::Builtin::kHandleApiCall
)
);
14728 Goto(&done);
14729
14730 // IsInterpreterData: Interpret bytecode
14731 BIND(&check_is_interpreter_data)Bind(&check_is_interpreter_data);
14732 // This is the default branch, so assert that we have the expected data type.
14733 CSA_DCHECK(this,((void)0)
14734 Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)))((void)0);
14735 {
14736 TNode<CodeT> trampoline =
14737 LoadInterpreterDataInterpreterTrampoline(CAST(sfi_data)Cast(sfi_data));
14738 sfi_code = trampoline;
14739 }
14740 Goto(&done);
14741
14742#if V8_ENABLE_WEBASSEMBLY1
14743 // IsWasmFunctionData: Use the wrapper code
14744 BIND(&check_is_wasm_function_data)Bind(&check_is_wasm_function_data);
14745 sfi_code = CAST(LoadObjectField(Cast(LoadObjectField( Cast(sfi_data), WasmExportedFunctionData
::kWrapperCodeOffset))
14746 CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset))Cast(LoadObjectField( Cast(sfi_data), WasmExportedFunctionData
::kWrapperCodeOffset))
;
14747 Goto(&done);
14748
14749 // IsAsmWasmData: Instantiate using AsmWasmData
14750 BIND(&check_is_asm_wasm_data)Bind(&check_is_asm_wasm_data);
14751 sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs)(isolate())->builtins()->code_handle(i::Builtin::kInstantiateAsmJs
)
);
14752 Goto(&done);
14753
14754 // IsWasmOnFulfilledData: Resume the suspended wasm continuation.
14755 BIND(&check_is_wasm_on_fulfilled)Bind(&check_is_wasm_on_fulfilled);
14756 sfi_code = HeapConstant(BUILTIN_CODE(isolate(), WasmResume)(isolate())->builtins()->code_handle(i::Builtin::kWasmResume
)
);
14757 Goto(&done);
14758#endif // V8_ENABLE_WEBASSEMBLY
14759
14760 BIND(&done)Bind(&done);
14761 return sfi_code.value();
14762}
14763
14764TNode<RawPtrT> CodeStubAssembler::GetCodeEntry(TNode<CodeT> code) {
14765#ifdef V8_EXTERNAL_CODE_SPACE
14766 TNode<CodeDataContainer> cdc = CodeDataContainerFromCodeT(code);
14767 return LoadExternalPointerFromObject(
14768 cdc, IntPtrConstant(CodeDataContainer::kCodeEntryPointOffset),
14769 kCodeEntryPointTag);
14770#else
14771 TNode<IntPtrT> object = BitcastTaggedToWord(code);
14772 return ReinterpretCast<RawPtrT>(
14773 IntPtrAdd(object, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)));
14774#endif
14775}
14776
14777TNode<JSFunction> CodeStubAssembler::AllocateFunctionWithMapAndContext(
14778 TNode<Map> map, TNode<SharedFunctionInfo> shared_info,
14779 TNode<Context> context) {
14780 const TNode<CodeT> code = GetSharedFunctionInfoCode(shared_info);
14781
14782 // TODO(ishell): All the callers of this function pass map loaded from
14783 // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
14784 // map parameter.
14785 CSA_DCHECK(this, Word32BinaryNot(IsConstructorMap(map)))((void)0);
14786 CSA_DCHECK(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)))((void)0);
14787 const TNode<HeapObject> fun = Allocate(JSFunction::kSizeWithoutPrototype);
14788 STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize)static_assert(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize
, "JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize")
;
14789 StoreMapNoWriteBarrier(fun, map);
14790 StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
14791 RootIndex::kEmptyFixedArray);
14792 StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
14793 RootIndex::kEmptyFixedArray);
14794 StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
14795 RootIndex::kManyClosuresCell);
14796 StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
14797 shared_info);
14798 StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
14799 StoreObjectField(fun, JSFunction::kCodeOffset, code);
14800 return CAST(fun)Cast(fun);
14801}
14802
14803void CodeStubAssembler::CheckPrototypeEnumCache(TNode<JSReceiver> receiver,
14804 TNode<Map> receiver_map,
14805 Label* if_fast,
14806 Label* if_slow) {
14807 TVARIABLE(JSReceiver, var_object, receiver)TVariable<JSReceiver> var_object(receiver, this);
14808 TVARIABLE(Map, object_map, receiver_map)TVariable<Map> object_map(receiver_map, this);
14809
14810 Label loop(this, {&var_object, &object_map}), done_loop(this);
14811 Goto(&loop);
14812 BIND(&loop)Bind(&loop);
14813 {
14814 // Check that there are no elements on the current {var_object}.
14815 Label if_no_elements(this);
14816
14817 // The following relies on the elements only aliasing with JSProxy::target,
14818 // which is a JavaScript value and hence cannot be confused with an elements
14819 // backing store.
14820 STATIC_ASSERT(static_cast<int>(JSObject::kElementsOffset) ==static_assert(static_cast<int>(JSObject::kElementsOffset
) == static_cast<int>(JSProxy::kTargetOffset), "static_cast<int>(JSObject::kElementsOffset) == static_cast<int>(JSProxy::kTargetOffset)"
)
14821 static_cast<int>(JSProxy::kTargetOffset))static_assert(static_cast<int>(JSObject::kElementsOffset
) == static_cast<int>(JSProxy::kTargetOffset), "static_cast<int>(JSObject::kElementsOffset) == static_cast<int>(JSProxy::kTargetOffset)"
)
;
14822 TNode<Object> object_elements =
14823 LoadObjectField(var_object.value(), JSObject::kElementsOffset);
14824 GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
14825 GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);
14826
14827 // It might still be an empty JSArray.
14828 GotoIfNot(IsJSArrayMap(object_map.value()), if_slow);
14829 TNode<Number> object_length = LoadJSArrayLength(CAST(var_object.value())Cast(var_object.value()));
14830 Branch(TaggedEqual(object_length, SmiConstant(0)), &if_no_elements,
14831 if_slow);
14832
14833 // Continue with {var_object}'s prototype.
14834 BIND(&if_no_elements)Bind(&if_no_elements);
14835 TNode<HeapObject> object = LoadMapPrototype(object_map.value());
14836 GotoIf(IsNull(object), if_fast);
14837
14838 // For all {object}s but the {receiver}, check that the cache is empty.
14839 var_object = CAST(object)Cast(object);
14840 object_map = LoadMap(object);
14841 TNode<WordT> object_enum_length = LoadMapEnumLength(object_map.value());
14842 Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
14843 }
14844}
14845
14846TNode<Map> CodeStubAssembler::CheckEnumCache(TNode<JSReceiver> receiver,
14847 Label* if_empty,
14848 Label* if_runtime) {
14849 Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
14850 TNode<Map> receiver_map = LoadMap(receiver);
14851
14852 // Check if the enum length field of the {receiver} is properly initialized,
14853 // indicating that there is an enum cache.
14854 TNode<WordT> receiver_enum_length = LoadMapEnumLength(receiver_map);
14855 Branch(WordEqual(receiver_enum_length,
14856 IntPtrConstant(kInvalidEnumCacheSentinel)),
14857 &if_no_cache, &if_cache);
14858
14859 BIND(&if_no_cache)Bind(&if_no_cache);
14860 {
14861 // Avoid runtime-call for empty dictionary receivers.
14862 GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
14863 TNode<Smi> length;
14864 TNode<HeapObject> properties = LoadSlowProperties(receiver);
14865
14866 if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOLfalse) {
14867 CSA_DCHECK(this, Word32Or(IsSwissNameDictionary(properties),((void)0)
14868 IsGlobalDictionary(properties)))((void)0);
14869
14870 length = Select<Smi>(
14871 IsSwissNameDictionary(properties),
14872 [=] {
14873 return GetNumberOfElements(
14874 UncheckedCast<SwissNameDictionary>(properties));
14875 },
14876 [=] {
14877 return GetNumberOfElements(
14878 UncheckedCast<GlobalDictionary>(properties));
14879 });
14880
14881 } else {
14882 CSA_DCHECK(this, Word32Or(IsNameDictionary(properties),((void)0)
14883 IsGlobalDictionary(properties)))((void)0);
14884 STATIC_ASSERT(static_cast<int>(NameDictionary::kNumberOfElementsIndex) ==static_assert(static_cast<int>(NameDictionary::kNumberOfElementsIndex
) == static_cast<int>(GlobalDictionary::kNumberOfElementsIndex
), "static_cast<int>(NameDictionary::kNumberOfElementsIndex) == static_cast<int>(GlobalDictionary::kNumberOfElementsIndex)"
)
14885 static_cast<int>(GlobalDictionary::kNumberOfElementsIndex))static_assert(static_cast<int>(NameDictionary::kNumberOfElementsIndex
) == static_cast<int>(GlobalDictionary::kNumberOfElementsIndex
), "static_cast<int>(NameDictionary::kNumberOfElementsIndex) == static_cast<int>(GlobalDictionary::kNumberOfElementsIndex)"
)
;
14886 length = GetNumberOfElements(UncheckedCast<HashTableBase>(properties));
14887 }
14888
14889 GotoIfNot(TaggedEqual(length, SmiConstant(0)), if_runtime);
14890 // Check that there are no elements on the {receiver} and its prototype
14891 // chain. Given that we do not create an EnumCache for dict-mode objects,
14892 // directly jump to {if_empty} if there are no elements and no properties
14893 // on the {receiver}.
14894 CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
14895 }
14896
14897 // Check that there are no elements on the fast {receiver} and its
14898 // prototype chain.
14899 BIND(&if_cache)Bind(&if_cache);
14900 CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);
14901
14902 BIND(&if_fast)Bind(&if_fast);
14903 return receiver_map;
14904}
14905
14906TNode<Object> CodeStubAssembler::GetArgumentValue(TorqueStructArguments args,
14907 TNode<IntPtrT> index) {
14908 return CodeStubArguments(this, args).GetOptionalArgumentValue(index);
14909}
14910
14911TorqueStructArguments CodeStubAssembler::GetFrameArguments(
14912 TNode<RawPtrT> frame, TNode<IntPtrT> argc,
14913 FrameArgumentsArgcType argc_type) {
14914 if (argc_type == FrameArgumentsArgcType::kCountExcludesReceiver) {
14915 argc = IntPtrAdd(argc, IntPtrConstant(kJSArgcReceiverSlots));
14916 }
14917 return CodeStubArguments(this, argc, frame).GetTorqueArguments();
14918}
14919
14920void CodeStubAssembler::Print(const char* s) {
14921 std::string formatted(s);
14922 formatted += "\n";
14923 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
14924 StringConstant(formatted.c_str()));
14925}
14926
14927void CodeStubAssembler::Print(const char* prefix,
14928 TNode<MaybeObject> tagged_value) {
14929 if (prefix != nullptr) {
14930 std::string formatted(prefix);
14931 formatted += ": ";
14932 Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
14933 formatted.c_str(), AllocationType::kOld);
14934 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
14935 HeapConstant(string));
14936 }
14937 // CallRuntime only accepts Objects, so do an UncheckedCast to object.
14938 // DebugPrint explicitly checks whether the tagged value is a MaybeObject.
14939 TNode<Object> arg = UncheckedCast<Object>(tagged_value);
14940 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), arg);
14941}
14942
14943IntegerLiteral CodeStubAssembler::ConstexprIntegerLiteralAdd(
14944 const IntegerLiteral& lhs, const IntegerLiteral& rhs) {
14945 return lhs + rhs;
14946}
14947IntegerLiteral CodeStubAssembler::ConstexprIntegerLiteralLeftShift(
14948 const IntegerLiteral& lhs, const IntegerLiteral& rhs) {
14949 return lhs << rhs;
14950}
14951IntegerLiteral CodeStubAssembler::ConstexprIntegerLiteralBitwiseOr(
14952 const IntegerLiteral& lhs, const IntegerLiteral& rhs) {
14953 return lhs | rhs;
14954}
14955
14956void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
14957 Label ok(this), stack_check_interrupt(this, Label::kDeferred);
14958
14959 TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(
14960 Load(MachineType::Pointer(),
14961 ExternalConstant(ExternalReference::address_of_jslimit(isolate()))));
14962 TNode<BoolT> sp_within_limit = StackPointerGreaterThan(stack_limit);
14963
14964 Branch(sp_within_limit, &ok, &stack_check_interrupt);
14965
14966 BIND(&stack_check_interrupt)Bind(&stack_check_interrupt);
14967 CallRuntime(Runtime::kStackGuard, context);
14968 Goto(&ok);
14969
14970 BIND(&ok)Bind(&ok);
14971}
14972
14973TNode<Object> CodeStubAssembler::CallApiCallback(
14974 TNode<Object> context, TNode<RawPtrT> callback, TNode<IntPtrT> argc,
14975 TNode<Object> data, TNode<Object> holder, TNode<Object> receiver) {
14976 Callable callable = CodeFactory::CallApiCallback(isolate());
14977 return CallStub(callable, context, callback, argc, data, holder, receiver);
14978}
14979
14980TNode<Object> CodeStubAssembler::CallApiCallback(
14981 TNode<Object> context, TNode<RawPtrT> callback, TNode<IntPtrT> argc,
14982 TNode<Object> data, TNode<Object> holder, TNode<Object> receiver,
14983 TNode<Object> value) {
14984 Callable callable = CodeFactory::CallApiCallback(isolate());
14985 return CallStub(callable, context, callback, argc, data, holder, receiver,
14986 value);
14987}
14988
14989TNode<Object> CodeStubAssembler::CallRuntimeNewArray(
14990 TNode<Context> context, TNode<Object> receiver, TNode<Object> length,
14991 TNode<Object> new_target, TNode<Object> allocation_site) {
14992 // Runtime_NewArray receives arguments in the JS order (to avoid unnecessary
14993 // copy). Except the last two (new_target and allocation_site) which are add
14994 // on top of the stack later.
14995 return CallRuntime(Runtime::kNewArray, context, length, receiver, new_target,
14996 allocation_site);
14997}
14998
14999void CodeStubAssembler::TailCallRuntimeNewArray(TNode<Context> context,
15000 TNode<Object> receiver,
15001 TNode<Object> length,
15002 TNode<Object> new_target,
15003 TNode<Object> allocation_site) {
15004 // Runtime_NewArray receives arguments in the JS order (to avoid unnecessary
15005 // copy). Except the last two (new_target and allocation_site) which are add
15006 // on top of the stack later.
15007 return TailCallRuntime(Runtime::kNewArray, context, length, receiver,
15008 new_target, allocation_site);
15009}
15010
15011TNode<JSArray> CodeStubAssembler::ArrayCreate(TNode<Context> context,
15012 TNode<Number> length) {
15013 TVARIABLE(JSArray, array)TVariable<JSArray> array(this);
15014 Label allocate_js_array(this);
15015
15016 Label done(this), next(this), runtime(this, Label::kDeferred);
15017 TNode<Smi> limit = SmiConstant(JSArray::kInitialMaxFastElementArray);
15018 CSA_DCHECK_BRANCH(this, [=](Label* ok, Label* not_ok) {((void)0)
15019 BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,((void)0)
15020 SmiConstant(0), ok, not_ok);((void)0)
15021 })((void)0);
15022 // This check also transitively covers the case where length is too big
15023 // to be representable by a SMI and so is not usable with
15024 // AllocateJSArray.
15025 BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
15026 limit, &runtime, &next);
15027
15028 BIND(&runtime)Bind(&runtime);
15029 {
15030 TNode<NativeContext> native_context = LoadNativeContext(context);
15031 TNode<JSFunction> array_function =
15032 CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX))Cast(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX
))
;
15033 array = CAST(CallRuntimeNewArray(context, array_function, length,Cast(CallRuntimeNewArray(context, array_function, length, array_function
, UndefinedConstant()))
15034 array_function, UndefinedConstant()))Cast(CallRuntimeNewArray(context, array_function, length, array_function
, UndefinedConstant()))
;
15035 Goto(&done);
15036 }
15037
15038 BIND(&next)Bind(&next);
15039 TNode<Smi> length_smi = CAST(length)Cast(length);
15040
15041 TNode<Map> array_map = CAST(LoadContextElement(Cast(LoadContextElement( context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX
))
15042 context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX))Cast(LoadContextElement( context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX
))
;
15043
15044 // TODO(delphick): Consider using
15045 // AllocateUninitializedJSArrayWithElements to avoid initializing an
15046 // array and then writing over it.
15047 array = AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length_smi,
15048 SmiConstant(0));
15049 Goto(&done);
15050
15051 BIND(&done)Bind(&done);
15052 return array.value();
15053}
15054
15055void CodeStubAssembler::SetPropertyLength(TNode<Context> context,
15056 TNode<Object> array,
15057 TNode<Number> length) {
15058 SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
15059 length);
15060}
15061
15062TNode<Smi> CodeStubAssembler::RefillMathRandom(
15063 TNode<NativeContext> native_context) {
15064 // Cache exhausted, populate the cache. Return value is the new index.
15065 const TNode<ExternalReference> refill_math_random =
15066 ExternalConstant(ExternalReference::refill_math_random());
15067 const TNode<ExternalReference> isolate_ptr =
15068 ExternalConstant(ExternalReference::isolate_address(isolate()));
15069 MachineType type_tagged = MachineType::AnyTagged();
15070 MachineType type_ptr = MachineType::Pointer();
15071
15072 return CAST(CallCFunction(refill_math_random, type_tagged,Cast(CallCFunction(refill_math_random, type_tagged, std::make_pair
(type_ptr, isolate_ptr), std::make_pair(type_tagged, native_context
)))
15073 std::make_pair(type_ptr, isolate_ptr),Cast(CallCFunction(refill_math_random, type_tagged, std::make_pair
(type_ptr, isolate_ptr), std::make_pair(type_tagged, native_context
)))
15074 std::make_pair(type_tagged, native_context)))Cast(CallCFunction(refill_math_random, type_tagged, std::make_pair
(type_ptr, isolate_ptr), std::make_pair(type_tagged, native_context
)))
;
15075}
15076
15077TNode<String> CodeStubAssembler::TaggedToDirectString(TNode<Object> value,
15078 Label* fail) {
15079 ToDirectStringAssembler to_direct(state(), CAST(value)Cast(value));
15080 to_direct.TryToDirect(fail);
15081 to_direct.PointerToData(fail);
15082 return CAST(value)Cast(value);
15083}
15084
15085void CodeStubAssembler::RemoveFinalizationRegistryCellFromUnregisterTokenMap(
15086 TNode<JSFinalizationRegistry> finalization_registry,
15087 TNode<WeakCell> weak_cell) {
15088 const TNode<ExternalReference> remove_cell = ExternalConstant(
15089 ExternalReference::
15090 js_finalization_registry_remove_cell_from_unregister_token_map());
15091 const TNode<ExternalReference> isolate_ptr =
15092 ExternalConstant(ExternalReference::isolate_address(isolate()));
15093
15094 CallCFunction(remove_cell, MachineType::Pointer(),
15095 std::make_pair(MachineType::Pointer(), isolate_ptr),
15096 std::make_pair(MachineType::AnyTagged(), finalization_registry),
15097 std::make_pair(MachineType::AnyTagged(), weak_cell));
15098}
15099
15100PrototypeCheckAssembler::PrototypeCheckAssembler(
15101 compiler::CodeAssemblerState* state, Flags flags,
15102 TNode<NativeContext> native_context, TNode<Map> initial_prototype_map,
15103 base::Vector<DescriptorIndexNameValue> properties)
15104 : CodeStubAssembler(state),
15105 flags_(flags),
15106 native_context_(native_context),
15107 initial_prototype_map_(initial_prototype_map),
15108 properties_(properties) {}
15109
15110void PrototypeCheckAssembler::CheckAndBranch(TNode<HeapObject> prototype,
15111 Label* if_unmodified,
15112 Label* if_modified) {
15113 TNode<Map> prototype_map = LoadMap(prototype);
15114 TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);
15115
15116 // The continuation of a failed fast check: if property identity checks are
15117 // enabled, we continue there (since they may still classify the prototype as
15118 // fast), otherwise we bail out.
15119 Label property_identity_check(this, Label::kDeferred);
15120 Label* if_fast_check_failed =
15121 ((flags_ & kCheckPrototypePropertyIdentity) == 0)
15122 ? if_modified
15123 : &property_identity_check;
15124
15125 if ((flags_ & kCheckPrototypePropertyConstness) != 0) {
15126 // A simple prototype map identity check. Note that map identity does not
15127 // guarantee unmodified properties. It does guarantee that no new properties
15128 // have been added, or old properties deleted.
15129
15130 GotoIfNot(TaggedEqual(prototype_map, initial_prototype_map_),
15131 if_fast_check_failed);
15132
15133 // We need to make sure that relevant properties in the prototype have
15134 // not been tampered with. We do this by checking that their slots
15135 // in the prototype's descriptor array are still marked as const.
15136
15137 TNode<Uint32T> combined_details;
15138 for (int i = 0; i < properties_.length(); i++) {
15139 // Assert the descriptor index is in-bounds.
15140 int descriptor = properties_[i].descriptor_index;
15141 CSA_DCHECK(this, Int32LessThan(Int32Constant(descriptor),((void)0)
15142 LoadNumberOfDescriptors(descriptors)))((void)0);
15143
15144 // Assert that the name is correct. This essentially checks that
15145 // the descriptor index corresponds to the insertion order in
15146 // the bootstrapper.
15147 CSA_DCHECK(((void)0)
15148 this,((void)0)
15149 TaggedEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),((void)0)
15150 CodeAssembler::LoadRoot(properties_[i].name_root_index)))((void)0);
15151
15152 TNode<Uint32T> details =
15153 DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
15154
15155 if (i == 0) {
15156 combined_details = details;
15157 } else {
15158 combined_details = Word32And(combined_details, details);
15159 }
15160 }
15161
15162 TNode<Uint32T> constness =
15163 DecodeWord32<PropertyDetails::ConstnessField>(combined_details);
15164
15165 Branch(
15166 Word32Equal(constness,
15167 Int32Constant(static_cast<int>(PropertyConstness::kConst))),
15168 if_unmodified, if_fast_check_failed);
15169 }
15170
15171 if ((flags_ & kCheckPrototypePropertyIdentity) != 0) {
15172 // The above checks have failed, for whatever reason (maybe the prototype
15173 // map has changed, or a property is no longer const). This block implements
15174 // a more thorough check that can also accept maps which 1. do not have the
15175 // initial map, 2. have mutable relevant properties, but 3. still match the
15176 // expected value for all relevant properties.
15177
15178 BIND(&property_identity_check)Bind(&property_identity_check);
15179
15180 int max_descriptor_index = -1;
15181 for (int i = 0; i < properties_.length(); i++) {
15182 max_descriptor_index =
15183 std::max(max_descriptor_index, properties_[i].descriptor_index);
15184 }
15185
15186 // If the greatest descriptor index is out of bounds, the map cannot be
15187 // fast.
15188 GotoIfNot(Int32LessThan(Int32Constant(max_descriptor_index),
15189 LoadNumberOfDescriptors(descriptors)),
15190 if_modified);
15191
15192 // Logic below only handles maps with fast properties.
15193 GotoIfMapHasSlowProperties(prototype_map, if_modified);
15194
15195 for (int i = 0; i < properties_.length(); i++) {
15196 const DescriptorIndexNameValue& p = properties_[i];
15197 const int descriptor = p.descriptor_index;
15198
15199 // Check if the name is correct. This essentially checks that
15200 // the descriptor index corresponds to the insertion order in
15201 // the bootstrapper.
15202 GotoIfNot(TaggedEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
15203 CodeAssembler::LoadRoot(p.name_root_index)),
15204 if_modified);
15205
15206 // Finally, check whether the actual value equals the expected value.
15207 TNode<Uint32T> details =
15208 DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
15209 TVARIABLE(Uint32T, var_details, details)TVariable<Uint32T> var_details(details, this);
15210 TVARIABLE(Object, var_value)TVariable<Object> var_value(this);
15211
15212 const int key_index = DescriptorArray::ToKeyIndex(descriptor);
15213 LoadPropertyFromFastObject(prototype, prototype_map, descriptors,
15214 IntPtrConstant(key_index), &var_details,
15215 &var_value);
15216
15217 TNode<Object> actual_value = var_value.value();
15218 TNode<Object> expected_value =
15219 LoadContextElement(native_context_, p.expected_value_context_index);
15220 GotoIfNot(TaggedEqual(actual_value, expected_value), if_modified);
15221 }
15222
15223 Goto(if_unmodified);
15224 }
15225}
15226
15227//
15228// Begin of SwissNameDictionary macros
15229//
15230
15231namespace {
15232
15233// Provides load and store functions that abstract over the details of accessing
15234// the meta table in memory. Instead they allow using logical indices that are
15235// independent from the underlying entry size in the meta table of a
15236// SwissNameDictionary.
15237class MetaTableAccessor {
15238 public:
15239 MetaTableAccessor(CodeStubAssembler& csa, MachineType mt)
15240 : csa{csa}, mt{mt} {}
15241
15242 TNode<Uint32T> Load(TNode<ByteArray> meta_table, TNode<IntPtrT> index) {
15243 TNode<IntPtrT> offset = OverallOffset(meta_table, index);
15244
15245 return csa.UncheckedCast<Uint32T>(
15246 csa.LoadFromObject(mt, meta_table, offset));
15247 }
15248
15249 TNode<Uint32T> Load(TNode<ByteArray> meta_table, int index) {
15250 return Load(meta_table, csa.IntPtrConstant(index));
15251 }
15252
15253 void Store(TNode<ByteArray> meta_table, TNode<IntPtrT> index,
15254 TNode<Uint32T> data) {
15255 TNode<IntPtrT> offset = OverallOffset(meta_table, index);
15256
15257#ifdef DEBUG
15258 int bits = mt.MemSize() * 8;
15259 TNode<UintPtrT> max_value = csa.UintPtrConstant((1ULL << bits) - 1);
15260
15261 CSA_DCHECK(&csa, csa.UintPtrLessThanOrEqual(csa.ChangeUint32ToWord(data),((void)0)
15262 max_value))((void)0);
15263#endif
15264
15265 csa.StoreToObject(mt.representation(), meta_table, offset, data,
15266 StoreToObjectWriteBarrier::kNone);
15267 }
15268
15269 void Store(TNode<ByteArray> meta_table, int index, TNode<Uint32T> data) {
15270 Store(meta_table, csa.IntPtrConstant(index), data);
15271 }
15272
15273 private:
15274 TNode<IntPtrT> OverallOffset(TNode<ByteArray> meta_table,
15275 TNode<IntPtrT> index) {
15276 // TODO(v8:11330): consider using ElementOffsetFromIndex().
15277
15278 int offset_to_data_minus_tag = ByteArray::kHeaderSize - kHeapObjectTag;
15279
15280 TNode<IntPtrT> overall_offset;
15281 int size = mt.MemSize();
15282 intptr_t constant;
15283 if (csa.TryToIntPtrConstant(index, &constant)) {
15284 intptr_t index_offset = constant * size;
15285 overall_offset =
15286 csa.IntPtrConstant(offset_to_data_minus_tag + index_offset);
15287 } else {
15288 TNode<IntPtrT> index_offset =
15289 csa.IntPtrMul(index, csa.IntPtrConstant(size));
15290 overall_offset = csa.IntPtrAdd(
15291 csa.IntPtrConstant(offset_to_data_minus_tag), index_offset);
15292 }
15293
15294#ifdef DEBUG
15295 TNode<IntPtrT> byte_array_data_bytes =
15296 csa.SmiToIntPtr(csa.LoadFixedArrayBaseLength(meta_table));
15297 TNode<IntPtrT> max_allowed_offset = csa.IntPtrAdd(
15298 byte_array_data_bytes, csa.IntPtrConstant(offset_to_data_minus_tag));
15299 CSA_DCHECK(&csa, csa.UintPtrLessThan(overall_offset, max_allowed_offset))((void)0);
15300#endif
15301
15302 return overall_offset;
15303 }
15304
15305 CodeStubAssembler& csa;
15306 MachineType mt;
15307};
15308
15309// Type of functions that given a MetaTableAccessor, use its load and store
15310// functions to generate code for operating on the meta table.
15311using MetaTableAccessFunction = std::function<void(MetaTableAccessor&)>;
15312
15313// Helper function for macros operating on the meta table of a
15314// SwissNameDictionary. Given a MetaTableAccessFunction, generates branching
15315// code and uses the builder to generate code for each of the three possible
15316// sizes per entry a meta table can have.
15317void GenerateMetaTableAccess(CodeStubAssembler* csa, TNode<IntPtrT> capacity,
15318 MetaTableAccessFunction builder) {
15319 MetaTableAccessor mta8 = MetaTableAccessor(*csa, MachineType::Uint8());
15320 MetaTableAccessor mta16 = MetaTableAccessor(*csa, MachineType::Uint16());
15321 MetaTableAccessor mta32 = MetaTableAccessor(*csa, MachineType::Uint32());
15322
15323 using Label = compiler::CodeAssemblerLabel;
15324 Label small(csa), medium(csa), done(csa);
15325
15326 csa->GotoIf(
15327 csa->IntPtrLessThanOrEqual(
15328 capacity,
15329 csa->IntPtrConstant(SwissNameDictionary::kMax1ByteMetaTableCapacity)),
15330 &small);
15331 csa->GotoIf(
15332 csa->IntPtrLessThanOrEqual(
15333 capacity,
15334 csa->IntPtrConstant(SwissNameDictionary::kMax2ByteMetaTableCapacity)),
15335 &medium);
15336
15337 builder(mta32);
15338 csa->Goto(&done);
15339
15340 csa->Bind(&medium);
15341 builder(mta16);
15342 csa->Goto(&done);
15343
15344 csa->Bind(&small);
15345 builder(mta8);
15346 csa->Goto(&done);
15347 csa->Bind(&done);
15348}
15349
15350} // namespace
15351
15352TNode<IntPtrT> CodeStubAssembler::LoadSwissNameDictionaryNumberOfElements(
15353 TNode<SwissNameDictionary> table, TNode<IntPtrT> capacity) {
15354 TNode<ByteArray> meta_table = LoadSwissNameDictionaryMetaTable(table);
15355
15356 TVARIABLE(Uint32T, nof, Uint32Constant(0))TVariable<Uint32T> nof(Uint32Constant(0), this);
15357 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
15358 nof = mta.Load(meta_table,
15359 SwissNameDictionary::kMetaTableElementCountFieldIndex);
15360 };
15361
15362 GenerateMetaTableAccess(this, capacity, builder);
15363 return ChangeInt32ToIntPtr(nof.value());
15364}
15365
15366TNode<IntPtrT>
15367CodeStubAssembler::LoadSwissNameDictionaryNumberOfDeletedElements(
15368 TNode<SwissNameDictionary> table, TNode<IntPtrT> capacity) {
15369 TNode<ByteArray> meta_table = LoadSwissNameDictionaryMetaTable(table);
15370
15371 TVARIABLE(Uint32T, nod, Uint32Constant(0))TVariable<Uint32T> nod(Uint32Constant(0), this);
15372 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
15373 nod =
15374 mta.Load(meta_table,
15375 SwissNameDictionary::kMetaTableDeletedElementCountFieldIndex);
15376 };
15377
15378 GenerateMetaTableAccess(this, capacity, builder);
15379 return ChangeInt32ToIntPtr(nod.value());
15380}
15381
15382void CodeStubAssembler::StoreSwissNameDictionaryEnumToEntryMapping(
15383 TNode<SwissNameDictionary> table, TNode<IntPtrT> capacity,
15384 TNode<IntPtrT> enum_index, TNode<Int32T> entry) {
15385 TNode<ByteArray> meta_table = LoadSwissNameDictionaryMetaTable(table);
15386 TNode<IntPtrT> meta_table_index = IntPtrAdd(
15387 IntPtrConstant(SwissNameDictionary::kMetaTableEnumerationDataStartIndex),
15388 enum_index);
15389
15390 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
15391 mta.Store(meta_table, meta_table_index, Unsigned(entry));
15392 };
15393
15394 GenerateMetaTableAccess(this, capacity, builder);
15395}
15396
15397TNode<Uint32T>
15398CodeStubAssembler::SwissNameDictionaryIncreaseElementCountOrBailout(
15399 TNode<ByteArray> meta_table, TNode<IntPtrT> capacity,
15400 TNode<Uint32T> max_usable_capacity, Label* bailout) {
15401 TVARIABLE(Uint32T, used_var, Uint32Constant(0))TVariable<Uint32T> used_var(Uint32Constant(0), this);
15402
15403 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
15404 TNode<Uint32T> nof = mta.Load(
15405 meta_table, SwissNameDictionary::kMetaTableElementCountFieldIndex);
15406 TNode<Uint32T> nod =
15407 mta.Load(meta_table,
15408 SwissNameDictionary::kMetaTableDeletedElementCountFieldIndex);
15409 TNode<Uint32T> used = Uint32Add(nof, nod);
15410 GotoIf(Uint32GreaterThanOrEqual(used, max_usable_capacity), bailout);
15411 TNode<Uint32T> inc_nof = Uint32Add(nof, Uint32Constant(1));
15412 mta.Store(meta_table, SwissNameDictionary::kMetaTableElementCountFieldIndex,
15413 inc_nof);
15414 used_var = used;
15415 };
15416
15417 GenerateMetaTableAccess(this, capacity, builder);
15418 return used_var.value();
15419}
15420
15421TNode<Uint32T> CodeStubAssembler::SwissNameDictionaryUpdateCountsForDeletion(
15422 TNode<ByteArray> meta_table, TNode<IntPtrT> capacity) {
15423 TVARIABLE(Uint32T, new_nof_var, Uint32Constant(0))TVariable<Uint32T> new_nof_var(Uint32Constant(0), this);
15424
15425 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
15426 TNode<Uint32T> nof = mta.Load(
15427 meta_table, SwissNameDictionary::kMetaTableElementCountFieldIndex);
15428 TNode<Uint32T> nod =
15429 mta.Load(meta_table,
15430 SwissNameDictionary::kMetaTableDeletedElementCountFieldIndex);
15431
15432 TNode<Uint32T> new_nof = Uint32Sub(nof, Uint32Constant(1));
15433 TNode<Uint32T> new_nod = Uint32Add(nod, Uint32Constant(1));
15434
15435 mta.Store(meta_table, SwissNameDictionary::kMetaTableElementCountFieldIndex,
15436 new_nof);
15437 mta.Store(meta_table,
15438 SwissNameDictionary::kMetaTableDeletedElementCountFieldIndex,
15439 new_nod);
15440
15441 new_nof_var = new_nof;
15442 };
15443
15444 GenerateMetaTableAccess(this, capacity, builder);
15445 return new_nof_var.value();
15446}
15447
15448TNode<SwissNameDictionary> CodeStubAssembler::AllocateSwissNameDictionary(
15449 TNode<IntPtrT> at_least_space_for) {
15450 // Note that as AllocateNameDictionary, we return a table with initial
15451 // (non-zero) capacity even if |at_least_space_for| is 0.
15452
15453 TNode<IntPtrT> capacity =
15454 IntPtrMax(IntPtrConstant(SwissNameDictionary::kInitialCapacity),
15455 SwissNameDictionaryCapacityFor(at_least_space_for));
15456
15457 return AllocateSwissNameDictionaryWithCapacity(capacity);
15458}
15459
15460TNode<SwissNameDictionary> CodeStubAssembler::AllocateSwissNameDictionary(
15461 int at_least_space_for) {
15462 return AllocateSwissNameDictionary(IntPtrConstant(at_least_space_for));
15463}
15464
15465TNode<SwissNameDictionary>
15466CodeStubAssembler::AllocateSwissNameDictionaryWithCapacity(
15467 TNode<IntPtrT> capacity) {
15468 Comment("[ AllocateSwissNameDictionaryWithCapacity");
15469 CSA_DCHECK(this, WordIsPowerOfTwo(capacity))((void)0);
15470 CSA_DCHECK(this, UintPtrGreaterThanOrEqual(((void)0)
15471 capacity,((void)0)
15472 IntPtrConstant(SwissNameDictionary::kInitialCapacity)))((void)0);
15473 CSA_DCHECK(this,((void)0)
15474 UintPtrLessThanOrEqual(((void)0)
15475 capacity, IntPtrConstant(SwissNameDictionary::MaxCapacity())))((void)0);
15476
15477 Comment("Size check.");
15478 intptr_t capacity_constant;
15479 if (ToParameterConstant(capacity, &capacity_constant)) {
15480 CHECK_LE(capacity_constant, SwissNameDictionary::MaxCapacity())do { bool _cmp = ::v8::base::CmpLEImpl< typename ::v8::base
::pass_value_or_ref<decltype(capacity_constant)>::type,
typename ::v8::base::pass_value_or_ref<decltype(SwissNameDictionary
::MaxCapacity())>::type>((capacity_constant), (SwissNameDictionary
::MaxCapacity())); do { if ((__builtin_expect(!!(!(_cmp)), 0)
)) { V8_Fatal("Check failed: %s.", "capacity_constant" " " "<="
" " "SwissNameDictionary::MaxCapacity()"); } } while (false)
; } while (false)
;
15481 } else {
15482 Label if_out_of_memory(this, Label::kDeferred), next(this);
15483 Branch(UintPtrGreaterThan(
15484 capacity, IntPtrConstant(SwissNameDictionary::MaxCapacity())),
15485 &if_out_of_memory, &next);
15486
15487 BIND(&if_out_of_memory)Bind(&if_out_of_memory);
15488 CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
15489 NoContextConstant());
15490 Unreachable();
15491
15492 BIND(&next)Bind(&next);
15493 }
15494
15495 // TODO(v8:11330) Consider adding dedicated handling for constant capacties,
15496 // similar to AllocateOrderedHashTableWithCapacity.
15497
15498 // We must allocate the ByteArray first. Otherwise, allocating the ByteArray
15499 // may trigger GC, which may try to verify the un-initialized
15500 // SwissNameDictionary.
15501 Comment("Meta table allocation.");
15502 TNode<IntPtrT> meta_table_payload_size =
15503 SwissNameDictionaryMetaTableSizeFor(capacity);
15504
15505 TNode<ByteArray> meta_table =
15506 AllocateNonEmptyByteArray(Unsigned(meta_table_payload_size),
15507 AllocationFlag::kAllowLargeObjectAllocation);
15508
15509 Comment("SwissNameDictionary allocation.");
15510 TNode<IntPtrT> total_size = SwissNameDictionarySizeFor(capacity);
15511
15512 TNode<SwissNameDictionary> table = UncheckedCast<SwissNameDictionary>(
15513 Allocate(total_size, AllocationFlag::kAllowLargeObjectAllocation));
15514
15515 StoreMapNoWriteBarrier(table, RootIndex::kSwissNameDictionaryMap);
15516
15517 Comment(
15518 "Initialize the hash, capacity, meta table pointer, and number of "
15519 "(deleted) elements.");
15520
15521 StoreSwissNameDictionaryHash(table,
15522 Uint32Constant(PropertyArray::kNoHashSentinel));
15523 StoreSwissNameDictionaryCapacity(table, TruncateIntPtrToInt32(capacity));
15524 StoreSwissNameDictionaryMetaTable(table, meta_table);
15525
15526 // Set present and deleted element count without doing branching needed for
15527 // meta table access twice.
15528 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
15529 mta.Store(meta_table, SwissNameDictionary::kMetaTableElementCountFieldIndex,
15530 Uint32Constant(0));
15531 mta.Store(meta_table,
15532 SwissNameDictionary::kMetaTableDeletedElementCountFieldIndex,
15533 Uint32Constant(0));
15534 };
15535 GenerateMetaTableAccess(this, capacity, builder);
15536
15537 Comment("Initialize the ctrl table.");
15538
15539 TNode<IntPtrT> ctrl_table_start_offset_minus_tag =
15540 SwissNameDictionaryCtrlTableStartOffsetMT(capacity);
15541
15542 TNode<IntPtrT> table_address_with_tag = BitcastTaggedToWord(table);
15543 TNode<IntPtrT> ctrl_table_size_bytes =
15544 IntPtrAdd(capacity, IntPtrConstant(SwissNameDictionary::kGroupWidth));
15545 TNode<IntPtrT> ctrl_table_start_ptr =
15546 IntPtrAdd(table_address_with_tag, ctrl_table_start_offset_minus_tag);
15547 TNode<IntPtrT> ctrl_table_end_ptr =
15548 IntPtrAdd(ctrl_table_start_ptr, ctrl_table_size_bytes);
15549
15550 // |ctrl_table_size_bytes| (= capacity + kGroupWidth) is divisble by four:
15551 STATIC_ASSERT(SwissNameDictionary::kGroupWidth % 4 == 0)static_assert(SwissNameDictionary::kGroupWidth % 4 == 0, "SwissNameDictionary::kGroupWidth % 4 == 0"
)
;
15552 STATIC_ASSERT(SwissNameDictionary::kInitialCapacity % 4 == 0)static_assert(SwissNameDictionary::kInitialCapacity % 4 == 0,
"SwissNameDictionary::kInitialCapacity % 4 == 0")
;
15553
15554 // TODO(v8:11330) For all capacities except 4, we know that
15555 // |ctrl_table_size_bytes| is divisible by 8. Consider initializing the ctrl
15556 // table with WordTs in those cases. Alternatively, always initialize as many
15557 // bytes as possbible with WordT and then, if necessary, the remaining 4 bytes
15558 // with Word32T.
15559
15560 constexpr uint8_t kEmpty = swiss_table::Ctrl::kEmpty;
15561 constexpr uint32_t kEmpty32 =
15562 (kEmpty << 24) | (kEmpty << 16) | (kEmpty << 8) | kEmpty;
15563 TNode<Int32T> empty32 = Int32Constant(kEmpty32);
15564 BuildFastLoop<IntPtrT>(
15565 ctrl_table_start_ptr, ctrl_table_end_ptr,
15566 [=](TNode<IntPtrT> current) {
15567 UnsafeStoreNoWriteBarrier(MachineRepresentation::kWord32, current,
15568 empty32);
15569 },
15570 sizeof(uint32_t), IndexAdvanceMode::kPost);
15571
15572 Comment("Initialize the data table.");
15573
15574 TNode<IntPtrT> data_table_start_offset_minus_tag =
15575 SwissNameDictionaryDataTableStartOffsetMT();
15576 TNode<IntPtrT> data_table_ptr =
15577 IntPtrAdd(table_address_with_tag, data_table_start_offset_minus_tag);
15578 TNode<IntPtrT> data_table_size = IntPtrMul(
15579 IntPtrConstant(SwissNameDictionary::kDataTableEntryCount * kTaggedSize),
15580 capacity);
15581
15582 StoreFieldsNoWriteBarrier(data_table_ptr,
15583 IntPtrAdd(data_table_ptr, data_table_size),
15584 TheHoleConstant());
15585
15586 Comment("AllocateSwissNameDictionaryWithCapacity ]");
15587
15588 return table;
15589}
15590
15591TNode<SwissNameDictionary> CodeStubAssembler::CopySwissNameDictionary(
15592 TNode<SwissNameDictionary> original) {
15593 Comment("[ CopySwissNameDictionary");
15594
15595 TNode<IntPtrT> capacity =
15596 Signed(ChangeUint32ToWord(LoadSwissNameDictionaryCapacity(original)));
15597
15598 // We must allocate the ByteArray first. Otherwise, allocating the ByteArray
15599 // may trigger GC, which may try to verify the un-initialized
15600 // SwissNameDictionary.
15601 Comment("Meta table allocation.");
15602 TNode<IntPtrT> meta_table_payload_size =
15603 SwissNameDictionaryMetaTableSizeFor(capacity);
15604
15605 TNode<ByteArray> meta_table =
15606 AllocateNonEmptyByteArray(Unsigned(meta_table_payload_size),
15607 AllocationFlag::kAllowLargeObjectAllocation);
15608
15609 Comment("SwissNameDictionary allocation.");
15610 TNode<IntPtrT> total_size = SwissNameDictionarySizeFor(capacity);
15611
15612 TNode<SwissNameDictionary> table = UncheckedCast<SwissNameDictionary>(
15613 Allocate(total_size, AllocationFlag::kAllowLargeObjectAllocation));
15614
15615 StoreMapNoWriteBarrier(table, RootIndex::kSwissNameDictionaryMap);
15616
15617 Comment("Copy the hash and capacity.");
15618
15619 StoreSwissNameDictionaryHash(table, LoadSwissNameDictionaryHash(original));
15620 StoreSwissNameDictionaryCapacity(table, TruncateIntPtrToInt32(capacity));
15621 StoreSwissNameDictionaryMetaTable(table, meta_table);
15622 // Not setting up number of (deleted elements), copying whole meta table
15623 // instead.
15624
15625 TNode<ExternalReference> memcpy =
15626 ExternalConstant(ExternalReference::libc_memcpy_function());
15627
15628 TNode<IntPtrT> old_table_address_with_tag = BitcastTaggedToWord(original);
15629 TNode<IntPtrT> new_table_address_with_tag = BitcastTaggedToWord(table);
15630
15631 TNode<IntPtrT> ctrl_table_start_offset_minus_tag =
15632 SwissNameDictionaryCtrlTableStartOffsetMT(capacity);
15633
15634 TNode<IntPtrT> ctrl_table_size_bytes =
15635 IntPtrAdd(capacity, IntPtrConstant(SwissNameDictionary::kGroupWidth));
15636
15637 Comment("Copy the ctrl table.");
15638 {
15639 TNode<IntPtrT> old_ctrl_table_start_ptr = IntPtrAdd(
15640 old_table_address_with_tag, ctrl_table_start_offset_minus_tag);
15641 TNode<IntPtrT> new_ctrl_table_start_ptr = IntPtrAdd(
15642 new_table_address_with_tag, ctrl_table_start_offset_minus_tag);
15643
15644 CallCFunction(
15645 memcpy, MachineType::Pointer(),
15646 std::make_pair(MachineType::Pointer(), new_ctrl_table_start_ptr),
15647 std::make_pair(MachineType::Pointer(), old_ctrl_table_start_ptr),
15648 std::make_pair(MachineType::UintPtr(), ctrl_table_size_bytes));
15649 }
15650
15651 Comment("Copy the data table.");
15652 {
15653 TNode<IntPtrT> start_offset =
15654 IntPtrConstant(SwissNameDictionary::DataTableStartOffset());
15655 TNode<IntPtrT> data_table_size = IntPtrMul(
15656 IntPtrConstant(SwissNameDictionary::kDataTableEntryCount * kTaggedSize),
15657 capacity);
15658
15659 BuildFastLoop<IntPtrT>(
15660 start_offset, IntPtrAdd(start_offset, data_table_size),
15661 [=](TNode<IntPtrT> offset) {
15662 TNode<Object> table_field = LoadObjectField(original, offset);
15663 StoreObjectField(table, offset, table_field);
15664 },
15665 kTaggedSize, IndexAdvanceMode::kPost);
15666 }
15667
15668 Comment("Copy the meta table");
15669 {
15670 TNode<IntPtrT> old_meta_table_address_with_tag =
15671 BitcastTaggedToWord(LoadSwissNameDictionaryMetaTable(original));
15672 TNode<IntPtrT> new_meta_table_address_with_tag =
15673 BitcastTaggedToWord(meta_table);
15674
15675 TNode<IntPtrT> meta_table_size =
15676 SwissNameDictionaryMetaTableSizeFor(capacity);
15677
15678 TNode<IntPtrT> old_data_start =
15679 IntPtrAdd(old_meta_table_address_with_tag,
15680 IntPtrConstant(ByteArray::kHeaderSize - kHeapObjectTag));
15681 TNode<IntPtrT> new_data_start =
15682 IntPtrAdd(new_meta_table_address_with_tag,
15683 IntPtrConstant(ByteArray::kHeaderSize - kHeapObjectTag));
15684
15685 CallCFunction(memcpy, MachineType::Pointer(),
15686 std::make_pair(MachineType::Pointer(), new_data_start),
15687 std::make_pair(MachineType::Pointer(), old_data_start),
15688 std::make_pair(MachineType::UintPtr(), meta_table_size));
15689 }
15690
15691 Comment("Copy the PropertyDetails table");
15692 {
15693 TNode<IntPtrT> property_details_start_offset_minus_tag =
15694 SwissNameDictionaryOffsetIntoPropertyDetailsTableMT(table, capacity,
15695 IntPtrConstant(0));
15696
15697 // Offset to property details entry
15698 TVARIABLE(IntPtrT, details_table_offset_minus_tag,TVariable<IntPtrT> details_table_offset_minus_tag(property_details_start_offset_minus_tag
, this)
15699 property_details_start_offset_minus_tag)TVariable<IntPtrT> details_table_offset_minus_tag(property_details_start_offset_minus_tag
, this)
;
15700
15701 TNode<IntPtrT> start = ctrl_table_start_offset_minus_tag;
15702
15703 VariableList in_loop_variables({&details_table_offset_minus_tag}, zone());
15704 BuildFastLoop<IntPtrT>(
15705 in_loop_variables, start, IntPtrAdd(start, ctrl_table_size_bytes),
15706 [&](TNode<IntPtrT> ctrl_table_offset) {
15707 TNode<Uint8T> ctrl = Load<Uint8T>(original, ctrl_table_offset);
15708
15709 // TODO(v8:11330) Entries in the PropertyDetails table may be
15710 // uninitialized if the corresponding buckets in the data/ctrl table
15711 // are empty. Therefore, to avoid accessing un-initialized memory
15712 // here, we need to check the ctrl table to determine whether we
15713 // should copy a certain PropertyDetails entry or not.
15714 // TODO(v8:11330) If this function becomes performance-critical, we
15715 // may consider always initializing the PropertyDetails table entirely
15716 // during allocation, to avoid the branching during copying.
15717 Label done(this);
15718 // |kNotFullMask| catches kEmpty and kDeleted, both of which indicate
15719 // entries that we don't want to copy the PropertyDetails for.
15720 GotoIf(IsSetWord32(ctrl, swiss_table::kNotFullMask), &done);
15721
15722 TNode<Uint8T> details =
15723 Load<Uint8T>(original, details_table_offset_minus_tag.value());
15724
15725 StoreToObject(MachineRepresentation::kWord8, table,
15726 details_table_offset_minus_tag.value(), details,
15727 StoreToObjectWriteBarrier::kNone);
15728 Goto(&done);
15729 BIND(&done)Bind(&done);
15730
15731 details_table_offset_minus_tag =
15732 IntPtrAdd(details_table_offset_minus_tag.value(),
15733 IntPtrConstant(kOneByteSize));
15734 },
15735 kOneByteSize, IndexAdvanceMode::kPost);
15736 }
15737
15738 Comment("CopySwissNameDictionary ]");
15739
15740 return table;
15741}
15742
15743TNode<IntPtrT> CodeStubAssembler::SwissNameDictionaryOffsetIntoDataTableMT(
15744 TNode<SwissNameDictionary> dict, TNode<IntPtrT> index, int field_index) {
15745 TNode<IntPtrT> data_table_start = SwissNameDictionaryDataTableStartOffsetMT();
15746
15747 TNode<IntPtrT> offset_within_data_table = IntPtrMul(
15748 index,
15749 IntPtrConstant(SwissNameDictionary::kDataTableEntryCount * kTaggedSize));
15750
15751 if (field_index != 0) {
15752 offset_within_data_table = IntPtrAdd(
15753 offset_within_data_table, IntPtrConstant(field_index * kTaggedSize));
15754 }
15755
15756 return IntPtrAdd(data_table_start, offset_within_data_table);
15757}
15758
15759TNode<IntPtrT>
15760CodeStubAssembler::SwissNameDictionaryOffsetIntoPropertyDetailsTableMT(
15761 TNode<SwissNameDictionary> dict, TNode<IntPtrT> capacity,
15762 TNode<IntPtrT> index) {
15763 CSA_DCHECK(this,((void)0)
15764 WordEqual(capacity, ChangeUint32ToWord(((void)0)
15765 LoadSwissNameDictionaryCapacity(dict))))((void)0);
15766
15767 TNode<IntPtrT> data_table_start = SwissNameDictionaryDataTableStartOffsetMT();
15768
15769 TNode<IntPtrT> gw = IntPtrConstant(SwissNameDictionary::kGroupWidth);
15770 TNode<IntPtrT> data_and_ctrl_table_size = IntPtrAdd(
15771 IntPtrMul(capacity,
15772 IntPtrConstant(kOneByteSize +
15773 SwissNameDictionary::kDataTableEntryCount *
15774 kTaggedSize)),
15775 gw);
15776
15777 TNode<IntPtrT> property_details_table_start =
15778 IntPtrAdd(data_table_start, data_and_ctrl_table_size);
15779
15780 CSA_DCHECK(((void)0)
15781 this,((void)0)
15782 WordEqual(FieldSliceSwissNameDictionaryPropertyDetailsTable(dict).offset,((void)0)
15783 // Our calculation subtracted the tag, Torque's offset didn't.((void)0)
15784 IntPtrAdd(property_details_table_start,((void)0)
15785 IntPtrConstant(kHeapObjectTag))))((void)0);
15786
15787 TNode<IntPtrT> offset_within_details_table = index;
15788 return IntPtrAdd(property_details_table_start, offset_within_details_table);
15789}
15790
15791void CodeStubAssembler::StoreSwissNameDictionaryCapacity(
15792 TNode<SwissNameDictionary> table, TNode<Int32T> capacity) {
15793 StoreObjectFieldNoWriteBarrier<Word32T>(
15794 table, SwissNameDictionary::CapacityOffset(), capacity);
15795}
15796
15797TNode<Name> CodeStubAssembler::LoadSwissNameDictionaryKey(
15798 TNode<SwissNameDictionary> dict, TNode<IntPtrT> entry) {
15799 TNode<IntPtrT> offset_minus_tag = SwissNameDictionaryOffsetIntoDataTableMT(
15800 dict, entry, SwissNameDictionary::kDataTableKeyEntryIndex);
15801
15802 // TODO(v8:11330) Consider using LoadObjectField here.
15803 return CAST(Load<Object>(dict, offset_minus_tag))Cast(Load<Object>(dict, offset_minus_tag));
15804}
15805
15806TNode<Uint8T> CodeStubAssembler::LoadSwissNameDictionaryPropertyDetails(
15807 TNode<SwissNameDictionary> table, TNode<IntPtrT> capacity,
15808 TNode<IntPtrT> entry) {
15809 TNode<IntPtrT> offset_minus_tag =
15810 SwissNameDictionaryOffsetIntoPropertyDetailsTableMT(table, capacity,
15811 entry);
15812 // TODO(v8:11330) Consider using LoadObjectField here.
15813 return Load<Uint8T>(table, offset_minus_tag);
15814}
15815
15816void CodeStubAssembler::StoreSwissNameDictionaryPropertyDetails(
15817 TNode<SwissNameDictionary> table, TNode<IntPtrT> capacity,
15818 TNode<IntPtrT> entry, TNode<Uint8T> details) {
15819 TNode<IntPtrT> offset_minus_tag =
15820 SwissNameDictionaryOffsetIntoPropertyDetailsTableMT(table, capacity,
15821 entry);
15822
15823 // TODO(v8:11330) Consider using StoreObjectField here.
15824 StoreToObject(MachineRepresentation::kWord8, table, offset_minus_tag, details,
15825 StoreToObjectWriteBarrier::kNone);
15826}
15827
15828void CodeStubAssembler::StoreSwissNameDictionaryKeyAndValue(
15829 TNode<SwissNameDictionary> dict, TNode<IntPtrT> entry, TNode<Object> key,
15830 TNode<Object> value) {
15831 STATIC_ASSERT(SwissNameDictionary::kDataTableKeyEntryIndex == 0)static_assert(SwissNameDictionary::kDataTableKeyEntryIndex ==
0, "SwissNameDictionary::kDataTableKeyEntryIndex == 0")
;
15832 STATIC_ASSERT(SwissNameDictionary::kDataTableValueEntryIndex == 1)static_assert(SwissNameDictionary::kDataTableValueEntryIndex ==
1, "SwissNameDictionary::kDataTableValueEntryIndex == 1")
;
15833
15834 // TODO(v8:11330) Consider using StoreObjectField here.
15835 TNode<IntPtrT> key_offset_minus_tag =
15836 SwissNameDictionaryOffsetIntoDataTableMT(
15837 dict, entry, SwissNameDictionary::kDataTableKeyEntryIndex);
15838 StoreToObject(MachineRepresentation::kTagged, dict, key_offset_minus_tag, key,
15839 StoreToObjectWriteBarrier::kFull);
15840
15841 TNode<IntPtrT> value_offset_minus_tag =
15842 IntPtrAdd(key_offset_minus_tag, IntPtrConstant(kTaggedSize));
15843 StoreToObject(MachineRepresentation::kTagged, dict, value_offset_minus_tag,
15844 value, StoreToObjectWriteBarrier::kFull);
15845}
15846
15847TNode<Uint64T> CodeStubAssembler::LoadSwissNameDictionaryCtrlTableGroup(
15848 TNode<IntPtrT> address) {
15849 TNode<RawPtrT> ptr = ReinterpretCast<RawPtrT>(address);
15850 TNode<Uint64T> data = UnalignedLoad<Uint64T>(ptr, IntPtrConstant(0));
15851
15852#ifdef V8_TARGET_LITTLE_ENDIAN1
15853 return data;
15854#else
15855 // Reverse byte order.
15856 // TODO(v8:11330) Doing this without using dedicated instructions (which we
15857 // don't have access to here) will destroy any performance benefit Swiss
15858 // Tables have. So we just support this so that we don't have to disable the
15859 // test suite for SwissNameDictionary on big endian platforms.
15860
15861 TNode<Uint64T> result = Uint64Constant(0);
15862 constexpr int count = sizeof(uint64_t);
15863 for (int i = 0; i < count; ++i) {
15864 int src_offset = i * 8;
15865 int dest_offset = (count - i - 1) * 8;
15866
15867 TNode<Uint64T> mask = Uint64Constant(0xffULL << src_offset);
15868 TNode<Uint64T> src_data = Word64And(data, mask);
15869
15870 TNode<Uint64T> shifted =
15871 src_offset < dest_offset
15872 ? Word64Shl(src_data, Uint64Constant(dest_offset - src_offset))
15873 : Word64Shr(src_data, Uint64Constant(src_offset - dest_offset));
15874 result = Unsigned(Word64Or(result, shifted));
15875 }
15876 return result;
15877#endif
15878}
15879
15880void CodeStubAssembler::SwissNameDictionarySetCtrl(
15881 TNode<SwissNameDictionary> table, TNode<IntPtrT> capacity,
15882 TNode<IntPtrT> entry, TNode<Uint8T> ctrl) {
15883 CSA_DCHECK(this,((void)0)
15884 WordEqual(capacity, ChangeUint32ToWord(((void)0)
15885 LoadSwissNameDictionaryCapacity(table))))((void)0);
15886 CSA_DCHECK(this, UintPtrLessThan(entry, capacity))((void)0);
15887
15888 TNode<IntPtrT> one = IntPtrConstant(1);
15889 TNode<IntPtrT> offset = SwissNameDictionaryCtrlTableStartOffsetMT(capacity);
15890
15891 CSA_DCHECK(this,((void)0)
15892 WordEqual(FieldSliceSwissNameDictionaryCtrlTable(table).offset,((void)0)
15893 IntPtrAdd(offset, one)))((void)0);
15894
15895 TNode<IntPtrT> offset_entry = IntPtrAdd(offset, entry);
15896 StoreToObject(MachineRepresentation::kWord8, table, offset_entry, ctrl,
15897 StoreToObjectWriteBarrier::kNone);
15898
15899 TNode<IntPtrT> mask = IntPtrSub(capacity, one);
15900 TNode<IntPtrT> group_width = IntPtrConstant(SwissNameDictionary::kGroupWidth);
15901
15902 // See SwissNameDictionary::SetCtrl for description of what's going on here.
15903
15904 // ((entry - Group::kWidth) & mask) + 1
15905 TNode<IntPtrT> copy_entry_lhs =
15906 IntPtrAdd(WordAnd(IntPtrSub(entry, group_width), mask), one);
15907 // ((Group::kWidth - 1) & mask)
15908 TNode<IntPtrT> copy_entry_rhs = WordAnd(IntPtrSub(group_width, one), mask);
15909 TNode<IntPtrT> copy_entry = IntPtrAdd(copy_entry_lhs, copy_entry_rhs);
15910 TNode<IntPtrT> offset_copy_entry = IntPtrAdd(offset, copy_entry);
15911
15912 // |entry| < |kGroupWidth| implies |copy_entry| == |capacity| + |entry|
15913 CSA_DCHECK(this, Word32Or(UintPtrGreaterThanOrEqual(entry, group_width),((void)0)
15914 WordEqual(copy_entry, IntPtrAdd(capacity, entry))))((void)0);
15915
15916 // |entry| >= |kGroupWidth| implies |copy_entry| == |entry|
15917 CSA_DCHECK(this, Word32Or(UintPtrLessThan(entry, group_width),((void)0)
15918 WordEqual(copy_entry, entry)))((void)0);
15919
15920 // TODO(v8:11330): consider using StoreObjectFieldNoWriteBarrier here.
15921 StoreToObject(MachineRepresentation::kWord8, table, offset_copy_entry, ctrl,
15922 StoreToObjectWriteBarrier::kNone);
15923}
15924
15925void CodeStubAssembler::SwissNameDictionaryFindEntry(
15926 TNode<SwissNameDictionary> table, TNode<Name> key, Label* found,
15927 TVariable<IntPtrT>* var_found_entry, Label* not_found) {
15928 if (SwissNameDictionary::kUseSIMD) {
15929 SwissNameDictionaryFindEntrySIMD(table, key, found, var_found_entry,
15930 not_found);
15931 } else {
15932 SwissNameDictionaryFindEntryPortable(table, key, found, var_found_entry,
15933 not_found);
15934 }
15935}
15936
15937void CodeStubAssembler::SwissNameDictionaryAdd(TNode<SwissNameDictionary> table,
15938 TNode<Name> key,
15939 TNode<Object> value,
15940 TNode<Uint8T> property_details,
15941 Label* needs_resize) {
15942 if (SwissNameDictionary::kUseSIMD) {
15943 SwissNameDictionaryAddSIMD(table, key, value, property_details,
15944 needs_resize);
15945 } else {
15946 SwissNameDictionaryAddPortable(table, key, value, property_details,
15947 needs_resize);
15948 }
15949}
15950
15951void CodeStubAssembler::SharedValueBarrier(
15952 TNode<Context> context, TVariable<Object>* var_shared_value) {
15953 // The barrier ensures that the value can be shared across Isolates.
15954 // The fast paths should be kept in sync with Object::Share.
15955
15956 TNode<Object> value = var_shared_value->value();
15957 Label check_in_shared_heap(this), slow(this), skip_barrier(this), done(this);
15958
15959 // Fast path: Smis are trivially shared.
15960 GotoIf(TaggedIsSmi(value), &done);
15961 // Fast path: Shared memory features imply shared RO space, so RO objects are
15962 // trivially shared.
15963 CSA_DCHECK(this, BoolConstant(ReadOnlyHeap::IsReadOnlySpaceShared()))((void)0);
15964 TNode<IntPtrT> page_flags = LoadBasicMemoryChunkFlags(CAST(value)Cast(value));
15965 GotoIf(WordNotEqual(WordAnd(page_flags,
15966 IntPtrConstant(BasicMemoryChunk::READ_ONLY_HEAP)),
15967 IntPtrConstant(0)),
15968 &skip_barrier);
15969
15970 // Fast path: Check if the HeapObject is already shared.
15971 TNode<Uint16T> value_instance_type =
15972 LoadMapInstanceType(LoadMap(CAST(value)Cast(value)));
15973 GotoIf(IsSharedStringInstanceType(value_instance_type), &skip_barrier);
15974 GotoIf(IsJSSharedStructInstanceType(value_instance_type), &skip_barrier);
15975 GotoIf(IsHeapNumberInstanceType(value_instance_type), &check_in_shared_heap);
15976 Goto(&slow);
15977
15978 BIND(&check_in_shared_heap)Bind(&check_in_shared_heap);
15979 {
15980 Branch(
15981 WordNotEqual(WordAnd(page_flags,
15982 IntPtrConstant(BasicMemoryChunk::IN_SHARED_HEAP)),
15983 IntPtrConstant(0)),
15984 &skip_barrier, &slow);
15985 }
15986
15987 // Slow path: Call out to runtime to share primitives and to throw on
15988 // non-shared JS objects.
15989 BIND(&slow)Bind(&slow);
15990 {
15991 *var_shared_value =
15992 CallRuntime(Runtime::kSharedValueBarrierSlow, context, value);
15993 Goto(&skip_barrier);
15994 }
15995
15996 BIND(&skip_barrier)Bind(&skip_barrier);
15997 {
15998 CSA_DCHECK(((void)0)
15999 this,((void)0)
16000 WordNotEqual(((void)0)
16001 WordAnd(LoadBasicMemoryChunkFlags(CAST(var_shared_value->value())),((void)0)
16002 IntPtrConstant(BasicMemoryChunk::READ_ONLY_HEAP |((void)0)
16003 BasicMemoryChunk::IN_SHARED_HEAP)),((void)0)
16004 IntPtrConstant(0)))((void)0);
16005 Goto(&done);
16006 }
16007
16008 BIND(&done)Bind(&done);
16009}
16010
16011} // namespace internal
16012} // namespace v8

../deps/v8/src/base/bit-field.h

1// Copyright 2019 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_BASE_BIT_FIELD_H_
6#define V8_BASE_BIT_FIELD_H_
7
8#include <stdint.h>
9
10#include "src/base/macros.h"
11
12namespace v8 {
13namespace base {
14
15// ----------------------------------------------------------------------------
16// BitField is a help template for encoding and decode bitfield with
17// unsigned content.
18// Instantiate them via 'using', which is cheaper than deriving a new class:
19// using MyBitField = base::BitField<MyEnum, 4, 2>;
20// The BitField class is final to enforce this style over derivation.
21
22template <class T, int shift, int size, class U = uint32_t>
23class BitField final {
24 public:
25 STATIC_ASSERT(std::is_unsigned<U>::value)static_assert(std::is_unsigned<U>::value, "std::is_unsigned<U>::value"
)
;
26 STATIC_ASSERT(shift < 8 * sizeof(U))static_assert(shift < 8 * sizeof(U), "shift < 8 * sizeof(U)"
)
; // Otherwise shifts by {shift} are UB.
27 STATIC_ASSERT(size < 8 * sizeof(U))static_assert(size < 8 * sizeof(U), "size < 8 * sizeof(U)"
)
; // Otherwise shifts by {size} are UB.
28 STATIC_ASSERT(shift + size <= 8 * sizeof(U))static_assert(shift + size <= 8 * sizeof(U), "shift + size <= 8 * sizeof(U)"
)
;
29 STATIC_ASSERT(size > 0)static_assert(size > 0, "size > 0");
30
31 using FieldType = T;
32
33 // A type U mask of bit field. To use all bits of a type U of x bits
34 // in a bitfield without compiler warnings we have to compute 2^x
35 // without using a shift count of x in the computation.
36 static constexpr int kShift = shift;
37 static constexpr int kSize = size;
38 static constexpr U kMask = ((U{1} << kShift) << kSize) - (U{1} << kShift);
39 static constexpr int kLastUsedBit = kShift + kSize - 1;
40 static constexpr U kNumValues = U{1} << kSize;
41
42 // Value for the field with all bits set.
43 static constexpr T kMax = static_cast<T>(kNumValues - 1);
44
45 template <class T2, int size2>
46 using Next = BitField<T2, kShift + kSize, size2, U>;
47
48 // Tells whether the provided value fits into the bit field.
49 static constexpr bool is_valid(T value) {
50 return (static_cast<U>(value) & ~static_cast<U>(kMax)) == 0;
51 }
52
53 // Returns a type U with the bit field value encoded.
54 static constexpr U encode(T value) {
55 DCHECK(is_valid(value))((void) 0);
56 return static_cast<U>(value) << kShift;
14
The result of the '<<' expression is undefined
57 }
58
59 // Returns a type U with the bit field value updated.
60 static constexpr U update(U previous, T value) {
61 return (previous & ~kMask) | encode(value);
62 }
63
64 // Extracts the bit field from the value.
65 static constexpr T decode(U value) {
66 return static_cast<T>((value & kMask) >> kShift);
67 }
68};
69
70template <class T, int shift, int size>
71using BitField8 = BitField<T, shift, size, uint8_t>;
72
73template <class T, int shift, int size>
74using BitField16 = BitField<T, shift, size, uint16_t>;
75
76template <class T, int shift, int size>
77using BitField64 = BitField<T, shift, size, uint64_t>;
78
79// Helper macros for defining a contiguous sequence of bit fields. Example:
80// (backslashes at the ends of respective lines of this multi-line macro
81// definition are omitted here to please the compiler)
82//
83// #define MAP_BIT_FIELD1(V, _)
84// V(IsAbcBit, bool, 1, _)
85// V(IsBcdBit, bool, 1, _)
86// V(CdeBits, int, 5, _)
87// V(DefBits, MutableMode, 1, _)
88//
89// DEFINE_BIT_FIELDS(MAP_BIT_FIELD1)
90// or
91// DEFINE_BIT_FIELDS_64(MAP_BIT_FIELD1)
92//
93#define DEFINE_BIT_FIELD_RANGE_TYPE(Name, Type, Size, _)kNameStart, kNameEnd = kNameStart + Size - 1, \
94 k##Name##Start, k##Name##End = k##Name##Start + Size - 1,
95
96#define DEFINE_BIT_RANGES(LIST_MACRO)struct LIST_MACRO_Ranges { enum { LIST_MACRO(DEFINE_BIT_FIELD_RANGE_TYPE
, _) kBitsCount }; };
\
97 struct LIST_MACRO##_Ranges { \
98 enum { LIST_MACRO(DEFINE_BIT_FIELD_RANGE_TYPE, _) kBitsCount }; \
99 };
100
101#define DEFINE_BIT_FIELD_TYPE(Name, Type, Size, RangesName)using Name = base::BitField<Type, RangesName::kNameStart, Size
>;
\
102 using Name = base::BitField<Type, RangesName::k##Name##Start, Size>;
103
104#define DEFINE_BIT_FIELD_64_TYPE(Name, Type, Size, RangesName)using Name = base::BitField64<Type, RangesName::kNameStart
, Size>;
\
105 using Name = base::BitField64<Type, RangesName::k##Name##Start, Size>;
106
107#define DEFINE_BIT_FIELDS(LIST_MACRO)struct LIST_MACRO_Ranges { enum { LIST_MACRO(DEFINE_BIT_FIELD_RANGE_TYPE
, _) kBitsCount }; }; LIST_MACRO(DEFINE_BIT_FIELD_TYPE, LIST_MACRO_Ranges
)
\
108 DEFINE_BIT_RANGES(LIST_MACRO)struct LIST_MACRO_Ranges { enum { LIST_MACRO(DEFINE_BIT_FIELD_RANGE_TYPE
, _) kBitsCount }; };
\
109 LIST_MACRO(DEFINE_BIT_FIELD_TYPE, LIST_MACRO##_Ranges)
110
111#define DEFINE_BIT_FIELDS_64(LIST_MACRO)struct LIST_MACRO_Ranges { enum { LIST_MACRO(DEFINE_BIT_FIELD_RANGE_TYPE
, _) kBitsCount }; }; LIST_MACRO(DEFINE_BIT_FIELD_64_TYPE, LIST_MACRO_Ranges
)
\
112 DEFINE_BIT_RANGES(LIST_MACRO)struct LIST_MACRO_Ranges { enum { LIST_MACRO(DEFINE_BIT_FIELD_RANGE_TYPE
, _) kBitsCount }; };
\
113 LIST_MACRO(DEFINE_BIT_FIELD_64_TYPE, LIST_MACRO##_Ranges)
114
115// ----------------------------------------------------------------------------
116// BitSetComputer is a help template for encoding and decoding information for
117// a variable number of items in an array.
118//
119// To encode boolean data in a smi array you would use:
120// using BoolComputer = BitSetComputer<bool, 1, kSmiValueSize, uint32_t>;
121//
122template <class T, int kBitsPerItem, int kBitsPerWord, class U>
123class BitSetComputer {
124 public:
125 static const int kItemsPerWord = kBitsPerWord / kBitsPerItem;
126 static const int kMask = (1 << kBitsPerItem) - 1;
127
128 // The number of array elements required to embed T information for each item.
129 static int word_count(int items) {
130 if (items == 0) return 0;
131 return (items - 1) / kItemsPerWord + 1;
132 }
133
134 // The array index to look at for item.
135 static int index(int base_index, int item) {
136 return base_index + item / kItemsPerWord;
137 }
138
139 // Extract T data for a given item from data.
140 static T decode(U data, int item) {
141 return static_cast<T>((data >> shift(item)) & kMask);
142 }
143
144 // Return the encoding for a store of value for item in previous.
145 static U encode(U previous, int item, T value) {
146 int shift_value = shift(item);
147 int set_bits = (static_cast<int>(value) << shift_value);
148 return (previous & ~(kMask << shift_value)) | set_bits;
149 }
150
151 static int shift(int item) { return (item % kItemsPerWord) * kBitsPerItem; }
152};
153
154} // namespace base
155} // namespace v8
156
157#endif // V8_BASE_BIT_FIELD_H_