aboutsummaryrefslogtreecommitdiff
path: root/src/code-stub-assembler.cc
diff options
context:
space:
mode:
Diffstat (limited to 'src/code-stub-assembler.cc')
-rw-r--r--src/code-stub-assembler.cc4739
1 files changed, 1902 insertions, 2837 deletions
diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc
index b1ed2f13..e1ab040a 100644
--- a/src/code-stub-assembler.cc
+++ b/src/code-stub-assembler.cc
@@ -5,59 +5,142 @@
#include "src/code-factory.h"
#include "src/frames-inl.h"
#include "src/frames.h"
-#include "src/ic/handler-configuration.h"
-#include "src/ic/stub-cache.h"
namespace v8 {
namespace internal {
using compiler::Node;
-CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
- const CallInterfaceDescriptor& descriptor,
- Code::Flags flags, const char* name,
- size_t result_size)
- : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
- result_size) {}
+CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
+ : compiler::CodeAssembler(state) {
+ if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
+ HandleBreakOnNode();
+ }
+}
-CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
- int parameter_count, Code::Flags flags,
- const char* name)
- : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}
+void CodeStubAssembler::HandleBreakOnNode() {
+ // FLAG_csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
+ // string specifying the name of a stub and NODE is number specifying node id.
+ const char* name = state()->name();
+ size_t name_length = strlen(name);
+ if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
+ // Different name.
+ return;
+ }
+ size_t option_length = strlen(FLAG_csa_trap_on_node);
+ if (option_length < name_length + 2 ||
+ FLAG_csa_trap_on_node[name_length] != ',') {
+ // Option is too short.
+ return;
+ }
+ const char* start = &FLAG_csa_trap_on_node[name_length + 1];
+ char* end;
+ int node_id = static_cast<int>(strtol(start, &end, 10));
+ if (start == end) {
+ // Bad node id.
+ return;
+ }
+ BreakOnNode(node_id);
+}
-void CodeStubAssembler::Assert(ConditionBody codition_body, const char* message,
- const char* file, int line) {
+void CodeStubAssembler::Assert(const NodeGenerator& codition_body,
+ const char* message, const char* file,
+ int line) {
#if defined(DEBUG)
- Label ok(this);
- Label not_ok(this, Label::kDeferred);
- if (message != nullptr && FLAG_code_comments) {
- Comment("[ Assert: %s", message);
- } else {
- Comment("[ Assert");
- }
- Node* condition = codition_body();
- DCHECK_NOT_NULL(condition);
- Branch(condition, &ok, &not_ok);
- Bind(&not_ok);
- if (message != nullptr) {
- char chars[1024];
- Vector<char> buffer(chars);
- if (file != nullptr) {
- SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
+ if (FLAG_debug_code) {
+ Label ok(this);
+ Label not_ok(this, Label::kDeferred);
+ if (message != nullptr && FLAG_code_comments) {
+ Comment("[ Assert: %s", message);
} else {
- SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
+ Comment("[ Assert");
+ }
+ Node* condition = codition_body();
+ DCHECK_NOT_NULL(condition);
+ Branch(condition, &ok, &not_ok);
+ Bind(&not_ok);
+ if (message != nullptr) {
+ char chars[1024];
+ Vector<char> buffer(chars);
+ if (file != nullptr) {
+ SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file,
+ line);
+ } else {
+ SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
+ }
+ CallRuntime(
+ Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
+ HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
}
- CallRuntime(
- Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
- HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
- }
- DebugBreak();
- Goto(&ok);
- Bind(&ok);
- Comment("] Assert");
+ DebugBreak();
+ Goto(&ok);
+ Bind(&ok);
+ Comment("] Assert");
+ }
#endif
}
+Node* CodeStubAssembler::Select(Node* condition, const NodeGenerator& true_body,
+ const NodeGenerator& false_body,
+ MachineRepresentation rep) {
+ Variable value(this, rep);
+ Label vtrue(this), vfalse(this), end(this);
+ Branch(condition, &vtrue, &vfalse);
+
+ Bind(&vtrue);
+ {
+ value.Bind(true_body());
+ Goto(&end);
+ }
+ Bind(&vfalse);
+ {
+ value.Bind(false_body());
+ Goto(&end);
+ }
+
+ Bind(&end);
+ return value.value();
+}
+
+Node* CodeStubAssembler::SelectConstant(Node* condition, Node* true_value,
+ Node* false_value,
+ MachineRepresentation rep) {
+ return Select(condition, [=] { return true_value; },
+ [=] { return false_value; }, rep);
+}
+
+Node* CodeStubAssembler::SelectInt32Constant(Node* condition, int true_value,
+ int false_value) {
+ return SelectConstant(condition, Int32Constant(true_value),
+ Int32Constant(false_value),
+ MachineRepresentation::kWord32);
+}
+
+Node* CodeStubAssembler::SelectIntPtrConstant(Node* condition, int true_value,
+ int false_value) {
+ return SelectConstant(condition, IntPtrConstant(true_value),
+ IntPtrConstant(false_value),
+ MachineType::PointerRepresentation());
+}
+
+Node* CodeStubAssembler::SelectBooleanConstant(Node* condition) {
+ return SelectConstant(condition, TrueConstant(), FalseConstant(),
+ MachineRepresentation::kTagged);
+}
+
+Node* CodeStubAssembler::SelectTaggedConstant(Node* condition, Node* true_value,
+ Node* false_value) {
+ return SelectConstant(condition, true_value, false_value,
+ MachineRepresentation::kTagged);
+}
+
+Node* CodeStubAssembler::SelectSmiConstant(Node* condition, Smi* true_value,
+ Smi* false_value) {
+ return SelectConstant(condition, SmiConstant(true_value),
+ SmiConstant(false_value),
+ MachineRepresentation::kTaggedSigned);
+}
+
Node* CodeStubAssembler::NoContextConstant() { return NumberConstant(0); }
#define HEAP_CONSTANT_ACCESSOR(rootName, name) \
@@ -86,46 +169,19 @@ Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
if (mode == SMI_PARAMETERS) {
return SmiConstant(Smi::FromInt(value));
} else {
- DCHECK(mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS);
+ DCHECK_EQ(INTPTR_PARAMETERS, mode);
return IntPtrConstant(value);
}
}
-Node* CodeStubAssembler::IntPtrAddFoldConstants(Node* left, Node* right) {
- int32_t left_constant;
- bool is_left_constant = ToInt32Constant(left, left_constant);
- int32_t right_constant;
- bool is_right_constant = ToInt32Constant(right, right_constant);
- if (is_left_constant) {
- if (is_right_constant) {
- return IntPtrConstant(left_constant + right_constant);
- }
- if (left_constant == 0) {
- return right;
- }
- } else if (is_right_constant) {
- if (right_constant == 0) {
- return left;
- }
+bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test) {
+ int32_t constant_test;
+ Smi* smi_test;
+ if ((ToInt32Constant(test, constant_test) && constant_test == 0) ||
+ (ToSmiConstant(test, smi_test) && smi_test->value() == 0)) {
+ return true;
}
- return IntPtrAdd(left, right);
-}
-
-Node* CodeStubAssembler::IntPtrSubFoldConstants(Node* left, Node* right) {
- int32_t left_constant;
- bool is_left_constant = ToInt32Constant(left, left_constant);
- int32_t right_constant;
- bool is_right_constant = ToInt32Constant(right, right_constant);
- if (is_left_constant) {
- if (is_right_constant) {
- return IntPtrConstant(left_constant - right_constant);
- }
- } else if (is_right_constant) {
- if (right_constant == 0) {
- return left;
- }
- }
- return IntPtrSub(left, right);
+ return false;
}
Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
@@ -141,9 +197,11 @@ Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) {
// value && !(value & (value - 1))
return WordEqual(
- Select(WordEqual(value, IntPtrConstant(0)), IntPtrConstant(1),
- WordAnd(value, IntPtrSub(value, IntPtrConstant(1))),
- MachineType::PointerRepresentation()),
+ Select(
+ WordEqual(value, IntPtrConstant(0)),
+ [=] { return IntPtrConstant(1); },
+ [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); },
+ MachineType::PointerRepresentation()),
IntPtrConstant(0));
}
@@ -151,11 +209,10 @@ Node* CodeStubAssembler::Float64Round(Node* x) {
Node* one = Float64Constant(1.0);
Node* one_half = Float64Constant(0.5);
- Variable var_x(this, MachineRepresentation::kFloat64);
Label return_x(this);
// Round up {x} towards Infinity.
- var_x.Bind(Float64Ceil(x));
+ Variable var_x(this, MachineRepresentation::kFloat64, Float64Ceil(x));
GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
&return_x);
@@ -176,9 +233,8 @@ Node* CodeStubAssembler::Float64Ceil(Node* x) {
Node* two_52 = Float64Constant(4503599627370496.0E0);
Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
- Variable var_x(this, MachineRepresentation::kFloat64);
+ Variable var_x(this, MachineRepresentation::kFloat64, x);
Label return_x(this), return_minus_x(this);
- var_x.Bind(x);
// Check if {x} is greater than zero.
Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
@@ -192,7 +248,7 @@ Node* CodeStubAssembler::Float64Ceil(Node* x) {
// Round positive {x} towards Infinity.
var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
- GotoUnless(Float64LessThan(var_x.value(), x), &return_x);
+ GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
var_x.Bind(Float64Add(var_x.value(), one));
Goto(&return_x);
}
@@ -201,12 +257,12 @@ Node* CodeStubAssembler::Float64Ceil(Node* x) {
{
// Just return {x} unless it's in the range ]-2^52,0[
GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
- GotoUnless(Float64LessThan(x, zero), &return_x);
+ GotoIfNot(Float64LessThan(x, zero), &return_x);
// Round negated {x} towards Infinity and return the result negated.
Node* minus_x = Float64Neg(x);
var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
- GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
+ GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
var_x.Bind(Float64Sub(var_x.value(), one));
Goto(&return_minus_x);
}
@@ -229,9 +285,8 @@ Node* CodeStubAssembler::Float64Floor(Node* x) {
Node* two_52 = Float64Constant(4503599627370496.0E0);
Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
- Variable var_x(this, MachineRepresentation::kFloat64);
+ Variable var_x(this, MachineRepresentation::kFloat64, x);
Label return_x(this), return_minus_x(this);
- var_x.Bind(x);
// Check if {x} is greater than zero.
Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
@@ -245,7 +300,7 @@ Node* CodeStubAssembler::Float64Floor(Node* x) {
// Round positive {x} towards -Infinity.
var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
- GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
+ GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
var_x.Bind(Float64Sub(var_x.value(), one));
Goto(&return_x);
}
@@ -254,12 +309,12 @@ Node* CodeStubAssembler::Float64Floor(Node* x) {
{
// Just return {x} unless it's in the range ]-2^52,0[
GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
- GotoUnless(Float64LessThan(x, zero), &return_x);
+ GotoIfNot(Float64LessThan(x, zero), &return_x);
// Round negated {x} towards -Infinity and return the result negated.
Node* minus_x = Float64Neg(x);
var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
- GotoUnless(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
+ GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
var_x.Bind(Float64Add(var_x.value(), one));
Goto(&return_minus_x);
}
@@ -313,9 +368,8 @@ Node* CodeStubAssembler::Float64Trunc(Node* x) {
Node* two_52 = Float64Constant(4503599627370496.0E0);
Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
- Variable var_x(this, MachineRepresentation::kFloat64);
+ Variable var_x(this, MachineRepresentation::kFloat64, x);
Label return_x(this), return_minus_x(this);
- var_x.Bind(x);
// Check if {x} is greater than 0.
Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
@@ -332,7 +386,7 @@ Node* CodeStubAssembler::Float64Trunc(Node* x) {
// Round positive {x} towards -Infinity.
var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
- GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
+ GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
var_x.Bind(Float64Sub(var_x.value(), one));
}
Goto(&return_x);
@@ -346,12 +400,12 @@ Node* CodeStubAssembler::Float64Trunc(Node* x) {
} else {
// Just return {x} unless its in the range ]-2^52,0[.
GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
- GotoUnless(Float64LessThan(x, zero), &return_x);
+ GotoIfNot(Float64LessThan(x, zero), &return_x);
// Round negated {x} towards -Infinity and return result negated.
Node* minus_x = Float64Neg(x);
var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
- GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
+ GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
var_x.Bind(Float64Sub(var_x.value(), one));
Goto(&return_minus_x);
}
@@ -388,57 +442,19 @@ Node* CodeStubAssembler::SmiUntag(Node* value) {
Node* CodeStubAssembler::SmiToWord32(Node* value) {
Node* result = SmiUntag(value);
- if (Is64()) {
- result = TruncateInt64ToInt32(result);
- }
- return result;
+ return TruncateWordToWord32(result);
}
Node* CodeStubAssembler::SmiToFloat64(Node* value) {
return ChangeInt32ToFloat64(SmiToWord32(value));
}
-Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) {
- return BitcastWordToTaggedSigned(
- IntPtrAdd(BitcastTaggedToWord(a), BitcastTaggedToWord(b)));
-}
-
-Node* CodeStubAssembler::SmiSub(Node* a, Node* b) {
- return BitcastWordToTaggedSigned(
- IntPtrSub(BitcastTaggedToWord(a), BitcastTaggedToWord(b)));
-}
-
-Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) {
- return WordEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
-}
-
-Node* CodeStubAssembler::SmiAbove(Node* a, Node* b) {
- return UintPtrGreaterThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
-}
-
-Node* CodeStubAssembler::SmiAboveOrEqual(Node* a, Node* b) {
- return UintPtrGreaterThanOrEqual(BitcastTaggedToWord(a),
- BitcastTaggedToWord(b));
-}
-
-Node* CodeStubAssembler::SmiBelow(Node* a, Node* b) {
- return UintPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
-}
-
-Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
- return IntPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
-}
-
-Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
- return IntPtrLessThanOrEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
-}
-
Node* CodeStubAssembler::SmiMax(Node* a, Node* b) {
- return Select(SmiLessThan(a, b), b, a);
+ return SelectTaggedConstant(SmiLessThan(a, b), b, a);
}
Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
- return Select(SmiLessThan(a, b), a, b);
+ return SelectTaggedConstant(SmiLessThan(a, b), a, b);
}
Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
@@ -473,7 +489,7 @@ Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
// Check if {a} is kMinInt and {b} is -1 (only relevant if the
// kMinInt is actually representable as a Smi).
Label join(this);
- GotoUnless(Word32Equal(a, Int32Constant(kMinInt)), &join);
+ GotoIfNot(Word32Equal(a, Int32Constant(kMinInt)), &join);
GotoIf(Word32Equal(b, Int32Constant(-1)), &return_minuszero);
Goto(&join);
Bind(&join);
@@ -527,7 +543,7 @@ Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
Label answer_zero(this), answer_not_zero(this);
Node* answer = Projection(0, pair);
Node* zero = Int32Constant(0);
- Branch(WordEqual(answer, zero), &answer_zero, &answer_not_zero);
+ Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
Bind(&answer_not_zero);
{
var_result.Bind(ChangeInt32ToTagged(answer));
@@ -546,7 +562,7 @@ Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
}
Bind(&if_should_be_zero);
{
- var_result.Bind(zero);
+ var_result.Bind(SmiConstant(0));
Goto(&return_result);
}
}
@@ -565,13 +581,27 @@ Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
return var_result.value();
}
+Node* CodeStubAssembler::TruncateWordToWord32(Node* value) {
+ if (Is64()) {
+ return TruncateInt64ToInt32(value);
+ }
+ return value;
+}
+
Node* CodeStubAssembler::TaggedIsSmi(Node* a) {
return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
IntPtrConstant(0));
}
-Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
- return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
+Node* CodeStubAssembler::TaggedIsNotSmi(Node* a) {
+ return WordNotEqual(
+ WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
+ IntPtrConstant(0));
+}
+
+Node* CodeStubAssembler::TaggedIsPositiveSmi(Node* a) {
+ return WordEqual(WordAnd(BitcastTaggedToWord(a),
+ IntPtrConstant(kSmiTagMask | kSmiSignMask)),
IntPtrConstant(0));
}
@@ -580,82 +610,10 @@ Node* CodeStubAssembler::WordIsWordAligned(Node* word) {
WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1)));
}
-void CodeStubAssembler::BranchIfSimd128Equal(Node* lhs, Node* lhs_map,
- Node* rhs, Node* rhs_map,
- Label* if_equal,
- Label* if_notequal) {
- Label if_mapsame(this), if_mapnotsame(this);
- Branch(WordEqual(lhs_map, rhs_map), &if_mapsame, &if_mapnotsame);
-
- Bind(&if_mapsame);
- {
- // Both {lhs} and {rhs} are Simd128Values with the same map, need special
- // handling for Float32x4 because of NaN comparisons.
- Label if_float32x4(this), if_notfloat32x4(this);
- Node* float32x4_map = HeapConstant(factory()->float32x4_map());
- Branch(WordEqual(lhs_map, float32x4_map), &if_float32x4, &if_notfloat32x4);
-
- Bind(&if_float32x4);
- {
- // Both {lhs} and {rhs} are Float32x4, compare the lanes individually
- // using a floating point comparison.
- for (int offset = Float32x4::kValueOffset - kHeapObjectTag;
- offset < Float32x4::kSize - kHeapObjectTag;
- offset += sizeof(float)) {
- // Load the floating point values for {lhs} and {rhs}.
- Node* lhs_value =
- Load(MachineType::Float32(), lhs, IntPtrConstant(offset));
- Node* rhs_value =
- Load(MachineType::Float32(), rhs, IntPtrConstant(offset));
-
- // Perform a floating point comparison.
- Label if_valueequal(this), if_valuenotequal(this);
- Branch(Float32Equal(lhs_value, rhs_value), &if_valueequal,
- &if_valuenotequal);
- Bind(&if_valuenotequal);
- Goto(if_notequal);
- Bind(&if_valueequal);
- }
-
- // All 4 lanes match, {lhs} and {rhs} considered equal.
- Goto(if_equal);
- }
-
- Bind(&if_notfloat32x4);
- {
- // For other Simd128Values we just perform a bitwise comparison.
- for (int offset = Simd128Value::kValueOffset - kHeapObjectTag;
- offset < Simd128Value::kSize - kHeapObjectTag;
- offset += kPointerSize) {
- // Load the word values for {lhs} and {rhs}.
- Node* lhs_value =
- Load(MachineType::Pointer(), lhs, IntPtrConstant(offset));
- Node* rhs_value =
- Load(MachineType::Pointer(), rhs, IntPtrConstant(offset));
-
- // Perform a bitwise word-comparison.
- Label if_valueequal(this), if_valuenotequal(this);
- Branch(WordEqual(lhs_value, rhs_value), &if_valueequal,
- &if_valuenotequal);
- Bind(&if_valuenotequal);
- Goto(if_notequal);
- Bind(&if_valueequal);
- }
-
- // Bitwise comparison succeeded, {lhs} and {rhs} considered equal.
- Goto(if_equal);
- }
- }
-
- Bind(&if_mapnotsame);
- Goto(if_notequal);
-}
-
void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
Node* receiver_map, Label* definitely_no_elements,
Label* possibly_elements) {
- Variable var_map(this, MachineRepresentation::kTagged);
- var_map.Bind(receiver_map);
+ Variable var_map(this, MachineRepresentation::kTagged, receiver_map);
Label loop_body(this, &var_map);
Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
Goto(&loop_body);
@@ -698,25 +656,27 @@ void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
if_true, if_false);
}
-void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
- Label* if_true, Label* if_false) {
+void CodeStubAssembler::BranchIfFastJSArray(
+ Node* object, Node* context, CodeStubAssembler::FastJSArrayAccessMode mode,
+ Label* if_true, Label* if_false) {
// Bailout if receiver is a Smi.
GotoIf(TaggedIsSmi(object), if_false);
Node* map = LoadMap(object);
// Bailout if instance type is not JS_ARRAY_TYPE.
- GotoIf(WordNotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
+ GotoIf(Word32NotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
if_false);
Node* elements_kind = LoadMapElementsKind(map);
// Bailout if receiver has slow elements.
- GotoUnless(IsFastElementsKind(elements_kind), if_false);
+ GotoIfNot(IsFastElementsKind(elements_kind), if_false);
// Check prototype chain if receiver does not have packed elements.
- GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true);
-
+ if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
+ GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true);
+ }
BranchIfPrototypesHaveNoElements(map, if_true, if_false);
}
@@ -732,6 +692,22 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
Label merge_runtime(this, &result);
+ if (flags & kAllowLargeObjectAllocation) {
+ Label next(this);
+ GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
+
+ Node* runtime_flags = SmiConstant(
+ Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
+ AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
+ Node* const runtime_result =
+ CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
+ SmiTag(size_in_bytes), runtime_flags);
+ result.Bind(runtime_result);
+ Goto(&merge_runtime);
+
+ Bind(&next);
+ }
+
Node* new_top = IntPtrAdd(top, size_in_bytes);
Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
&no_runtime_call);
@@ -772,10 +748,9 @@ Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
Node* limit_address) {
Node* top = Load(MachineType::Pointer(), top_address);
Node* limit = Load(MachineType::Pointer(), limit_address);
- Variable adjusted_size(this, MachineType::PointerRepresentation());
- adjusted_size.Bind(size_in_bytes);
+ Variable adjusted_size(this, MachineType::PointerRepresentation(),
+ size_in_bytes);
if (flags & kDoubleAlignment) {
- // TODO(epertoso): Simd128 alignment.
Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
&aligned);
@@ -792,8 +767,9 @@ Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
Bind(&merge);
}
- Variable address(this, MachineRepresentation::kTagged);
- address.Bind(AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));
+ Variable address(
+ this, MachineRepresentation::kTagged,
+ AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));
Label needs_filler(this), doesnt_need_filler(this),
merge_address(this, &address);
@@ -802,8 +778,6 @@ Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
Bind(&needs_filler);
// Store a filler and increase the address by kPointerSize.
- // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
- // it when Simd128 alignment is supported.
StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
LoadRoot(Heap::kOnePointerFillerMapRootIndex));
address.Bind(BitcastWordToTagged(
@@ -827,10 +801,17 @@ Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
new_space
? ExternalReference::new_space_allocation_top_address(isolate())
: ExternalReference::old_space_allocation_top_address(isolate()));
- Node* limit_address = ExternalConstant(
- new_space
- ? ExternalReference::new_space_allocation_limit_address(isolate())
- : ExternalReference::old_space_allocation_limit_address(isolate()));
+ DCHECK_EQ(kPointerSize,
+ ExternalReference::new_space_allocation_limit_address(isolate())
+ .address() -
+ ExternalReference::new_space_allocation_top_address(isolate())
+ .address());
+ DCHECK_EQ(kPointerSize,
+ ExternalReference::old_space_allocation_limit_address(isolate())
+ .address() -
+ ExternalReference::old_space_allocation_top_address(isolate())
+ .address());
+ Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize));
#ifdef V8_HOST_ARCH_32_BIT
if (flags & kDoubleAlignment) {
@@ -846,7 +827,7 @@ Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
}
Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
- return BitcastWordToTagged(IntPtrAdd(previous, offset));
+ return BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset));
}
Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
@@ -860,11 +841,10 @@ Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
Label* if_false) {
- Label if_valueissmi(this), if_valueisnotsmi(this), if_valueisstring(this),
- if_valueisheapnumber(this), if_valueisother(this);
+ Label if_valueissmi(this), if_valueisnotsmi(this),
+ if_valueisheapnumber(this, Label::kDeferred);
- // Fast check for Boolean {value}s (common case).
- GotoIf(WordEqual(value, BooleanConstant(true)), if_true);
+ // Rule out false {value}.
GotoIf(WordEqual(value, BooleanConstant(false)), if_false);
// Check if {value} is a Smi or a HeapObject.
@@ -878,27 +858,24 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
Bind(&if_valueisnotsmi);
{
+ // Check if {value} is the empty string.
+ GotoIf(IsEmptyString(value), if_false);
+
// The {value} is a HeapObject, load its map.
Node* value_map = LoadMap(value);
- // Load the {value}s instance type.
- Node* value_instance_type = LoadMapInstanceType(value_map);
+ // Only null, undefined and document.all have the undetectable bit set,
+ // so we can return false immediately when that bit is set.
+ Node* value_map_bitfield = LoadMapBitField(value_map);
+ Node* value_map_undetectable =
+ Word32And(value_map_bitfield, Int32Constant(1 << Map::kIsUndetectable));
- // Dispatch based on the instance type; we distinguish all String instance
- // types, the HeapNumber type and everything else.
- GotoIf(Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
- &if_valueisheapnumber);
- Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
- &if_valueisother);
+ // Check if the {value} is undetectable.
+ GotoIfNot(Word32Equal(value_map_undetectable, Int32Constant(0)), if_false);
- Bind(&if_valueisstring);
- {
- // Load the string length field of the {value}.
- Node* value_length = LoadObjectField(value, String::kLengthOffset);
-
- // Check if the {value} is the empty string.
- BranchIfSmiEqual(value_length, SmiConstant(0), if_false, if_true);
- }
+ // We still need to handle numbers specially, but all other {value}s
+ // that make it here yield true.
+ Branch(IsHeapNumberMap(value_map), &if_valueisheapnumber, if_true);
Bind(&if_valueisheapnumber);
{
@@ -910,32 +887,15 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
if_true, if_false);
}
-
- Bind(&if_valueisother);
- {
- // Load the bit field from the {value}s map. The {value} is now either
- // Null or Undefined, which have the undetectable bit set (so we always
- // return false for those), or a Symbol or Simd128Value, whose maps never
- // have the undetectable bit set (so we always return true for those), or
- // a JSReceiver, which may or may not have the undetectable bit set.
- Node* value_map_bitfield = LoadMapBitField(value_map);
- Node* value_map_undetectable = Word32And(
- value_map_bitfield, Int32Constant(1 << Map::kIsUndetectable));
-
- // Check if the {value} is undetectable.
- Branch(Word32Equal(value_map_undetectable, Int32Constant(0)), if_true,
- if_false);
- }
}
}
-compiler::Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
+Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
Node* frame_pointer = LoadFramePointer();
return Load(rep, frame_pointer, IntPtrConstant(offset));
}
-compiler::Node* CodeStubAssembler::LoadFromParentFrame(int offset,
- MachineType rep) {
+Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
Node* frame_pointer = LoadParentFramePointer();
return Load(rep, frame_pointer, IntPtrConstant(offset));
}
@@ -1009,6 +969,24 @@ Node* CodeStubAssembler::LoadAndUntagToWord32Root(
}
}
+Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
+ if (Is64()) {
+ int zero_offset = offset + kPointerSize / 2;
+ int payload_offset = offset;
+#if V8_TARGET_LITTLE_ENDIAN
+ std::swap(zero_offset, payload_offset);
+#endif
+ StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
+ IntPtrConstant(zero_offset), Int32Constant(0));
+ return StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
+ IntPtrConstant(payload_offset),
+ TruncateInt64ToInt32(value));
+ } else {
+ return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
+ IntPtrConstant(offset), SmiTag(value));
+ }
+}
+
Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
return LoadObjectField(object, HeapNumber::kValueOffset,
MachineType::Float64());
@@ -1027,6 +1005,11 @@ Node* CodeStubAssembler::HasInstanceType(Node* object,
return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type));
}
+Node* CodeStubAssembler::DoesntHaveInstanceType(Node* object,
+ InstanceType instance_type) {
+ return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
+}
+
Node* CodeStubAssembler::LoadProperties(Node* object) {
return LoadObjectField(object, JSObject::kPropertiesOffset);
}
@@ -1089,9 +1072,9 @@ Node* CodeStubAssembler::LoadMapPrototypeInfo(Node* map,
Node* prototype_info =
LoadObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
GotoIf(TaggedIsSmi(prototype_info), if_no_proto_info);
- GotoUnless(WordEqual(LoadMap(prototype_info),
- LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
- if_no_proto_info);
+ GotoIfNot(WordEqual(LoadMap(prototype_info),
+ LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
+ if_no_proto_info);
return prototype_info;
}
@@ -1126,8 +1109,8 @@ Node* CodeStubAssembler::LoadMapConstructorFunctionIndex(Node* map) {
Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
CSA_SLOW_ASSERT(this, IsMap(map));
- Variable result(this, MachineRepresentation::kTagged);
- result.Bind(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
+ Variable result(this, MachineRepresentation::kTagged,
+ LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
Label done(this), loop(this, &result);
Goto(&loop);
@@ -1136,7 +1119,7 @@ Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
GotoIf(TaggedIsSmi(result.value()), &done);
Node* is_map_type =
Word32Equal(LoadInstanceType(result.value()), Int32Constant(MAP_TYPE));
- GotoUnless(is_map_type, &done);
+ GotoIfNot(is_map_type, &done);
result.Bind(
LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset));
Goto(&loop);
@@ -1145,6 +1128,25 @@ Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
return result.value();
}
+Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField(
+ Node* shared, int offset, ParameterMode mode) {
+ if (Is64()) {
+ Node* result = LoadObjectField(shared, offset, MachineType::Int32());
+ if (mode == SMI_PARAMETERS) {
+ result = SmiTag(result);
+ } else {
+ result = ChangeUint32ToWord(result);
+ }
+ return result;
+ } else {
+ Node* result = LoadObjectField(shared, offset);
+ if (mode != SMI_PARAMETERS) {
+ result = SmiUntag(result);
+ }
+ return result;
+ }
+}
+
Node* CodeStubAssembler::LoadNameHashField(Node* name) {
CSA_ASSERT(this, IsName(name));
return LoadObjectField(name, Name::kHashFieldOffset, MachineType::Uint32());
@@ -1303,8 +1305,7 @@ Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) {
Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index,
Node* value) {
int offset = Context::SlotOffset(slot_index);
- return Store(MachineRepresentation::kTagged, context, IntPtrConstant(offset),
- value);
+ return Store(context, IntPtrConstant(offset), value);
}
Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index,
@@ -1312,7 +1313,15 @@ Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index,
Node* offset =
IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
- return Store(MachineRepresentation::kTagged, context, offset, value);
+ return Store(context, offset, value);
+}
+
+Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context,
+ int slot_index,
+ Node* value) {
+ int offset = Context::SlotOffset(slot_index);
+ return StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
+ IntPtrConstant(offset), value);
}
Node* CodeStubAssembler::LoadNativeContext(Node* context) {
@@ -1322,8 +1331,7 @@ Node* CodeStubAssembler::LoadNativeContext(Node* context) {
Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind,
Node* native_context) {
CSA_ASSERT(this, IsNativeContext(native_context));
- return LoadFixedArrayElement(native_context,
- IntPtrConstant(Context::ArrayMapIndex(kind)));
+ return LoadContextElement(native_context, Context::ArrayMapIndex(kind));
}
Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
@@ -1333,8 +1341,8 @@ Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
Node* CodeStubAssembler::StoreObjectField(
Node* object, int offset, Node* value) {
- return Store(MachineRepresentation::kTagged, object,
- IntPtrConstant(offset - kHeapObjectTag), value);
+ DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
+ return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
}
Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
@@ -1343,8 +1351,8 @@ Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
if (ToInt32Constant(offset, const_offset)) {
return StoreObjectField(object, const_offset, value);
}
- return Store(MachineRepresentation::kTagged, object,
- IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
+ return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
+ value);
}
Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
@@ -1363,10 +1371,22 @@ Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
}
+Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
+ CSA_SLOW_ASSERT(this, IsMap(map));
+ return StoreWithMapWriteBarrier(
+ object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
+}
+
+Node* CodeStubAssembler::StoreMapNoWriteBarrier(
+ Node* object, Heap::RootListIndex map_root_index) {
+ return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
+}
+
Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
+ CSA_SLOW_ASSERT(this, IsMap(map));
return StoreNoWriteBarrier(
MachineRepresentation::kTagged, object,
- IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
+ IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
}
Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
@@ -1381,17 +1401,19 @@ Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
Node* value,
WriteBarrierMode barrier_mode,
+ int additional_offset,
ParameterMode parameter_mode) {
DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
barrier_mode == UPDATE_WRITE_BARRIER);
- Node* offset =
- ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, parameter_mode,
- FixedArray::kHeaderSize - kHeapObjectTag);
- MachineRepresentation rep = MachineRepresentation::kTagged;
+ int header_size =
+ FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
+ Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
+ parameter_mode, header_size);
if (barrier_mode == SKIP_WRITE_BARRIER) {
- return StoreNoWriteBarrier(rep, object, offset, value);
+ return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
+ value);
} else {
- return Store(rep, object, offset, value);
+ return Store(object, offset, value);
}
}
@@ -1405,13 +1427,90 @@ Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
return StoreNoWriteBarrier(rep, object, offset, value);
}
+Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* context,
+ Node* array,
+ CodeStubArguments& args,
+ Variable& arg_index,
+ Label* bailout) {
+ Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
+ Label pre_bailout(this);
+ Label success(this);
+ Variable var_tagged_length(this, MachineRepresentation::kTagged);
+ ParameterMode mode = OptimalParameterMode();
+ Variable var_length(this, OptimalParameterRepresentation(),
+ TaggedToParameter(LoadJSArrayLength(array), mode));
+ Variable var_elements(this, MachineRepresentation::kTagged,
+ LoadElements(array));
+ Node* capacity =
+ TaggedToParameter(LoadFixedArrayBaseLength(var_elements.value()), mode);
+
+ // Resize the capacity of the fixed array if it doesn't fit.
+ Label fits(this, &var_elements);
+ Node* first = arg_index.value();
+ Node* growth = IntPtrSub(args.GetLength(), first);
+ Node* new_length =
+ IntPtrOrSmiAdd(WordToParameter(growth, mode), var_length.value(), mode);
+ GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
+ Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
+ var_elements.Bind(GrowElementsCapacity(array, var_elements.value(), kind,
+ kind, capacity, new_capacity, mode,
+ &pre_bailout));
+ Goto(&fits);
+ Bind(&fits);
+ Node* elements = var_elements.value();
+
+ // Push each argument onto the end of the array now that there is enough
+ // capacity.
+ CodeStubAssembler::VariableList push_vars({&var_length}, zone());
+ args.ForEach(
+ push_vars,
+ [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
+ if (IsFastSmiElementsKind(kind)) {
+ GotoIf(TaggedIsNotSmi(arg), &pre_bailout);
+ } else if (IsFastDoubleElementsKind(kind)) {
+ GotoIfNotNumber(arg, &pre_bailout);
+ }
+ if (IsFastDoubleElementsKind(kind)) {
+ Node* double_value = ChangeNumberToFloat64(arg);
+ StoreFixedDoubleArrayElement(elements, var_length.value(),
+ Float64SilenceNaN(double_value), mode);
+ } else {
+ WriteBarrierMode barrier_mode = IsFastSmiElementsKind(kind)
+ ? SKIP_WRITE_BARRIER
+ : UPDATE_WRITE_BARRIER;
+ StoreFixedArrayElement(elements, var_length.value(), arg,
+ barrier_mode, 0, mode);
+ }
+ Increment(var_length, 1, mode);
+ },
+ first, nullptr);
+ {
+ Node* length = ParameterToTagged(var_length.value(), mode);
+ var_tagged_length.Bind(length);
+ StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
+ Goto(&success);
+ }
+
+ Bind(&pre_bailout);
+ {
+ Node* length = ParameterToTagged(var_length.value(), mode);
+ var_tagged_length.Bind(length);
+ Node* diff = SmiSub(length, LoadJSArrayLength(array));
+ StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
+ arg_index.Bind(IntPtrAdd(arg_index.value(), SmiUntag(diff)));
+ Goto(bailout);
+ }
+
+ Bind(&success);
+ return var_tagged_length.value();
+}
+
Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
Node* result = Allocate(HeapNumber::kSize, kNone);
Heap::RootListIndex heap_map_index =
mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
: Heap::kMutableHeapNumberMapRootIndex;
- Node* map = LoadRoot(heap_map_index);
- StoreMapNoWriteBarrier(result, map);
+ StoreMapNoWriteBarrier(result, heap_map_index);
return result;
}
@@ -1425,14 +1524,18 @@ Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value,
Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
AllocationFlags flags) {
Comment("AllocateSeqOneByteString");
+ if (length == 0) {
+ return LoadRoot(Heap::kempty_stringRootIndex);
+ }
Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
- StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
+ StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
SmiConstant(Smi::FromInt(length)));
- StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
+ // Initialize both used and unused parts of hash field slot at once.
+ StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
IntPtrConstant(String::kEmptyHashField),
- MachineRepresentation::kWord32);
+ MachineType::PointerRepresentation());
return result;
}
@@ -1443,8 +1546,10 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
Variable var_result(this, MachineRepresentation::kTagged);
// Compute the SeqOneByteString size and check if it fits into new space.
- Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
- if_join(this);
+ Label if_lengthiszero(this), if_sizeissmall(this),
+ if_notsizeissmall(this, Label::kDeferred), if_join(this);
+ GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
+
Node* raw_size = GetArrayAllocationSize(
length, UINT8_ELEMENTS, mode,
SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
@@ -1457,13 +1562,13 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
// Just allocate the SeqOneByteString in new space.
Node* result = Allocate(size, flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
- StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
- StoreObjectFieldNoWriteBarrier(
- result, SeqOneByteString::kLengthOffset,
- mode == SMI_PARAMETERS ? length : SmiFromWord(length));
- StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
+ StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
+ StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
+ ParameterToTagged(length, mode));
+ // Initialize both used and unused parts of hash field slot at once.
+ StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
IntPtrConstant(String::kEmptyHashField),
- MachineRepresentation::kWord32);
+ MachineType::PointerRepresentation());
var_result.Bind(result);
Goto(&if_join);
}
@@ -1471,13 +1576,18 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
Bind(&if_notsizeissmall);
{
// We might need to allocate in large object space, go to the runtime.
- Node* result =
- CallRuntime(Runtime::kAllocateSeqOneByteString, context,
- mode == SMI_PARAMETERS ? length : SmiFromWord(length));
+ Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
+ ParameterToTagged(length, mode));
var_result.Bind(result);
Goto(&if_join);
}
+ Bind(&if_lengthiszero);
+ {
+ var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
+ Goto(&if_join);
+ }
+
Bind(&if_join);
return var_result.value();
}
@@ -1485,14 +1595,18 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
AllocationFlags flags) {
Comment("AllocateSeqTwoByteString");
+ if (length == 0) {
+ return LoadRoot(Heap::kempty_stringRootIndex);
+ }
Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
- StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
+ StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
SmiConstant(Smi::FromInt(length)));
- StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
+ // Initialize both used and unused parts of hash field slot at once.
+ StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
IntPtrConstant(String::kEmptyHashField),
- MachineRepresentation::kWord32);
+ MachineType::PointerRepresentation());
return result;
}
@@ -1503,8 +1617,10 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
Variable var_result(this, MachineRepresentation::kTagged);
// Compute the SeqTwoByteString size and check if it fits into new space.
- Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
- if_join(this);
+ Label if_lengthiszero(this), if_sizeissmall(this),
+ if_notsizeissmall(this, Label::kDeferred), if_join(this);
+ GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);
+
Node* raw_size = GetArrayAllocationSize(
length, UINT16_ELEMENTS, mode,
SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
@@ -1517,13 +1633,14 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
// Just allocate the SeqTwoByteString in new space.
Node* result = Allocate(size, flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
- StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
+ StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
StoreObjectFieldNoWriteBarrier(
result, SeqTwoByteString::kLengthOffset,
mode == SMI_PARAMETERS ? length : SmiFromWord(length));
- StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
+ // Initialize both used and unused parts of hash field slot at once.
+ StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
IntPtrConstant(String::kEmptyHashField),
- MachineRepresentation::kWord32);
+ MachineType::PointerRepresentation());
var_result.Bind(result);
Goto(&if_join);
}
@@ -1538,6 +1655,12 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
Goto(&if_join);
}
+ Bind(&if_lengthiszero);
+ {
+ var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
+ Goto(&if_join);
+ }
+
Bind(&if_join);
return var_result.value();
}
@@ -1547,14 +1670,14 @@ Node* CodeStubAssembler::AllocateSlicedString(
Node* offset) {
CSA_ASSERT(this, TaggedIsSmi(length));
Node* result = Allocate(SlicedString::kSize);
- Node* map = LoadRoot(map_root_index);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
- StoreMapNoWriteBarrier(result, map);
+ StoreMapNoWriteBarrier(result, map_root_index);
StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
MachineRepresentation::kTagged);
- StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
- Int32Constant(String::kEmptyHashField),
- MachineRepresentation::kWord32);
+ // Initialize both used and unused parts of hash field slot at once.
+ StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
+ IntPtrConstant(String::kEmptyHashField),
+ MachineType::PointerRepresentation());
StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
MachineRepresentation::kTagged);
StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
@@ -1580,14 +1703,14 @@ Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
AllocationFlags flags) {
CSA_ASSERT(this, TaggedIsSmi(length));
Node* result = Allocate(ConsString::kSize, flags);
- Node* map = LoadRoot(map_root_index);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
- StoreMapNoWriteBarrier(result, map);
+ StoreMapNoWriteBarrier(result, map_root_index);
StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
MachineRepresentation::kTagged);
- StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
- Int32Constant(String::kEmptyHashField),
- MachineRepresentation::kWord32);
+ // Initialize both used and unused parts of hash field slot at once.
+ StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
+ IntPtrConstant(String::kEmptyHashField),
+ MachineType::PointerRepresentation());
bool const new_space = !(flags & kPretenured);
if (new_space) {
StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
@@ -1624,8 +1747,10 @@ Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
Node* right_instance_type = LoadInstanceType(right);
// Compute intersection and difference of instance types.
- Node* anded_instance_types = WordAnd(left_instance_type, right_instance_type);
- Node* xored_instance_types = WordXor(left_instance_type, right_instance_type);
+ Node* anded_instance_types =
+ Word32And(left_instance_type, right_instance_type);
+ Node* xored_instance_types =
+ Word32Xor(left_instance_type, right_instance_type);
// We create a one-byte cons string if
// 1. both strings are one-byte, or
@@ -1642,15 +1767,15 @@ Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
Label two_byte_map(this);
Variable result(this, MachineRepresentation::kTagged);
Label done(this, &result);
- GotoIf(WordNotEqual(
- WordAnd(anded_instance_types,
- IntPtrConstant(kStringEncodingMask | kOneByteDataHintTag)),
- IntPtrConstant(0)),
+ GotoIf(Word32NotEqual(Word32And(anded_instance_types,
+ Int32Constant(kStringEncodingMask |
+ kOneByteDataHintTag)),
+ Int32Constant(0)),
&one_byte_map);
- Branch(WordNotEqual(WordAnd(xored_instance_types,
- IntPtrConstant(kStringEncodingMask |
- kOneByteDataHintMask)),
- IntPtrConstant(kOneByteStringTag | kOneByteDataHintTag)),
+ Branch(Word32NotEqual(Word32And(xored_instance_types,
+ Int32Constant(kStringEncodingMask |
+ kOneByteDataHintMask)),
+ Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
&two_byte_map, &one_byte_map);
Bind(&one_byte_map);
@@ -1700,15 +1825,13 @@ Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
Node* const zero = IntPtrConstant(0);
Node* const length_intptr = SmiUntag(length);
const ElementsKind elements_kind = FAST_ELEMENTS;
- const ParameterMode parameter_mode = INTPTR_PARAMETERS;
- Node* const elements =
- AllocateFixedArray(elements_kind, length_intptr, parameter_mode);
+ Node* const elements = AllocateFixedArray(elements_kind, length_intptr);
StoreObjectField(result, JSArray::kElementsOffset, elements);
// Fill in the elements with undefined.
FillFixedArrayWithValue(elements_kind, elements, zero, length_intptr,
- Heap::kUndefinedValueRootIndex, parameter_mode);
+ Heap::kUndefinedValueRootIndex);
return result;
}
@@ -1727,14 +1850,14 @@ Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
Node* length = EntryToIndex<NameDictionary>(capacity);
Node* store_size =
- IntPtrAddFoldConstants(WordShl(length, IntPtrConstant(kPointerSizeLog2)),
- IntPtrConstant(NameDictionary::kHeaderSize));
+ IntPtrAdd(WordShl(length, IntPtrConstant(kPointerSizeLog2)),
+ IntPtrConstant(NameDictionary::kHeaderSize));
Node* result = Allocate(store_size);
Comment("Initialize NameDictionary");
// Initialize FixedArray fields.
- StoreObjectFieldRoot(result, FixedArray::kMapOffset,
- Heap::kHashTableMapRootIndex);
+ DCHECK(Heap::RootIsImmortalImmovable(Heap::kHashTableMapRootIndex));
+ StoreMapNoWriteBarrier(result, Heap::kHashTableMapRootIndex);
StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
SmiFromWord(length));
// Initialized HashTable fields.
@@ -1754,25 +1877,25 @@ Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
SKIP_WRITE_BARRIER);
// Initialize NameDictionary elements.
- result = BitcastTaggedToWord(result);
+ Node* result_word = BitcastTaggedToWord(result);
Node* start_address = IntPtrAdd(
- result, IntPtrConstant(NameDictionary::OffsetOfElementAt(
- NameDictionary::kElementsStartIndex) -
- kHeapObjectTag));
+ result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
+ NameDictionary::kElementsStartIndex) -
+ kHeapObjectTag));
Node* end_address = IntPtrAdd(
- result,
- IntPtrSubFoldConstants(store_size, IntPtrConstant(kHeapObjectTag)));
+ result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
StoreFieldsNoWriteBarrier(start_address, end_address, filler);
return result;
}
Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
- Node* elements) {
+ Node* elements,
+ AllocationFlags flags) {
CSA_ASSERT(this, IsMap(map));
Node* size =
IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize));
CSA_ASSERT(this, IsRegularHeapObjectSize(size));
- Node* object = Allocate(size);
+ Node* object = Allocate(size, flags);
StoreMapNoWriteBarrier(object, map);
InitializeJSObjectFromMap(object, map, size, properties, elements);
return object;
@@ -1806,6 +1929,7 @@ void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
Comment("InitializeJSObjectBody");
Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
// Calculate the untagged field addresses.
+ object = BitcastTaggedToWord(object);
Node* start_address =
IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
Node* end_address =
@@ -1819,12 +1943,12 @@ void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
Comment("StoreFieldsNoWriteBarrier");
CSA_ASSERT(this, WordIsWordAligned(start_address));
CSA_ASSERT(this, WordIsWordAligned(end_address));
- BuildFastLoop(
- MachineType::PointerRepresentation(), start_address, end_address,
- [value](CodeStubAssembler* a, Node* current) {
- a->StoreNoWriteBarrier(MachineRepresentation::kTagged, current, value);
- },
- kPointerSize, IndexAdvanceMode::kPost);
+ BuildFastLoop(start_address, end_address,
+ [this, value](Node* current) {
+ StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
+ value);
+ },
+ kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}
Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
@@ -1861,9 +1985,8 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
Node* array = AllocateUninitializedJSArray(kind, array_map, length,
allocation_site, size);
- // The bitcast here is safe because InnerAllocate doesn't actually allocate.
- Node* elements = InnerAllocate(BitcastTaggedToWord(array), elements_offset);
- StoreObjectField(array, JSObject::kElementsOffset, elements);
+ Node* elements = InnerAllocate(array, elements_offset);
+ StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
return {array, elements};
}
@@ -1878,6 +2001,7 @@ Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
Comment("write JSArray headers");
StoreMapNoWriteBarrier(array, array_map);
+ CSA_ASSERT(this, TaggedIsSmi(length));
StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
@@ -1893,25 +2017,31 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
Node* capacity, Node* length,
Node* allocation_site,
ParameterMode capacity_mode) {
- bool is_double = IsFastDoubleElementsKind(kind);
-
- // Allocate both array and elements object, and initialize the JSArray.
- Node *array, *elements;
- std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
- kind, array_map, length, allocation_site, capacity, capacity_mode);
- // Setup elements object.
- Heap* heap = isolate()->heap();
- Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
- : heap->fixed_array_map());
- StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
- StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
- TagParameter(capacity, capacity_mode));
-
- // Fill in the elements with holes.
- FillFixedArrayWithValue(
- kind, elements, capacity_mode == SMI_PARAMETERS ? SmiConstant(Smi::kZero)
- : IntPtrConstant(0),
- capacity, Heap::kTheHoleValueRootIndex, capacity_mode);
+ Node *array = nullptr, *elements = nullptr;
+ if (IsIntPtrOrSmiConstantZero(capacity)) {
+ // Array is empty. Use the shared empty fixed array instead of allocating a
+ // new one.
+ array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length,
+ nullptr);
+ StoreObjectFieldRoot(array, JSArray::kElementsOffset,
+ Heap::kEmptyFixedArrayRootIndex);
+ } else {
+ // Allocate both array and elements object, and initialize the JSArray.
+ std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
+ kind, array_map, length, allocation_site, capacity, capacity_mode);
+ // Setup elements object.
+ Heap::RootListIndex elements_map_index =
+ IsFastDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
+ : Heap::kFixedArrayMapRootIndex;
+ DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
+ StoreMapNoWriteBarrier(elements, elements_map_index);
+ StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
+ ParameterToTagged(capacity, capacity_mode));
+ // Fill in the elements with holes.
+ FillFixedArrayWithValue(kind, elements,
+ IntPtrOrSmiConstant(0, capacity_mode), capacity,
+ Heap::kTheHoleValueRootIndex, capacity_mode);
+ }
return array;
}
@@ -1920,23 +2050,19 @@ Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
Node* capacity_node,
ParameterMode mode,
AllocationFlags flags) {
- CSA_ASSERT(this,
- IntPtrGreaterThan(capacity_node, IntPtrOrSmiConstant(0, mode)));
+ CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
+ IntPtrOrSmiConstant(0, mode), mode));
Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
// Allocate both array and elements object, and initialize the JSArray.
Node* array = Allocate(total_size, flags);
- Heap* heap = isolate()->heap();
- Handle<Map> map(IsFastDoubleElementsKind(kind)
- ? heap->fixed_double_array_map()
- : heap->fixed_array_map());
- if (flags & kPretenured) {
- StoreObjectField(array, JSObject::kMapOffset, HeapConstant(map));
- } else {
- StoreMapNoWriteBarrier(array, HeapConstant(map));
- }
+ Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind)
+ ? Heap::kFixedDoubleArrayMapRootIndex
+ : Heap::kFixedArrayMapRootIndex;
+ DCHECK(Heap::RootIsImmortalImmovable(map_index));
+ StoreMapNoWriteBarrier(array, map_index);
StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
- TagParameter(capacity_node, mode));
+ ParameterToTagged(capacity_node, mode));
return array;
}
@@ -1954,8 +2080,7 @@ void CodeStubAssembler::FillFixedArrayWithValue(
BuildFastFixedArrayForEach(
array, kind, from_node, to_node,
- [value, is_double, double_hole](CodeStubAssembler* assembler, Node* array,
- Node* offset) {
+ [this, value, is_double, double_hole](Node* array, Node* offset) {
if (is_double) {
// Don't use doubles to store the hole double, since manipulating the
// signaling NaN used for the hole in C++, e.g. with bit_cast, will
@@ -1965,21 +2090,19 @@ void CodeStubAssembler::FillFixedArrayWithValue(
// TODO(danno): When we have a Float32/Float64 wrapper class that
// preserves double bits during manipulation, remove this code/change
// this to an indexed Float64 store.
- if (assembler->Is64()) {
- assembler->StoreNoWriteBarrier(MachineRepresentation::kWord64,
- array, offset, double_hole);
+ if (Is64()) {
+ StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
+ double_hole);
} else {
- assembler->StoreNoWriteBarrier(MachineRepresentation::kWord32,
- array, offset, double_hole);
- assembler->StoreNoWriteBarrier(
- MachineRepresentation::kWord32, array,
- assembler->IntPtrAdd(offset,
- assembler->IntPtrConstant(kPointerSize)),
- double_hole);
+ StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
+ double_hole);
+ StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
+ IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
+ double_hole);
}
} else {
- assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, array,
- offset, value);
+ StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
+ value);
}
},
mode);
@@ -2024,9 +2147,9 @@ void CodeStubAssembler::CopyFixedArrayElements(
Node* limit_offset = ElementOffsetFromIndex(
IntPtrOrSmiConstant(0, mode), from_kind, mode, first_element_offset);
- Variable var_from_offset(this, MachineType::PointerRepresentation());
- var_from_offset.Bind(ElementOffsetFromIndex(element_count, from_kind, mode,
- first_element_offset));
+ Variable var_from_offset(this, MachineType::PointerRepresentation(),
+ ElementOffsetFromIndex(element_count, from_kind,
+ mode, first_element_offset));
// This second variable is used only when the element sizes of source and
// destination arrays do not match.
Variable var_to_offset(this, MachineType::PointerRepresentation());
@@ -2076,7 +2199,7 @@ void CodeStubAssembler::CopyFixedArrayElements(
from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
if (needs_write_barrier) {
- Store(MachineRepresentation::kTagged, to_array, to_offset, value);
+ Store(to_array, to_offset, value);
} else if (to_double_elements) {
StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array, to_offset,
value);
@@ -2119,11 +2242,12 @@ void CodeStubAssembler::CopyFixedArrayElements(
Comment("] CopyFixedArrayElements");
}
-void CodeStubAssembler::CopyStringCharacters(
- compiler::Node* from_string, compiler::Node* to_string,
- compiler::Node* from_index, compiler::Node* to_index,
- compiler::Node* character_count, String::Encoding from_encoding,
- String::Encoding to_encoding, ParameterMode mode) {
+void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
+ Node* from_index, Node* to_index,
+ Node* character_count,
+ String::Encoding from_encoding,
+ String::Encoding to_encoding,
+ ParameterMode mode) {
bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
DCHECK_IMPLIES(to_one_byte, from_one_byte);
@@ -2140,7 +2264,7 @@ void CodeStubAssembler::CopyStringCharacters(
Node* to_offset =
ElementOffsetFromIndex(to_index, to_kind, mode, header_size);
Node* byte_count = ElementOffsetFromIndex(character_count, from_kind, mode);
- Node* limit_offset = IntPtrAddFoldConstants(from_offset, byte_count);
+ Node* limit_offset = IntPtrAdd(from_offset, byte_count);
// Prepare the fast loop
MachineType type =
@@ -2150,9 +2274,9 @@ void CodeStubAssembler::CopyStringCharacters(
int from_increment = 1 << ElementsKindToShiftSize(from_kind);
int to_increment = 1 << ElementsKindToShiftSize(to_kind);
- Variable current_to_offset(this, MachineType::PointerRepresentation());
+ Variable current_to_offset(this, MachineType::PointerRepresentation(),
+ to_offset);
VariableList vars({&current_to_offset}, zone());
- current_to_offset.Bind(to_offset);
int to_index_constant = 0, from_index_constant = 0;
Smi* to_index_smi = nullptr;
Smi* from_index_smi = nullptr;
@@ -2164,21 +2288,18 @@ void CodeStubAssembler::CopyStringCharacters(
(ToSmiConstant(from_index, from_index_smi) &&
ToSmiConstant(to_index, to_index_smi) &&
to_index_smi == from_index_smi));
- BuildFastLoop(vars, MachineType::PointerRepresentation(), from_offset,
- limit_offset,
- [from_string, to_string, &current_to_offset, to_increment, type,
- rep, index_same](CodeStubAssembler* assembler, Node* offset) {
- Node* value = assembler->Load(type, from_string, offset);
- assembler->StoreNoWriteBarrier(
+ BuildFastLoop(vars, from_offset, limit_offset,
+ [this, from_string, to_string, &current_to_offset, to_increment,
+ type, rep, index_same](Node* offset) {
+ Node* value = Load(type, from_string, offset);
+ StoreNoWriteBarrier(
rep, to_string,
index_same ? offset : current_to_offset.value(), value);
if (!index_same) {
- current_to_offset.Bind(assembler->IntPtrAdd(
- current_to_offset.value(),
- assembler->IntPtrConstant(to_increment)));
+ Increment(current_to_offset, to_increment);
}
},
- from_increment, IndexAdvanceMode::kPost);
+ from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}
Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
@@ -2212,17 +2333,10 @@ Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
ParameterMode mode) {
- Node* half_old_capacity = WordShr(old_capacity, IntPtrConstant(1));
- Node* new_capacity = IntPtrAdd(half_old_capacity, old_capacity);
- Node* unconditioned_result =
- IntPtrAdd(new_capacity, IntPtrOrSmiConstant(16, mode));
- if (mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS) {
- return unconditioned_result;
- } else {
- int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
- return WordAnd(unconditioned_result,
- IntPtrConstant(static_cast<size_t>(-1) << kSmiShiftBits));
- }
+ Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
+ Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
+ Node* padding = IntPtrOrSmiConstant(16, mode);
+ return IntPtrOrSmiAdd(new_capacity, padding, mode);
}
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
@@ -2231,8 +2345,8 @@ Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
Node* capacity = LoadFixedArrayBaseLength(elements);
ParameterMode mode = OptimalParameterMode();
- capacity = UntagParameter(capacity, mode);
- key = UntagParameter(key, mode);
+ capacity = TaggedToParameter(capacity, mode);
+ key = TaggedToParameter(key, mode);
return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
bailout);
@@ -2247,12 +2361,12 @@ Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
// If the gap growth is too big, fall back to the runtime.
Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
- Node* max_capacity = IntPtrAdd(capacity, max_gap);
- GotoIf(UintPtrGreaterThanOrEqual(key, max_capacity), bailout);
+ Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
+ GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
// Calculate the capacity of the new backing store.
Node* new_capacity = CalculateNewElementsCapacity(
- IntPtrAdd(key, IntPtrOrSmiConstant(1, mode)), mode);
+ IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
return GrowElementsCapacity(object, elements, kind, kind, capacity,
new_capacity, mode, bailout);
}
@@ -2264,8 +2378,8 @@ Node* CodeStubAssembler::GrowElementsCapacity(
// If size of the allocation for the new capacity doesn't fit in a page
// that we can bump-pointer allocate from, fall back to the runtime.
int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
- GotoIf(UintPtrGreaterThanOrEqual(new_capacity,
- IntPtrOrSmiConstant(max_size, mode)),
+ GotoIf(UintPtrOrSmiGreaterThanOrEqual(
+ new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
bailout);
// Allocate the new backing store.
@@ -2282,9 +2396,9 @@ Node* CodeStubAssembler::GrowElementsCapacity(
return new_elements;
}
-void CodeStubAssembler::InitializeAllocationMemento(
- compiler::Node* base_allocation, int base_allocation_size,
- compiler::Node* allocation_site) {
+void CodeStubAssembler::InitializeAllocationMemento(Node* base_allocation,
+ int base_allocation_size,
+ Node* allocation_site) {
StoreObjectFieldNoWriteBarrier(
base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
@@ -2370,10 +2484,9 @@ Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
// We might need to loop once due to ToNumber conversion.
- Variable var_value(this, MachineRepresentation::kTagged),
+ Variable var_value(this, MachineRepresentation::kTagged, value),
var_result(this, MachineRepresentation::kWord32);
Label loop(this, &var_value), done_loop(this, &var_result);
- var_value.Bind(value);
Goto(&loop);
Bind(&loop);
{
@@ -2396,8 +2509,8 @@ Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
// Check if {value} is a HeapNumber.
Label if_valueisheapnumber(this),
if_valueisnotheapnumber(this, Label::kDeferred);
- Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
- &if_valueisheapnumber, &if_valueisnotheapnumber);
+ Branch(IsHeapNumberMap(LoadMap(value)), &if_valueisheapnumber,
+ &if_valueisnotheapnumber);
Bind(&if_valueisheapnumber);
{
@@ -2434,7 +2547,7 @@ Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
Bind(&if_valueisequal);
{
- GotoUnless(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
+ GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
Branch(Int32LessThan(Float64ExtractHighWord32(value), Int32Constant(0)),
&if_valueisheapnumber, &if_valueisint32);
}
@@ -2457,7 +2570,7 @@ Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
Goto(&if_valueisheapnumber);
Bind(&if_notoverflow);
{
- Node* result = Projection(0, pair);
+ Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
var_result.Bind(result);
Goto(&if_join);
}
@@ -2492,7 +2605,7 @@ Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
Goto(&if_join);
Bind(&if_notoverflow);
{
- Node* result = Projection(0, pair);
+ Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
var_result.Bind(result);
}
Goto(&if_join);
@@ -2519,7 +2632,7 @@ Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
Node* overflow = Projection(1, pair);
GotoIf(overflow, &if_overflow);
- Node* result = Projection(0, pair);
+ Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
var_result.Bind(result);
}
}
@@ -2538,8 +2651,7 @@ Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
char const* method_name) {
- Variable var_value(this, MachineRepresentation::kTagged);
- var_value.Bind(value);
+ Variable var_value(this, MachineRepresentation::kTagged, value);
// Check if the {value} is a Smi or a HeapObject.
Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
@@ -2582,7 +2694,7 @@ Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
HeapConstant(factory()->NewStringFromAsciiChecked(
method_name, TENURED)));
- Goto(&if_valueisstring); // Never reached.
+ Unreachable();
}
}
}
@@ -2597,14 +2709,32 @@ Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
return var_value.value();
}
+Node* CodeStubAssembler::ChangeNumberToFloat64(compiler::Node* value) {
+ Variable result(this, MachineRepresentation::kFloat64);
+ Label smi(this);
+ Label done(this, &result);
+ GotoIf(TaggedIsSmi(value), &smi);
+ result.Bind(
+ LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
+ Goto(&done);
+
+ Bind(&smi);
+ {
+ result.Bind(SmiToFloat64(value));
+ Goto(&done);
+ }
+
+ Bind(&done);
+ return result.value();
+}
+
Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
PrimitiveType primitive_type,
char const* method_name) {
// We might need to loop once due to JSValue unboxing.
- Variable var_value(this, MachineRepresentation::kTagged);
+ Variable var_value(this, MachineRepresentation::kTagged, value);
Label loop(this, &var_value), done_loop(this),
done_throw(this, Label::kDeferred);
- var_value.Bind(value);
Goto(&loop);
Bind(&loop);
{
@@ -2663,7 +2793,7 @@ Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
CallRuntime(Runtime::kThrowNotGeneric, context,
HeapConstant(factory()->NewStringFromAsciiChecked(method_name,
TENURED)));
- Goto(&done_loop); // Never reached.
+ Unreachable();
}
Bind(&done_loop);
@@ -2691,21 +2821,25 @@ Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
Runtime::kThrowIncompatibleMethodReceiver, context,
HeapConstant(factory()->NewStringFromAsciiChecked(method_name, TENURED)),
value);
- var_value_map.Bind(UndefinedConstant());
- Goto(&out); // Never reached.
+ Unreachable();
Bind(&out);
return var_value_map.value();
}
+Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) {
+ return Word32Equal(instance_type, Int32Constant(type));
+}
+
Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
uint32_t mask =
1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
USE(mask);
// Interceptors or access checks imply special receiver.
- CSA_ASSERT(this, Select(IsSetWord32(LoadMapBitField(map), mask), is_special,
- Int32Constant(1), MachineRepresentation::kWord32));
+ CSA_ASSERT(this,
+ SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special,
+ Int32Constant(1), MachineRepresentation::kWord32));
return is_special;
}
@@ -2723,6 +2857,17 @@ Node* CodeStubAssembler::IsCallableMap(Node* map) {
Int32Constant(0));
}
+Node* CodeStubAssembler::IsCallable(Node* object) {
+ return IsCallableMap(LoadMap(object));
+}
+
+Node* CodeStubAssembler::IsConstructorMap(Node* map) {
+ CSA_ASSERT(this, IsMap(map));
+ return Word32NotEqual(
+ Word32And(LoadMapBitField(map), Int32Constant(1 << Map::kIsConstructor)),
+ Int32Constant(0));
+}
+
Node* CodeStubAssembler::IsSpecialReceiverInstanceType(Node* instance_type) {
STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
return Int32LessThanOrEqual(instance_type,
@@ -2745,6 +2890,11 @@ Node* CodeStubAssembler::IsJSReceiver(Node* object) {
return IsJSReceiverInstanceType(LoadInstanceType(object));
}
+Node* CodeStubAssembler::IsJSReceiverMap(Node* map) {
+ STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
+ return IsJSReceiverInstanceType(LoadMapInstanceType(map));
+}
+
Node* CodeStubAssembler::IsJSObject(Node* object) {
STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
return Int32GreaterThanOrEqual(LoadInstanceType(object),
@@ -2772,6 +2922,14 @@ Node* CodeStubAssembler::IsWeakCell(Node* object) {
return HasInstanceType(object, WEAK_CELL_TYPE);
}
+Node* CodeStubAssembler::IsBoolean(Node* object) {
+ return IsBooleanMap(LoadMap(object));
+}
+
+Node* CodeStubAssembler::IsHeapNumber(Node* object) {
+ return IsHeapNumberMap(LoadMap(object));
+}
+
Node* CodeStubAssembler::IsName(Node* object) {
return Int32LessThanOrEqual(LoadInstanceType(object),
Int32Constant(LAST_NAME_TYPE));
@@ -2782,6 +2940,22 @@ Node* CodeStubAssembler::IsString(Node* object) {
Int32Constant(FIRST_NONSTRING_TYPE));
}
+Node* CodeStubAssembler::IsSymbol(Node* object) {
+ return IsSymbolMap(LoadMap(object));
+}
+
+Node* CodeStubAssembler::IsPrivateSymbol(Node* object) {
+ return Select(
+ IsSymbol(object),
+ [=] {
+ Node* const flags =
+ SmiToWord32(LoadObjectField(object, Symbol::kFlagsOffset));
+ const int kPrivateMask = 1 << Symbol::kPrivateBit;
+ return IsSetWord32(flags, kPrivateMask);
+ },
+ [=] { return Int32Constant(0); }, MachineRepresentation::kWord32);
+}
+
Node* CodeStubAssembler::IsNativeContext(Node* object) {
return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
}
@@ -2795,7 +2969,7 @@ Node* CodeStubAssembler::IsHashTable(Node* object) {
}
Node* CodeStubAssembler::IsDictionary(Node* object) {
- return WordOr(IsHashTable(object), IsUnseededNumberDictionary(object));
+ return Word32Or(IsHashTable(object), IsUnseededNumberDictionary(object));
}
Node* CodeStubAssembler::IsUnseededNumberDictionary(Node* object) {
@@ -2803,19 +2977,22 @@ Node* CodeStubAssembler::IsUnseededNumberDictionary(Node* object) {
LoadRoot(Heap::kUnseededNumberDictionaryMapRootIndex));
}
-Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) {
+Node* CodeStubAssembler::IsJSFunction(Node* object) {
+ return HasInstanceType(object, JS_FUNCTION_TYPE);
+}
+
+Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index,
+ ParameterMode parameter_mode) {
CSA_ASSERT(this, IsString(string));
// Translate the {index} into a Word.
- index = SmiToWord(index);
+ index = ParameterToWord(index, parameter_mode);
- // We may need to loop in case of cons or sliced strings.
- Variable var_index(this, MachineType::PointerRepresentation());
+ // We may need to loop in case of cons, thin, or sliced strings.
+ Variable var_index(this, MachineType::PointerRepresentation(), index);
+ Variable var_string(this, MachineRepresentation::kTagged, string);
Variable var_result(this, MachineRepresentation::kWord32);
- Variable var_string(this, MachineRepresentation::kTagged);
Variable* loop_vars[] = {&var_index, &var_string};
Label done_loop(this, &var_result), loop(this, 2, loop_vars);
- var_string.Bind(string);
- var_index.Bind(index);
Goto(&loop);
Bind(&loop);
{
@@ -2960,14 +3137,29 @@ Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) {
Bind(&if_stringisnotexternal);
{
- // The {string} is a SlicedString, continue with its parent.
- Node* string_offset =
- LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
- Node* string_parent =
- LoadObjectField(string, SlicedString::kParentOffset);
- var_index.Bind(IntPtrAdd(index, string_offset));
- var_string.Bind(string_parent);
- Goto(&loop);
+ Label if_stringissliced(this), if_stringisthin(this);
+ Branch(
+ Word32Equal(Word32And(string_instance_type,
+ Int32Constant(kStringRepresentationMask)),
+ Int32Constant(kSlicedStringTag)),
+ &if_stringissliced, &if_stringisthin);
+ Bind(&if_stringissliced);
+ {
+ // The {string} is a SlicedString, continue with its parent.
+ Node* string_offset =
+ LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
+ Node* string_parent =
+ LoadObjectField(string, SlicedString::kParentOffset);
+ var_index.Bind(IntPtrAdd(index, string_offset));
+ var_string.Bind(string_parent);
+ Goto(&loop);
+ }
+ Bind(&if_stringisthin);
+ {
+ // The {string} is a ThinString, continue with its actual value.
+ var_string.Bind(LoadObjectField(string, ThinString::kActualOffset));
+ Goto(&loop);
+ }
}
}
}
@@ -2989,12 +3181,13 @@ Node* CodeStubAssembler::StringFromCharCode(Node* code) {
{
// Load the isolate wide single character string cache.
Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
+ Node* code_index = ChangeUint32ToWord(code);
// Check if we have an entry for the {code} in the single character string
// cache already.
Label if_entryisundefined(this, Label::kDeferred),
if_entryisnotundefined(this);
- Node* entry = LoadFixedArrayElement(cache, code);
+ Node* entry = LoadFixedArrayElement(cache, code_index);
Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
&if_entryisnotundefined);
@@ -3005,7 +3198,7 @@ Node* CodeStubAssembler::StringFromCharCode(Node* code) {
StoreNoWriteBarrier(
MachineRepresentation::kWord8, result,
IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
- StoreFixedArrayElement(cache, code, result);
+ StoreFixedArrayElement(cache, code_index, result);
var_result.Bind(result);
Goto(&if_done);
}
@@ -3096,31 +3289,28 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
Label end(this);
Label runtime(this);
- Variable var_instance_type(this, MachineRepresentation::kWord8); // Int32.
- Variable var_result(this, MachineRepresentation::kTagged); // String.
- Variable var_from(this, MachineRepresentation::kTagged); // Smi.
- Variable var_string(this, MachineRepresentation::kTagged); // String.
+ Node* const int_zero = Int32Constant(0);
- var_instance_type.Bind(Int32Constant(0));
- var_string.Bind(string);
- var_from.Bind(from);
+ // Int32 variables.
+ Variable var_instance_type(this, MachineRepresentation::kWord32, int_zero);
+ Variable var_representation(this, MachineRepresentation::kWord32, int_zero);
- // Make sure first argument is a string.
+ Variable var_from(this, MachineRepresentation::kTagged, from); // Smi.
+ Variable var_string(this, MachineRepresentation::kTagged, string); // String.
+ Variable var_result(this, MachineRepresentation::kTagged); // String.
- // Bailout if receiver is a Smi.
- GotoIf(TaggedIsSmi(string), &runtime);
+ // Make sure first argument is a string.
+ CSA_ASSERT(this, TaggedIsNotSmi(string));
+ CSA_ASSERT(this, IsString(string));
// Load the instance type of the {string}.
Node* const instance_type = LoadInstanceType(string);
var_instance_type.Bind(instance_type);
- // Check if {string} is a String.
- GotoUnless(IsStringInstanceType(instance_type), &runtime);
-
// Make sure that both from and to are non-negative smis.
- GotoUnless(WordIsPositiveSmi(from), &runtime);
- GotoUnless(WordIsPositiveSmi(to), &runtime);
+ GotoIfNot(TaggedIsPositiveSmi(from), &runtime);
+ GotoIfNot(TaggedIsPositiveSmi(to), &runtime);
Node* const substr_length = SmiSub(to, from);
Node* const string_length = LoadStringLength(string);
@@ -3142,7 +3332,8 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
// and put the underlying string into var_string.
// If the string is not indirect, it can only be sequential or external.
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+ STATIC_ASSERT(kIsIndirectStringMask ==
+ (kSlicedStringTag & kConsStringTag & kThinStringTag));
STATIC_ASSERT(kIsIndirectStringMask != 0);
Label underlying_unpacked(this);
GotoIf(Word32Equal(
@@ -3150,13 +3341,14 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
Int32Constant(0)),
&underlying_unpacked);
- // The subject string is either a sliced or cons string.
+ // The subject string is a sliced, cons, or thin string.
- Label sliced_string(this);
- GotoIf(Word32NotEqual(
- Word32And(instance_type, Int32Constant(kSlicedNotConsMask)),
- Int32Constant(0)),
- &sliced_string);
+ Label thin_string(this), thin_or_sliced(this);
+ var_representation.Bind(
+ Word32And(instance_type, Int32Constant(kStringRepresentationMask)));
+ GotoIf(
+ Word32NotEqual(var_representation.value(), Int32Constant(kConsStringTag)),
+ &thin_or_sliced);
// Cons string. Check whether it is flat, then fetch first part.
// Flat cons strings have an empty second part.
@@ -3168,14 +3360,25 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
Node* first_string_part = LoadObjectField(string, ConsString::kFirstOffset);
var_string.Bind(first_string_part);
var_instance_type.Bind(LoadInstanceType(first_string_part));
+ var_representation.Bind(Word32And(
+ var_instance_type.value(), Int32Constant(kStringRepresentationMask)));
- Goto(&underlying_unpacked);
+ // The loaded first part might be a thin string.
+ Branch(Word32Equal(Word32And(var_instance_type.value(),
+ Int32Constant(kIsIndirectStringMask)),
+ Int32Constant(0)),
+ &underlying_unpacked, &thin_string);
}
- Bind(&sliced_string);
+ Bind(&thin_or_sliced);
{
+ GotoIf(
+ Word32Equal(var_representation.value(), Int32Constant(kThinStringTag)),
+ &thin_string);
+ // Otherwise it's a sliced string.
// Fetch parent and correct start index by offset.
- Node* sliced_offset = LoadObjectField(string, SlicedString::kOffsetOffset);
+ Node* sliced_offset =
+ LoadObjectField(var_string.value(), SlicedString::kOffsetOffset);
var_from.Bind(SmiAdd(from, sliced_offset));
Node* slice_parent = LoadObjectField(string, SlicedString::kParentOffset);
@@ -3184,6 +3387,19 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
Node* slice_parent_instance_type = LoadInstanceType(slice_parent);
var_instance_type.Bind(slice_parent_instance_type);
+ // The loaded parent might be a thin string.
+ Branch(Word32Equal(Word32And(var_instance_type.value(),
+ Int32Constant(kIsIndirectStringMask)),
+ Int32Constant(0)),
+ &underlying_unpacked, &thin_string);
+ }
+
+ Bind(&thin_string);
+ {
+ Node* actual_string =
+ LoadObjectField(var_string.value(), ThinString::kActualOffset);
+ var_string.Bind(actual_string);
+ var_instance_type.Bind(LoadInstanceType(actual_string));
Goto(&underlying_unpacked);
}
@@ -3231,10 +3447,10 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
// encoding at this point.
STATIC_ASSERT(kExternalStringTag != 0);
STATIC_ASSERT(kSeqStringTag == 0);
- GotoUnless(Word32Equal(Word32And(var_instance_type.value(),
- Int32Constant(kExternalStringTag)),
- Int32Constant(0)),
- &external_string);
+ GotoIfNot(Word32Equal(Word32And(var_instance_type.value(),
+ Int32Constant(kExternalStringTag)),
+ Int32Constant(0)),
+ &external_string);
var_result.Bind(AllocAndCopyStringCharacters(
this, context, var_string.value(), var_instance_type.value(),
@@ -3249,22 +3465,8 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
// Handle external string.
Bind(&external_string);
{
- // Rule out short external strings.
- STATIC_ASSERT(kShortExternalStringTag != 0);
- GotoIf(Word32NotEqual(Word32And(var_instance_type.value(),
- Int32Constant(kShortExternalStringMask)),
- Int32Constant(0)),
- &runtime);
-
- // Move the pointer so that offset-wise, it looks like a sequential string.
- STATIC_ASSERT(SeqTwoByteString::kHeaderSize ==
- SeqOneByteString::kHeaderSize);
-
- Node* resource_data = LoadObjectField(var_string.value(),
- ExternalString::kResourceDataOffset);
- Node* const fake_sequential_string = IntPtrSub(
- resource_data,
- IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ Node* const fake_sequential_string = TryDerefExternalString(
+ var_string.value(), var_instance_type.value(), &runtime);
var_result.Bind(AllocAndCopyStringCharacters(
this, context, fake_sequential_string, var_instance_type.value(),
@@ -3313,12 +3515,91 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
return var_result.value();
}
+namespace {
+
+Node* IsExternalStringInstanceType(CodeStubAssembler* a,
+ Node* const instance_type) {
+ CSA_ASSERT(a, a->IsStringInstanceType(instance_type));
+ return a->Word32Equal(
+ a->Word32And(instance_type, a->Int32Constant(kStringRepresentationMask)),
+ a->Int32Constant(kExternalStringTag));
+}
+
+Node* IsShortExternalStringInstanceType(CodeStubAssembler* a,
+ Node* const instance_type) {
+ CSA_ASSERT(a, a->IsStringInstanceType(instance_type));
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ return a->Word32NotEqual(
+ a->Word32And(instance_type, a->Int32Constant(kShortExternalStringMask)),
+ a->Int32Constant(0));
+}
+
+} // namespace
+
+Node* CodeStubAssembler::TryDerefExternalString(Node* const string,
+ Node* const instance_type,
+ Label* if_bailout) {
+ Label out(this);
+
+ USE(IsExternalStringInstanceType);
+ CSA_ASSERT(this, IsExternalStringInstanceType(this, instance_type));
+ GotoIf(IsShortExternalStringInstanceType(this, instance_type), if_bailout);
+
+ // Move the pointer so that offset-wise, it looks like a sequential string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
+
+ Node* resource_data = LoadObjectField(
+ string, ExternalString::kResourceDataOffset, MachineType::Pointer());
+ Node* const fake_sequential_string =
+ IntPtrSub(resource_data,
+ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+
+ return fake_sequential_string;
+}
+
+void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
+ Node* instance_type,
+ Variable* var_did_something) {
+ Label deref(this), done(this, var_did_something);
+ Node* representation =
+ Word32And(instance_type, Int32Constant(kStringRepresentationMask));
+ GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref);
+ GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done);
+ // Cons string.
+ Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset);
+ GotoIf(WordEqual(rhs, EmptyStringConstant()), &deref);
+ Goto(&done);
+
+ Bind(&deref);
+ STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
+ var_string->Bind(
+ LoadObjectField(var_string->value(), ThinString::kActualOffset));
+ var_did_something->Bind(IntPtrConstant(1));
+ Goto(&done);
+
+ Bind(&done);
+}
+
+void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
+ Node* left_instance_type,
+ Variable* var_right,
+ Node* right_instance_type,
+ Label* did_something) {
+ Variable var_did_something(this, MachineType::PointerRepresentation(),
+ IntPtrConstant(0));
+ MaybeDerefIndirectString(var_left, left_instance_type, &var_did_something);
+ MaybeDerefIndirectString(var_right, right_instance_type, &var_did_something);
+
+ GotoIf(WordNotEqual(var_did_something.value(), IntPtrConstant(0)),
+ did_something);
+ // Fall through if neither string was an indirect string.
+}
+
Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
AllocationFlags flags) {
Label check_right(this);
Label runtime(this, Label::kDeferred);
Label cons(this);
- Label non_cons(this);
Variable result(this, MachineRepresentation::kTagged);
Label done(this, &result);
Label done_native(this, &result);
@@ -3336,73 +3617,90 @@ Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
Goto(&done_native);
Bind(&cons);
- CSA_ASSERT(this, TaggedIsSmi(left_length));
- CSA_ASSERT(this, TaggedIsSmi(right_length));
- Node* new_length = SmiAdd(left_length, right_length);
- GotoIf(UintPtrGreaterThanOrEqual(
- new_length, SmiConstant(Smi::FromInt(String::kMaxLength))),
- &runtime);
-
- GotoIf(IntPtrLessThan(new_length,
- SmiConstant(Smi::FromInt(ConsString::kMinLength))),
- &non_cons);
-
- result.Bind(NewConsString(context, new_length, left, right, flags));
- Goto(&done_native);
+ {
+ CSA_ASSERT(this, TaggedIsSmi(left_length));
+ CSA_ASSERT(this, TaggedIsSmi(right_length));
+ Node* new_length = SmiAdd(left_length, right_length);
+ GotoIf(SmiAboveOrEqual(new_length, SmiConstant(String::kMaxLength)),
+ &runtime);
- Bind(&non_cons);
+ Variable var_left(this, MachineRepresentation::kTagged, left);
+ Variable var_right(this, MachineRepresentation::kTagged, right);
+ Variable* input_vars[2] = {&var_left, &var_right};
+ Label non_cons(this, 2, input_vars);
+ Label slow(this, Label::kDeferred);
+ GotoIf(SmiLessThan(new_length, SmiConstant(ConsString::kMinLength)),
+ &non_cons);
- Comment("Full string concatenate");
- Node* left_instance_type = LoadInstanceType(left);
- Node* right_instance_type = LoadInstanceType(right);
- // Compute intersection and difference of instance types.
+ result.Bind(NewConsString(context, new_length, var_left.value(),
+ var_right.value(), flags));
+ Goto(&done_native);
- Node* ored_instance_types = WordOr(left_instance_type, right_instance_type);
- Node* xored_instance_types = WordXor(left_instance_type, right_instance_type);
-
- // Check if both strings have the same encoding and both are sequential.
- GotoIf(WordNotEqual(
- WordAnd(xored_instance_types, IntPtrConstant(kStringEncodingMask)),
- IntPtrConstant(0)),
- &runtime);
- GotoIf(WordNotEqual(WordAnd(ored_instance_types,
- IntPtrConstant(kStringRepresentationMask)),
- IntPtrConstant(0)),
- &runtime);
-
- Label two_byte(this);
- GotoIf(WordEqual(
- WordAnd(ored_instance_types, IntPtrConstant(kStringEncodingMask)),
- IntPtrConstant(kTwoByteStringTag)),
- &two_byte);
- // One-byte sequential string case
- Node* new_string =
- AllocateSeqOneByteString(context, new_length, SMI_PARAMETERS);
- CopyStringCharacters(left, new_string, SmiConstant(Smi::kZero),
- SmiConstant(Smi::kZero), left_length,
- String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING,
- SMI_PARAMETERS);
- CopyStringCharacters(right, new_string, SmiConstant(Smi::kZero), left_length,
- right_length, String::ONE_BYTE_ENCODING,
- String::ONE_BYTE_ENCODING, SMI_PARAMETERS);
- result.Bind(new_string);
- Goto(&done_native);
+ Bind(&non_cons);
- Bind(&two_byte);
- {
- // Two-byte sequential string case
- new_string = AllocateSeqTwoByteString(context, new_length, SMI_PARAMETERS);
- CopyStringCharacters(left, new_string, SmiConstant(Smi::kZero),
+ Comment("Full string concatenate");
+ Node* left_instance_type = LoadInstanceType(var_left.value());
+ Node* right_instance_type = LoadInstanceType(var_right.value());
+ // Compute intersection and difference of instance types.
+
+ Node* ored_instance_types =
+ Word32Or(left_instance_type, right_instance_type);
+ Node* xored_instance_types =
+ Word32Xor(left_instance_type, right_instance_type);
+
+ // Check if both strings have the same encoding and both are sequential.
+ GotoIf(Word32NotEqual(Word32And(xored_instance_types,
+ Int32Constant(kStringEncodingMask)),
+ Int32Constant(0)),
+ &runtime);
+ GotoIf(Word32NotEqual(Word32And(ored_instance_types,
+ Int32Constant(kStringRepresentationMask)),
+ Int32Constant(0)),
+ &slow);
+
+ Label two_byte(this);
+ GotoIf(Word32Equal(Word32And(ored_instance_types,
+ Int32Constant(kStringEncodingMask)),
+ Int32Constant(kTwoByteStringTag)),
+ &two_byte);
+ // One-byte sequential string case
+ Node* new_string =
+ AllocateSeqOneByteString(context, new_length, SMI_PARAMETERS);
+ CopyStringCharacters(var_left.value(), new_string, SmiConstant(Smi::kZero),
SmiConstant(Smi::kZero), left_length,
- String::TWO_BYTE_ENCODING, String::TWO_BYTE_ENCODING,
+ String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING,
SMI_PARAMETERS);
- CopyStringCharacters(right, new_string, SmiConstant(Smi::kZero),
- left_length, right_length, String::TWO_BYTE_ENCODING,
- String::TWO_BYTE_ENCODING, SMI_PARAMETERS);
+ CopyStringCharacters(var_right.value(), new_string, SmiConstant(Smi::kZero),
+ left_length, right_length, String::ONE_BYTE_ENCODING,
+ String::ONE_BYTE_ENCODING, SMI_PARAMETERS);
result.Bind(new_string);
Goto(&done_native);
- }
+ Bind(&two_byte);
+ {
+ // Two-byte sequential string case
+ new_string =
+ AllocateSeqTwoByteString(context, new_length, SMI_PARAMETERS);
+ CopyStringCharacters(var_left.value(), new_string,
+ SmiConstant(Smi::kZero), SmiConstant(Smi::kZero),
+ left_length, String::TWO_BYTE_ENCODING,
+ String::TWO_BYTE_ENCODING, SMI_PARAMETERS);
+ CopyStringCharacters(var_right.value(), new_string,
+ SmiConstant(Smi::kZero), left_length, right_length,
+ String::TWO_BYTE_ENCODING, String::TWO_BYTE_ENCODING,
+ SMI_PARAMETERS);
+ result.Bind(new_string);
+ Goto(&done_native);
+ }
+
+ Bind(&slow);
+ {
+ // Try to unwrap indirect strings, restart the above attempt on success.
+ MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
+ right_instance_type, &non_cons);
+ Goto(&runtime);
+ }
+ }
Bind(&runtime);
{
result.Bind(CallRuntime(Runtime::kStringAdd, context, left, right));
@@ -3419,76 +3717,10 @@ Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
return result.value();
}
-Node* CodeStubAssembler::StringIndexOfChar(Node* context, Node* string,
- Node* needle_char, Node* from) {
- CSA_ASSERT(this, IsString(string));
- Variable var_result(this, MachineRepresentation::kTagged);
-
- Label out(this), runtime(this, Label::kDeferred);
-
- // Let runtime handle non-one-byte {needle_char}.
-
- Node* const one_byte_char_mask = IntPtrConstant(0xFF);
- GotoUnless(WordEqual(WordAnd(needle_char, one_byte_char_mask), needle_char),
- &runtime);
-
- // TODO(jgruber): Handle external and two-byte strings.
-
- Node* const one_byte_seq_mask = Int32Constant(
- kIsIndirectStringMask | kExternalStringTag | kStringEncodingMask);
- Node* const expected_masked = Int32Constant(kOneByteStringTag);
-
- Node* const string_instance_type = LoadInstanceType(string);
- GotoUnless(Word32Equal(Word32And(string_instance_type, one_byte_seq_mask),
- expected_masked),
- &runtime);
-
- // If we reach this, {string} is a non-indirect, non-external one-byte string.
-
- Node* const length = LoadStringLength(string);
- Node* const search_range_length = SmiUntag(SmiSub(length, from));
-
- const int offset = SeqOneByteString::kHeaderSize - kHeapObjectTag;
- Node* const begin = IntPtrConstant(offset);
- Node* const cursor = IntPtrAdd(begin, SmiUntag(from));
- Node* const end = IntPtrAdd(cursor, search_range_length);
-
- var_result.Bind(SmiConstant(Smi::FromInt(-1)));
-
- BuildFastLoop(MachineType::PointerRepresentation(), cursor, end,
- [string, needle_char, begin, &var_result, &out](
- CodeStubAssembler* csa, Node* cursor) {
- Label next(csa);
- Node* value = csa->Load(MachineType::Uint8(), string, cursor);
- csa->GotoUnless(csa->WordEqual(value, needle_char), &next);
-
- // Found a match.
- Node* index = csa->SmiTag(csa->IntPtrSub(cursor, begin));
- var_result.Bind(index);
- csa->Goto(&out);
-
- csa->Bind(&next);
- },
- 1, IndexAdvanceMode::kPost);
- Goto(&out);
-
- Bind(&runtime);
- {
- Node* const pattern = StringFromCharCode(needle_char);
- Node* const result =
- CallRuntime(Runtime::kStringIndexOf, context, string, pattern, from);
- var_result.Bind(result);
- Goto(&out);
- }
-
- Bind(&out);
- return var_result.value();
-}
-
-Node* CodeStubAssembler::StringFromCodePoint(compiler::Node* codepoint,
+Node* CodeStubAssembler::StringFromCodePoint(Node* codepoint,
UnicodeEncoding encoding) {
- Variable var_result(this, MachineRepresentation::kTagged);
- var_result.Bind(EmptyStringConstant());
+ Variable var_result(this, MachineRepresentation::kTagged,
+ EmptyStringConstant());
Label if_isword16(this), if_isword32(this), return_result(this);
@@ -3563,8 +3795,7 @@ Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
return var_result.value();
}
-Node* CodeStubAssembler::NumberToString(compiler::Node* context,
- compiler::Node* argument) {
+Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
Variable result(this, MachineRepresentation::kTagged);
Label runtime(this, Label::kDeferred);
Label smi(this);
@@ -3575,7 +3806,9 @@ Node* CodeStubAssembler::NumberToString(compiler::Node* context,
// Make the hash mask from the length of the number string cache. It
// contains two elements (number and string) for each cache entry.
- Node* mask = LoadFixedArrayBaseLength(number_string_cache);
+ // TODO(ishell): cleanup mask handling.
+ Node* mask =
+ BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
Node* one = IntPtrConstant(1);
mask = IntPtrSub(mask, one);
@@ -3583,7 +3816,7 @@ Node* CodeStubAssembler::NumberToString(compiler::Node* context,
// Argument isn't smi, check to see if it's a heap-number.
Node* map = LoadMap(argument);
- GotoUnless(WordEqual(map, HeapNumberMapConstant()), &runtime);
+ GotoIfNot(IsHeapNumberMap(map), &runtime);
// Make a hash from the two 32-bit values of the double.
Node* low =
@@ -3591,29 +3824,27 @@ Node* CodeStubAssembler::NumberToString(compiler::Node* context,
Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
MachineType::Int32());
Node* hash = Word32Xor(low, high);
- if (Is64()) hash = ChangeInt32ToInt64(hash);
+ hash = ChangeInt32ToIntPtr(hash);
hash = WordShl(hash, one);
- Node* index = WordAnd(hash, SmiToWord(mask));
+ Node* index = WordAnd(hash, SmiUntag(BitcastWordToTagged(mask)));
// Cache entry's key must be a heap number
- Node* number_key =
- LoadFixedArrayElement(number_string_cache, index, 0, INTPTR_PARAMETERS);
+ Node* number_key = LoadFixedArrayElement(number_string_cache, index);
GotoIf(TaggedIsSmi(number_key), &runtime);
map = LoadMap(number_key);
- GotoUnless(WordEqual(map, HeapNumberMapConstant()), &runtime);
+ GotoIfNot(IsHeapNumberMap(map), &runtime);
// Cache entry's key must match the heap number value we're looking for.
Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
MachineType::Int32());
Node* high_compare = LoadObjectField(
number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
- GotoUnless(WordEqual(low, low_compare), &runtime);
- GotoUnless(WordEqual(high, high_compare), &runtime);
+ GotoIfNot(Word32Equal(low, low_compare), &runtime);
+ GotoIfNot(Word32Equal(high, high_compare), &runtime);
- // Heap number match, return value fro cache entry.
+ // Heap number match, return value from cache entry.
IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
- result.Bind(LoadFixedArrayElement(number_string_cache, index, kPointerSize,
- INTPTR_PARAMETERS));
+ result.Bind(LoadFixedArrayElement(number_string_cache, index, kPointerSize));
Goto(&done);
Bind(&runtime);
@@ -3626,7 +3857,8 @@ Node* CodeStubAssembler::NumberToString(compiler::Node* context,
Bind(&smi);
{
// Load the smi key, make sure it matches the smi we're looking for.
- Node* smi_index = WordAnd(WordShl(argument, one), mask);
+ Node* smi_index = BitcastWordToTagged(
+ WordAnd(WordShl(BitcastTaggedToWord(argument), one), mask));
Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
SMI_PARAMETERS);
GotoIf(WordNotEqual(smi_key, argument), &runtime);
@@ -3643,9 +3875,6 @@ Node* CodeStubAssembler::NumberToString(compiler::Node* context,
}
Node* CodeStubAssembler::ToName(Node* context, Node* value) {
- typedef CodeStubAssembler::Label Label;
- typedef CodeStubAssembler::Variable Variable;
-
Label end(this);
Variable var_result(this, MachineRepresentation::kTagged);
@@ -3694,14 +3923,13 @@ Node* CodeStubAssembler::ToName(Node* context, Node* value) {
Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
// Assert input is a HeapObject (not smi or heap number)
CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
- CSA_ASSERT(this, Word32NotEqual(LoadMap(input), HeapNumberMapConstant()));
+ CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input))));
// We might need to loop once here due to ToPrimitive conversions.
- Variable var_input(this, MachineRepresentation::kTagged);
+ Variable var_input(this, MachineRepresentation::kTagged, input);
Variable var_result(this, MachineRepresentation::kTagged);
Label loop(this, &var_input);
Label end(this);
- var_input.Bind(input);
Goto(&loop);
Bind(&loop);
{
@@ -3745,7 +3973,7 @@ Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
Label if_resultisnumber(this), if_resultisnotnumber(this);
GotoIf(TaggedIsSmi(result), &if_resultisnumber);
Node* result_map = LoadMap(result);
- Branch(WordEqual(result_map, HeapNumberMapConstant()), &if_resultisnumber,
+ Branch(IsHeapNumberMap(result_map), &if_resultisnumber,
&if_resultisnotnumber);
Bind(&if_resultisnumber);
@@ -3765,8 +3993,8 @@ Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
Bind(&if_inputisother);
{
- // The {input} is something else (i.e. Symbol or Simd128Value), let the
- // runtime figure out the correct exception.
+ // The {input} is something else (e.g. Symbol), let the runtime figure
+ // out the correct exception.
// Note: We cannot tail call to the runtime here, as js-to-wasm
// trampolines also use this code currently, and they declare all
// outgoing parameters as untagged, while we would push a tagged
@@ -3785,7 +4013,7 @@ Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
Label end(this);
Label not_smi(this, Label::kDeferred);
- GotoUnless(TaggedIsSmi(input), &not_smi);
+ GotoIfNot(TaggedIsSmi(input), &not_smi);
var_result.Bind(input);
Goto(&end);
@@ -3793,8 +4021,7 @@ Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
{
Label not_heap_number(this, Label::kDeferred);
Node* input_map = LoadMap(input);
- GotoIf(Word32NotEqual(input_map, HeapNumberMapConstant()),
- &not_heap_number);
+ GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number);
var_result.Bind(input);
Goto(&end);
@@ -3810,6 +4037,107 @@ Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
return var_result.value();
}
+Node* CodeStubAssembler::ToUint32(Node* context, Node* input) {
+ Node* const float_zero = Float64Constant(0.0);
+ Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
+
+ Label out(this);
+
+ Variable var_result(this, MachineRepresentation::kTagged, input);
+
+ // Early exit for positive smis.
+ {
+ // TODO(jgruber): This branch and the recheck below can be removed once we
+ // have a ToNumber with multiple exits.
+ Label next(this, Label::kDeferred);
+ Branch(TaggedIsPositiveSmi(input), &out, &next);
+ Bind(&next);
+ }
+
+ Node* const number = ToNumber(context, input);
+ var_result.Bind(number);
+
+ // Perhaps we have a positive smi now.
+ {
+ Label next(this, Label::kDeferred);
+ Branch(TaggedIsPositiveSmi(number), &out, &next);
+ Bind(&next);
+ }
+
+ Label if_isnegativesmi(this), if_isheapnumber(this);
+ Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
+
+ Bind(&if_isnegativesmi);
+ {
+ // floor({input}) mod 2^32 === {input} + 2^32.
+ Node* const float_number = SmiToFloat64(number);
+ Node* const float_result = Float64Add(float_number, float_two_32);
+ Node* const result = ChangeFloat64ToTagged(float_result);
+ var_result.Bind(result);
+ Goto(&out);
+ }
+
+ Bind(&if_isheapnumber);
+ {
+ Label return_zero(this);
+ Node* const value = LoadHeapNumberValue(number);
+
+ {
+ // +-0.
+ Label next(this);
+ Branch(Float64Equal(value, float_zero), &return_zero, &next);
+ Bind(&next);
+ }
+
+ {
+ // NaN.
+ Label next(this);
+ Branch(Float64Equal(value, value), &next, &return_zero);
+ Bind(&next);
+ }
+
+ {
+ // +Infinity.
+ Label next(this);
+ Node* const positive_infinity =
+ Float64Constant(std::numeric_limits<double>::infinity());
+ Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
+ Bind(&next);
+ }
+
+ {
+ // -Infinity.
+ Label next(this);
+ Node* const negative_infinity =
+ Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
+ Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
+ Bind(&next);
+ }
+
+ // Return floor({input}) mod 2^32 (assuming mod semantics that always return
+ // positive results).
+ {
+ Node* x = Float64Floor(value);
+ x = Float64Mod(x, float_two_32);
+ x = Float64Add(x, float_two_32);
+ x = Float64Mod(x, float_two_32);
+
+ Node* const result = ChangeFloat64ToTagged(x);
+ var_result.Bind(result);
+ Goto(&out);
+ }
+
+ Bind(&return_zero);
+ {
+ var_result.Bind(SmiConstant(Smi::kZero));
+ Goto(&out);
+ }
+ }
+
+ Bind(&out);
+ return var_result.value();
+}
+
Node* CodeStubAssembler::ToString(Node* context, Node* input) {
Label is_number(this);
Label runtime(this, Label::kDeferred);
@@ -3825,8 +4153,7 @@ Node* CodeStubAssembler::ToString(Node* context, Node* input) {
GotoIf(IsStringInstanceType(input_instance_type), &done);
Label not_heap_number(this);
- Branch(WordNotEqual(input_map, HeapNumberMapConstant()), &not_heap_number,
- &is_number);
+ Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);
Bind(&is_number);
result.Bind(NumberToString(context, input));
@@ -3850,45 +4177,6 @@ Node* CodeStubAssembler::ToString(Node* context, Node* input) {
return result.value();
}
-Node* CodeStubAssembler::FlattenString(Node* string) {
- CSA_ASSERT(this, IsString(string));
- Variable var_result(this, MachineRepresentation::kTagged);
- var_result.Bind(string);
-
- Node* instance_type = LoadInstanceType(string);
-
- // Check if the {string} is not a ConsString (i.e. already flat).
- Label is_cons(this, Label::kDeferred), is_flat_in_cons(this), end(this);
- {
- GotoUnless(Word32Equal(Word32And(instance_type,
- Int32Constant(kStringRepresentationMask)),
- Int32Constant(kConsStringTag)),
- &end);
-
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string).
- Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
- Branch(WordEqual(rhs, EmptyStringConstant()), &is_flat_in_cons, &is_cons);
- }
-
- // Bail out to the runtime.
- Bind(&is_cons);
- {
- var_result.Bind(
- CallRuntime(Runtime::kFlattenString, NoContextConstant(), string));
- Goto(&end);
- }
-
- Bind(&is_flat_in_cons);
- {
- var_result.Bind(LoadObjectField(string, ConsString::kFirstOffset));
- Goto(&end);
- }
-
- Bind(&end);
- return var_result.value();
-}
-
Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -3917,9 +4205,8 @@ Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
ToIntegerTruncationMode mode) {
// We might need to loop once for ToNumber conversion.
- Variable var_arg(this, MachineRepresentation::kTagged);
+ Variable var_arg(this, MachineRepresentation::kTagged, input);
Label loop(this, &var_arg), out(this);
- var_arg.Bind(input);
Goto(&loop);
Bind(&loop);
{
@@ -3935,8 +4222,8 @@ Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
// Check if {arg} is a HeapNumber.
Label if_argisheapnumber(this),
if_argisnotheapnumber(this, Label::kDeferred);
- Branch(WordEqual(LoadMap(arg), HeapNumberMapConstant()),
- &if_argisheapnumber, &if_argisnotheapnumber);
+ Branch(IsHeapNumberMap(LoadMap(arg)), &if_argisheapnumber,
+ &if_argisnotheapnumber);
Bind(&if_argisheapnumber);
{
@@ -3944,7 +4231,7 @@ Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
Node* arg_value = LoadHeapNumberValue(arg);
// Check if {arg} is NaN.
- GotoUnless(Float64Equal(arg_value, arg_value), &return_zero);
+ GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
// Truncate {arg} towards zero.
Node* value = Float64Trunc(arg_value);
@@ -4013,29 +4300,42 @@ void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
}
}
+void CodeStubAssembler::Increment(Variable& variable, int value,
+ ParameterMode mode) {
+ DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
+ variable.rep() == MachineType::PointerRepresentation());
+ DCHECK_IMPLIES(mode == SMI_PARAMETERS,
+ variable.rep() == MachineRepresentation::kTagged ||
+ variable.rep() == MachineRepresentation::kTaggedSigned);
+ variable.Bind(
+ IntPtrOrSmiAdd(variable.value(), IntPtrOrSmiConstant(value, mode), mode));
+}
+
void CodeStubAssembler::Use(Label* label) {
GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
}
void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
Variable* var_index, Label* if_keyisunique,
- Label* if_bailout) {
+ Variable* var_unique, Label* if_bailout) {
DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
+ DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
Comment("TryToName");
- Label if_hascachedindex(this), if_keyisnotindex(this);
+ Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this);
// Handle Smi and HeapNumber keys.
var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
Goto(if_keyisindex);
Bind(&if_keyisnotindex);
- Node* key_instance_type = LoadInstanceType(key);
+ Node* key_map = LoadMap(key);
+ var_unique->Bind(key);
// Symbols are unique.
- GotoIf(Word32Equal(key_instance_type, Int32Constant(SYMBOL_TYPE)),
- if_keyisunique);
+ GotoIf(IsSymbolMap(key_map), if_keyisunique);
+ Node* key_instance_type = LoadMapInstanceType(key_map);
// Miss if |key| is not a String.
STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
- GotoUnless(IsStringInstanceType(key_instance_type), if_bailout);
+ GotoIfNot(IsStringInstanceType(key_instance_type), if_bailout);
// |key| is a String. Check if it has a cached array index.
Node* hash = LoadNameHashField(key);
Node* contains_index =
@@ -4046,6 +4346,12 @@ void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
Node* not_an_index =
Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask));
GotoIf(Word32Equal(not_an_index, Int32Constant(0)), if_bailout);
+ // Check if we have a ThinString.
+ GotoIf(Word32Equal(key_instance_type, Int32Constant(THIN_STRING_TYPE)),
+ &if_thinstring);
+ GotoIf(
+ Word32Equal(key_instance_type, Int32Constant(THIN_ONE_BYTE_STRING_TYPE)),
+ &if_thinstring);
// Finally, check if |key| is internalized.
STATIC_ASSERT(kNotInternalizedTag != 0);
Node* not_internalized =
@@ -4053,6 +4359,10 @@ void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)), if_bailout);
Goto(if_keyisunique);
+ Bind(&if_thinstring);
+ var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
+ Goto(if_keyisunique);
+
Bind(&if_hascachedindex);
var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
Goto(if_keyisindex);
@@ -4067,6 +4377,8 @@ Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
template Node* CodeStubAssembler::EntryToIndex<NameDictionary>(Node*, int);
template Node* CodeStubAssembler::EntryToIndex<GlobalDictionary>(Node*, int);
+template Node* CodeStubAssembler::EntryToIndex<SeededNumberDictionary>(Node*,
+ int);
Node* CodeStubAssembler::HashTableComputeCapacity(Node* at_least_space_for) {
Node* capacity = IntPtrRoundUpToPowerOfTwo32(
@@ -4075,8 +4387,49 @@ Node* CodeStubAssembler::HashTableComputeCapacity(Node* at_least_space_for) {
}
Node* CodeStubAssembler::IntPtrMax(Node* left, Node* right) {
- return Select(IntPtrGreaterThanOrEqual(left, right), left, right,
- MachineType::PointerRepresentation());
+ return SelectConstant(IntPtrGreaterThanOrEqual(left, right), left, right,
+ MachineType::PointerRepresentation());
+}
+
+Node* CodeStubAssembler::IntPtrMin(Node* left, Node* right) {
+ return SelectConstant(IntPtrLessThanOrEqual(left, right), left, right,
+ MachineType::PointerRepresentation());
+}
+
+template <class Dictionary>
+Node* CodeStubAssembler::GetNumberOfElements(Node* dictionary) {
+ return LoadFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex);
+}
+
+template <class Dictionary>
+void CodeStubAssembler::SetNumberOfElements(Node* dictionary,
+ Node* num_elements_smi) {
+ StoreFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex,
+ num_elements_smi, SKIP_WRITE_BARRIER);
+}
+
+template <class Dictionary>
+Node* CodeStubAssembler::GetNumberOfDeletedElements(Node* dictionary) {
+ return LoadFixedArrayElement(dictionary,
+ Dictionary::kNumberOfDeletedElementsIndex);
+}
+
+template <class Dictionary>
+Node* CodeStubAssembler::GetCapacity(Node* dictionary) {
+ return LoadFixedArrayElement(dictionary, Dictionary::kCapacityIndex);
+}
+
+template <class Dictionary>
+Node* CodeStubAssembler::GetNextEnumerationIndex(Node* dictionary) {
+ return LoadFixedArrayElement(dictionary,
+ Dictionary::kNextEnumerationIndexIndex);
+}
+
+template <class Dictionary>
+void CodeStubAssembler::SetNextEnumerationIndex(Node* dictionary,
+ Node* next_enum_index_smi) {
+ StoreFixedArrayElement(dictionary, Dictionary::kNextEnumerationIndexIndex,
+ next_enum_index_smi, SKIP_WRITE_BARRIER);
}
template <typename Dictionary>
@@ -4084,14 +4437,15 @@ void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
Node* unique_name, Label* if_found,
Variable* var_name_index,
Label* if_not_found,
- int inlined_probes) {
+ int inlined_probes,
+ LookupMode mode) {
CSA_ASSERT(this, IsDictionary(dictionary));
DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
+ DCHECK_IMPLIES(mode == kFindInsertionIndex,
+ inlined_probes == 0 && if_found == nullptr);
Comment("NameDictionaryLookup");
- Node* capacity = SmiUntag(LoadFixedArrayElement(
- dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0,
- INTPTR_PARAMETERS));
+ Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name));
@@ -4103,42 +4457,46 @@ void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
Node* index = EntryToIndex<Dictionary>(entry);
var_name_index->Bind(index);
- Node* current =
- LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
+ Node* current = LoadFixedArrayElement(dictionary, index);
GotoIf(WordEqual(current, unique_name), if_found);
// See Dictionary::NextProbe().
count = IntPtrConstant(i + 1);
entry = WordAnd(IntPtrAdd(entry, count), mask);
}
+ if (mode == kFindInsertionIndex) {
+ // Appease the variable merging algorithm for "Goto(&loop)" below.
+ var_name_index->Bind(IntPtrConstant(0));
+ }
Node* undefined = UndefinedConstant();
+ Node* the_hole = mode == kFindExisting ? nullptr : TheHoleConstant();
- Variable var_count(this, MachineType::PointerRepresentation());
- Variable var_entry(this, MachineType::PointerRepresentation());
+ Variable var_count(this, MachineType::PointerRepresentation(), count);
+ Variable var_entry(this, MachineType::PointerRepresentation(), entry);
Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
Label loop(this, 3, loop_vars);
- var_count.Bind(count);
- var_entry.Bind(entry);
Goto(&loop);
Bind(&loop);
{
- Node* count = var_count.value();
Node* entry = var_entry.value();
Node* index = EntryToIndex<Dictionary>(entry);
var_name_index->Bind(index);
- Node* current =
- LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
+ Node* current = LoadFixedArrayElement(dictionary, index);
GotoIf(WordEqual(current, undefined), if_not_found);
- GotoIf(WordEqual(current, unique_name), if_found);
+ if (mode == kFindExisting) {
+ GotoIf(WordEqual(current, unique_name), if_found);
+ } else {
+ DCHECK_EQ(kFindInsertionIndex, mode);
+ GotoIf(WordEqual(current, the_hole), if_not_found);
+ }
// See Dictionary::NextProbe().
- count = IntPtrAdd(count, IntPtrConstant(1));
- entry = WordAnd(IntPtrAdd(entry, count), mask);
+ Increment(var_count);
+ entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
- var_count.Bind(count);
var_entry.Bind(entry);
Goto(&loop);
}
@@ -4146,13 +4504,13 @@ void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
// Instantiate template methods to workaround GCC compilation issue.
template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
- Node*, Node*, Label*, Variable*, Label*, int);
+ Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
- Node*, Node*, Label*, Variable*, Label*, int);
+ Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) {
// See v8::internal::ComputeIntegerHash()
- Node* hash = key;
+ Node* hash = TruncateWordToWord32(key);
hash = Word32Xor(hash, seed);
hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)),
Word32Shl(hash, Int32Constant(15)));
@@ -4174,9 +4532,7 @@ void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
Comment("NumberDictionaryLookup");
- Node* capacity = SmiUntag(LoadFixedArrayElement(
- dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0,
- INTPTR_PARAMETERS));
+ Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
Node* int32_seed;
@@ -4195,20 +4551,17 @@ void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
Node* undefined = UndefinedConstant();
Node* the_hole = TheHoleConstant();
- Variable var_count(this, MachineType::PointerRepresentation());
+ Variable var_count(this, MachineType::PointerRepresentation(), count);
Variable* loop_vars[] = {&var_count, var_entry};
Label loop(this, 2, loop_vars);
- var_count.Bind(count);
var_entry->Bind(entry);
Goto(&loop);
Bind(&loop);
{
- Node* count = var_count.value();
Node* entry = var_entry->value();
Node* index = EntryToIndex<Dictionary>(entry);
- Node* current =
- LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
+ Node* current = LoadFixedArrayElement(dictionary, index);
GotoIf(WordEqual(current, undefined), if_not_found);
Label next_probe(this);
{
@@ -4231,38 +4584,273 @@ void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
Bind(&next_probe);
// See Dictionary::NextProbe().
- count = IntPtrAdd(count, IntPtrConstant(1));
- entry = WordAnd(IntPtrAdd(entry, count), mask);
+ Increment(var_count);
+ entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
- var_count.Bind(count);
var_entry->Bind(entry);
Goto(&loop);
}
}
+template <class Dictionary>
+void CodeStubAssembler::FindInsertionEntry(Node* dictionary, Node* key,
+ Variable* var_key_index) {
+ UNREACHABLE();
+}
+
+template <>
+void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
+ Node* dictionary, Node* key, Variable* var_key_index) {
+ Label done(this);
+ NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
+ &done, 0, kFindInsertionIndex);
+ Bind(&done);
+}
+
+template <class Dictionary>
+void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value,
+ Node* index, Node* enum_index) {
+ UNREACHABLE(); // Use specializations instead.
+}
+
+template <>
+void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
+ Node* name, Node* value,
+ Node* index,
+ Node* enum_index) {
+ // Store name and value.
+ StoreFixedArrayElement(dictionary, index, name);
+ StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
+
+ // Prepare details of the new property.
+ const int kInitialIndex = 0;
+ PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell);
+ enum_index =
+ SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
+ STATIC_ASSERT(kInitialIndex == 0);
+ Variable var_details(this, MachineRepresentation::kTaggedSigned,
+ SmiOr(SmiConstant(d.AsSmi()), enum_index));
+
+ // Private names must be marked non-enumerable.
+ Label not_private(this, &var_details);
+ GotoIfNot(IsSymbolMap(LoadMap(name)), &not_private);
+ Node* flags = SmiToWord32(LoadObjectField(name, Symbol::kFlagsOffset));
+ const int kPrivateMask = 1 << Symbol::kPrivateBit;
+ GotoIfNot(IsSetWord32(flags, kPrivateMask), &not_private);
+ Node* dont_enum =
+ SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
+ var_details.Bind(SmiOr(var_details.value(), dont_enum));
+ Goto(&not_private);
+ Bind(&not_private);
+
+ // Finally, store the details.
+ StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
+ var_details.value());
+}
+
+template <>
+void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary,
+ Node* key, Node* value,
+ Node* index,
+ Node* enum_index) {
+ UNIMPLEMENTED();
+}
+
+template <class Dictionary>
+void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
+ Label* bailout) {
+ Node* capacity = GetCapacity<Dictionary>(dictionary);
+ Node* nof = GetNumberOfElements<Dictionary>(dictionary);
+ Node* new_nof = SmiAdd(nof, SmiConstant(1));
+ // Require 33% to still be free after adding additional_elements.
+ // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
+ // But that's OK here because it's only used for a comparison.
+ Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
+ GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
+ // Require rehashing if more than 50% of free elements are deleted elements.
+ Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
+ CSA_ASSERT(this, SmiAbove(capacity, new_nof));
+ Node* half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
+ GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
+ Node* enum_index = nullptr;
+ if (Dictionary::kIsEnumerable) {
+ enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
+ Node* new_enum_index = SmiAdd(enum_index, SmiConstant(1));
+ Node* max_enum_index =
+ SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
+ GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
+
+ // No more bailouts after this point.
+ // Operations from here on can have side effects.
+
+ SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
+ } else {
+ USE(enum_index);
+ }
+ SetNumberOfElements<Dictionary>(dictionary, new_nof);
+
+ Variable var_key_index(this, MachineType::PointerRepresentation());
+ FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
+ InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
+ enum_index);
+}
+
+template void CodeStubAssembler::Add<NameDictionary>(Node*, Node*, Node*,
+ Label*);
+
void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
Node* descriptors, Node* nof,
Label* if_found,
Variable* var_name_index,
Label* if_not_found) {
+ Comment("DescriptorLookupLinear");
Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
- Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize);
+ Node* factor = IntPtrConstant(DescriptorArray::kEntrySize);
Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor));
- BuildFastLoop(
- MachineType::PointerRepresentation(), last_exclusive, first_inclusive,
- [descriptors, unique_name, if_found, var_name_index](
- CodeStubAssembler* assembler, Node* name_index) {
- Node* candidate_name = assembler->LoadFixedArrayElement(
- descriptors, name_index, 0, INTPTR_PARAMETERS);
- var_name_index->Bind(name_index);
- assembler->GotoIf(assembler->WordEqual(candidate_name, unique_name),
- if_found);
- },
- -DescriptorArray::kDescriptorSize, IndexAdvanceMode::kPre);
+ BuildFastLoop(last_exclusive, first_inclusive,
+ [this, descriptors, unique_name, if_found,
+ var_name_index](Node* name_index) {
+ Node* candidate_name =
+ LoadFixedArrayElement(descriptors, name_index);
+ var_name_index->Bind(name_index);
+ GotoIf(WordEqual(candidate_name, unique_name), if_found);
+ },
+ -DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
+ IndexAdvanceMode::kPre);
Goto(if_not_found);
}
+Node* CodeStubAssembler::DescriptorArrayNumberOfEntries(Node* descriptors) {
+ return LoadAndUntagToWord32FixedArrayElement(
+ descriptors, IntPtrConstant(DescriptorArray::kDescriptorLengthIndex));
+}
+
+namespace {
+
+Node* DescriptorNumberToIndex(CodeStubAssembler* a, Node* descriptor_number) {
+ Node* descriptor_size = a->Int32Constant(DescriptorArray::kEntrySize);
+ Node* index = a->Int32Mul(descriptor_number, descriptor_size);
+ return a->ChangeInt32ToIntPtr(index);
+}
+
+} // namespace
+
+Node* CodeStubAssembler::DescriptorArrayToKeyIndex(Node* descriptor_number) {
+ return IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
+ DescriptorNumberToIndex(this, descriptor_number));
+}
+
+Node* CodeStubAssembler::DescriptorArrayGetSortedKeyIndex(
+ Node* descriptors, Node* descriptor_number) {
+ const int details_offset = DescriptorArray::ToDetailsIndex(0) * kPointerSize;
+ Node* details = LoadAndUntagToWord32FixedArrayElement(
+ descriptors, DescriptorNumberToIndex(this, descriptor_number),
+ details_offset);
+ return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
+}
+
+Node* CodeStubAssembler::DescriptorArrayGetKey(Node* descriptors,
+ Node* descriptor_number) {
+ const int key_offset = DescriptorArray::ToKeyIndex(0) * kPointerSize;
+ return LoadFixedArrayElement(descriptors,
+ DescriptorNumberToIndex(this, descriptor_number),
+ key_offset);
+}
+
+void CodeStubAssembler::DescriptorLookupBinary(Node* unique_name,
+ Node* descriptors, Node* nof,
+ Label* if_found,
+ Variable* var_name_index,
+ Label* if_not_found) {
+ Comment("DescriptorLookupBinary");
+ Variable var_low(this, MachineRepresentation::kWord32, Int32Constant(0));
+ Node* limit =
+ Int32Sub(DescriptorArrayNumberOfEntries(descriptors), Int32Constant(1));
+ Variable var_high(this, MachineRepresentation::kWord32, limit);
+ Node* hash = LoadNameHashField(unique_name);
+ CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));
+
+ // Assume non-empty array.
+ CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));
+
+ Variable* loop_vars[] = {&var_high, &var_low};
+ Label binary_loop(this, 2, loop_vars);
+ Goto(&binary_loop);
+ Bind(&binary_loop);
+ {
+ // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
+ Node* mid =
+ Int32Add(var_low.value(),
+ Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1));
+ // mid_name = descriptors->GetSortedKey(mid).
+ Node* sorted_key_index = DescriptorArrayGetSortedKeyIndex(descriptors, mid);
+ Node* mid_name = DescriptorArrayGetKey(descriptors, sorted_key_index);
+
+ Node* mid_hash = LoadNameHashField(mid_name);
+
+ Label mid_greater(this), mid_less(this), merge(this);
+ Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
+ Bind(&mid_greater);
+ {
+ var_high.Bind(mid);
+ Goto(&merge);
+ }
+ Bind(&mid_less);
+ {
+ var_low.Bind(Int32Add(mid, Int32Constant(1)));
+ Goto(&merge);
+ }
+ Bind(&merge);
+ GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
+ }
+
+ Label scan_loop(this, &var_low);
+ Goto(&scan_loop);
+ Bind(&scan_loop);
+ {
+ GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
+
+ Node* sort_index =
+ DescriptorArrayGetSortedKeyIndex(descriptors, var_low.value());
+ Node* current_name = DescriptorArrayGetKey(descriptors, sort_index);
+ Node* current_hash = LoadNameHashField(current_name);
+ GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
+ Label next(this);
+ GotoIf(WordNotEqual(current_name, unique_name), &next);
+ GotoIf(Int32GreaterThanOrEqual(sort_index, nof), if_not_found);
+ var_name_index->Bind(DescriptorArrayToKeyIndex(sort_index));
+ Goto(if_found);
+
+ Bind(&next);
+ var_low.Bind(Int32Add(var_low.value(), Int32Constant(1)));
+ Goto(&scan_loop);
+ }
+}
+
+void CodeStubAssembler::DescriptorLookup(Node* unique_name, Node* descriptors,
+ Node* bitfield3, Label* if_found,
+ Variable* var_name_index,
+ Label* if_not_found) {
+ Comment("DescriptorArrayLookup");
+ Node* nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
+ GotoIf(Word32Equal(nof, Int32Constant(0)), if_not_found);
+ Label linear_search(this), binary_search(this);
+ const int kMaxElementsForLinearSearch = 32;
+ Branch(Int32LessThanOrEqual(nof, Int32Constant(kMaxElementsForLinearSearch)),
+ &linear_search, &binary_search);
+ Bind(&linear_search);
+ {
+ DescriptorLookupLinear(unique_name, descriptors, ChangeInt32ToIntPtr(nof),
+ if_found, var_name_index, if_not_found);
+ }
+ Bind(&binary_search);
+ {
+ DescriptorLookupBinary(unique_name, descriptors, nof, if_found,
+ var_name_index, if_not_found);
+ }
+}
+
void CodeStubAssembler::TryLookupProperty(
Node* object, Node* map, Node* instance_type, Node* unique_name,
Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
@@ -4288,20 +4876,11 @@ void CodeStubAssembler::TryLookupProperty(
&if_isfastmap);
Bind(&if_isfastmap);
{
- Comment("DescriptorArrayLookup");
- Node* nof =
- DecodeWordFromWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);
- // Bail out to the runtime for large numbers of own descriptors. The stub
- // only does linear search, which becomes too expensive in that case.
- {
- static const int32_t kMaxLinear = 210;
- GotoIf(UintPtrGreaterThan(nof, IntPtrConstant(kMaxLinear)), if_bailout);
- }
Node* descriptors = LoadMapDescriptors(map);
var_meta_storage->Bind(descriptors);
- DescriptorLookupLinear(unique_name, descriptors, nof, if_found_fast,
- var_name_index, if_not_found);
+ DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
+ var_name_index, if_not_found);
}
Bind(&if_isslowmap);
{
@@ -4314,8 +4893,8 @@ void CodeStubAssembler::TryLookupProperty(
Bind(&if_objectisspecial);
{
// Handle global object here and other special objects in runtime.
- GotoUnless(Word32Equal(instance_type, Int32Constant(JS_GLOBAL_OBJECT_TYPE)),
- if_bailout);
+ GotoIfNot(Word32Equal(instance_type, Int32Constant(JS_GLOBAL_OBJECT_TYPE)),
+ if_bailout);
// Handle interceptors and access checks in runtime.
Node* bit_field = LoadMapBitField(map);
@@ -4332,11 +4911,10 @@ void CodeStubAssembler::TryLookupProperty(
}
}
-void CodeStubAssembler::TryHasOwnProperty(compiler::Node* object,
- compiler::Node* map,
- compiler::Node* instance_type,
- compiler::Node* unique_name,
- Label* if_found, Label* if_not_found,
+void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
+ Node* instance_type,
+ Node* unique_name, Label* if_found,
+ Label* if_not_found,
Label* if_bailout) {
Comment("TryHasOwnProperty");
Variable var_meta_storage(this, MachineRepresentation::kTagged);
@@ -4367,15 +4945,8 @@ void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
Comment("[ LoadPropertyFromFastObject");
- const int name_to_details_offset =
- (DescriptorArray::kDescriptorDetails - DescriptorArray::kDescriptorKey) *
- kPointerSize;
- const int name_to_value_offset =
- (DescriptorArray::kDescriptorValue - DescriptorArray::kDescriptorKey) *
- kPointerSize;
-
- Node* details = LoadAndUntagToWord32FixedArrayElement(descriptors, name_index,
- name_to_details_offset);
+ Node* details =
+ LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
var_details->Bind(details);
Node* location = DecodeWord32<PropertyDetails::LocationField>(details);
@@ -4458,9 +5029,8 @@ void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
}
Bind(&if_in_descriptor);
{
- Node* value =
- LoadFixedArrayElement(descriptors, name_index, name_to_value_offset);
- var_value->Bind(value);
+ var_value->Bind(
+ LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index));
Goto(&done);
}
Bind(&done);
@@ -4474,19 +5044,10 @@ void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
Variable* var_value) {
Comment("LoadPropertyFromNameDictionary");
CSA_ASSERT(this, IsDictionary(dictionary));
- const int name_to_details_offset =
- (NameDictionary::kEntryDetailsIndex - NameDictionary::kEntryKeyIndex) *
- kPointerSize;
- const int name_to_value_offset =
- (NameDictionary::kEntryValueIndex - NameDictionary::kEntryKeyIndex) *
- kPointerSize;
- Node* details = LoadAndUntagToWord32FixedArrayElement(dictionary, name_index,
- name_to_details_offset);
-
- var_details->Bind(details);
- var_value->Bind(
- LoadFixedArrayElement(dictionary, name_index, name_to_value_offset));
+ var_details->Bind(
+ LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
+ var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
Comment("] LoadPropertyFromNameDictionary");
}
@@ -4499,12 +5060,8 @@ void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
Comment("[ LoadPropertyFromGlobalDictionary");
CSA_ASSERT(this, IsDictionary(dictionary));
- const int name_to_value_offset =
- (GlobalDictionary::kEntryValueIndex - GlobalDictionary::kEntryKeyIndex) *
- kPointerSize;
-
Node* property_cell =
- LoadFixedArrayElement(dictionary, name_index, name_to_value_offset);
+ LoadValueByKeyIndex<GlobalDictionary>(dictionary, name_index);
Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
@@ -4524,8 +5081,7 @@ void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
Node* CodeStubAssembler::CallGetterIfAccessor(Node* value, Node* details,
Node* context, Node* receiver,
Label* if_bailout) {
- Variable var_value(this, MachineRepresentation::kTagged);
- var_value.Bind(value);
+ Variable var_value(this, MachineRepresentation::kTagged, value);
Label done(this);
Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
@@ -4548,7 +5104,7 @@ Node* CodeStubAssembler::CallGetterIfAccessor(Node* value, Node* details,
// Return undefined if the {getter} is not callable.
var_value.Bind(UndefinedConstant());
- GotoUnless(IsCallableMap(getter_map), &done);
+ GotoIfNot(IsCallableMap(getter_map), &done);
// Call the accessor.
Callable callable = CodeFactory::Call(isolate());
@@ -4664,10 +5220,9 @@ void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
Node* elements = LoadElements(object);
Node* length = LoadAndUntagFixedArrayBaseLength(elements);
- GotoUnless(UintPtrLessThan(intptr_index, length), &if_oob);
+ GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
- Node* element =
- LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS);
+ Node* element = LoadFixedArrayElement(elements, intptr_index);
Node* the_hole = TheHoleConstant();
Branch(WordEqual(element, the_hole), if_not_found, if_found);
}
@@ -4676,7 +5231,7 @@ void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
Node* elements = LoadElements(object);
Node* length = LoadAndUntagFixedArrayBaseLength(elements);
- GotoUnless(UintPtrLessThan(intptr_index, length), &if_oob);
+ GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
// Check if the element is a double hole, but don't load it.
LoadFixedDoubleArrayElement(elements, intptr_index, MachineType::None(), 0,
@@ -4727,8 +5282,8 @@ template void CodeStubAssembler::NumberDictionaryLookup<
UnseededNumberDictionary>(Node*, Node*, Label*, Variable*, Label*);
void CodeStubAssembler::TryPrototypeChainLookup(
- Node* receiver, Node* key, LookupInHolder& lookup_property_in_holder,
- LookupInHolder& lookup_element_in_holder, Label* if_end,
+ Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
+ const LookupInHolder& lookup_element_in_holder, Label* if_end,
Label* if_bailout) {
// Ensure receiver is JSReceiver, otherwise bailout.
Label if_objectisnotsmi(this);
@@ -4748,22 +5303,22 @@ void CodeStubAssembler::TryPrototypeChainLookup(
}
Variable var_index(this, MachineType::PointerRepresentation());
+ Variable var_unique(this, MachineRepresentation::kTagged);
Label if_keyisindex(this), if_iskeyunique(this);
- TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, if_bailout);
+ TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
+ if_bailout);
Bind(&if_iskeyunique);
{
- Variable var_holder(this, MachineRepresentation::kTagged);
- Variable var_holder_map(this, MachineRepresentation::kTagged);
- Variable var_holder_instance_type(this, MachineRepresentation::kWord8);
+ Variable var_holder(this, MachineRepresentation::kTagged, receiver);
+ Variable var_holder_map(this, MachineRepresentation::kTagged, map);
+ Variable var_holder_instance_type(this, MachineRepresentation::kWord32,
+ instance_type);
Variable* merged_variables[] = {&var_holder, &var_holder_map,
&var_holder_instance_type};
Label loop(this, arraysize(merged_variables), merged_variables);
- var_holder.Bind(receiver);
- var_holder_map.Bind(map);
- var_holder_instance_type.Bind(instance_type);
Goto(&loop);
Bind(&loop);
{
@@ -4772,8 +5327,8 @@ void CodeStubAssembler::TryPrototypeChainLookup(
Label next_proto(this);
lookup_property_in_holder(receiver, var_holder.value(), holder_map,
- holder_instance_type, key, &next_proto,
- if_bailout);
+ holder_instance_type, var_unique.value(),
+ &next_proto, if_bailout);
Bind(&next_proto);
// Bailout if it can be an integer indexed exotic case.
@@ -4798,16 +5353,14 @@ void CodeStubAssembler::TryPrototypeChainLookup(
}
Bind(&if_keyisindex);
{
- Variable var_holder(this, MachineRepresentation::kTagged);
- Variable var_holder_map(this, MachineRepresentation::kTagged);
- Variable var_holder_instance_type(this, MachineRepresentation::kWord8);
+ Variable var_holder(this, MachineRepresentation::kTagged, receiver);
+ Variable var_holder_map(this, MachineRepresentation::kTagged, map);
+ Variable var_holder_instance_type(this, MachineRepresentation::kWord32,
+ instance_type);
Variable* merged_variables[] = {&var_holder, &var_holder_map,
&var_holder_instance_type};
Label loop(this, arraysize(merged_variables), merged_variables);
- var_holder.Bind(receiver);
- var_holder_map.Bind(map);
- var_holder_instance_type.Bind(instance_type);
Goto(&loop);
Bind(&loop);
{
@@ -4855,10 +5408,10 @@ Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
Node* instanceof_cache_map = LoadRoot(Heap::kInstanceofCacheMapRootIndex);
{
Label instanceof_cache_miss(this);
- GotoUnless(WordEqual(instanceof_cache_function, callable),
- &instanceof_cache_miss);
- GotoUnless(WordEqual(instanceof_cache_map, object_map),
- &instanceof_cache_miss);
+ GotoIfNot(WordEqual(instanceof_cache_function, callable),
+ &instanceof_cache_miss);
+ GotoIfNot(WordEqual(instanceof_cache_map, object_map),
+ &instanceof_cache_miss);
var_result.Bind(LoadRoot(Heap::kInstanceofCacheAnswerRootIndex));
Goto(&return_result);
Bind(&instanceof_cache_miss);
@@ -4872,14 +5425,14 @@ Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
// Goto runtime if {callable} is not a JSFunction.
Node* callable_instance_type = LoadMapInstanceType(callable_map);
- GotoUnless(
+ GotoIfNot(
Word32Equal(callable_instance_type, Int32Constant(JS_FUNCTION_TYPE)),
&return_runtime);
// Goto runtime if {callable} is not a constructor or has
// a non-instance "prototype".
Node* callable_bitfield = LoadMapBitField(callable_map);
- GotoUnless(
+ GotoIfNot(
Word32Equal(Word32And(callable_bitfield,
Int32Constant((1 << Map::kHasNonInstancePrototype) |
(1 << Map::kIsConstructor))),
@@ -4890,9 +5443,9 @@ Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
Node* callable_prototype =
LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
{
- Variable var_callable_prototype(this, MachineRepresentation::kTagged);
Label callable_prototype_valid(this);
- var_callable_prototype.Bind(callable_prototype);
+ Variable var_callable_prototype(this, MachineRepresentation::kTagged,
+ callable_prototype);
// Resolve the "prototype" if the {callable} has an initial map. Afterwards
// the {callable_prototype} will be either the JSReceiver prototype object
@@ -4900,7 +5453,7 @@ Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
// created so far and hence we should return false.
Node* callable_prototype_instance_type =
LoadInstanceType(callable_prototype);
- GotoUnless(
+ GotoIfNot(
Word32Equal(callable_prototype_instance_type, Int32Constant(MAP_TYPE)),
&callable_prototype_valid);
var_callable_prototype.Bind(
@@ -4916,8 +5469,7 @@ Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
StoreRoot(Heap::kInstanceofCacheMapRootIndex, object_map);
// Loop through the prototype chain looking for the {callable} prototype.
- Variable var_object_map(this, MachineRepresentation::kTagged);
- var_object_map.Bind(object_map);
+ Variable var_object_map(this, MachineRepresentation::kTagged, object_map);
Label loop(this, &var_object_map);
Goto(&loop);
Bind(&loop);
@@ -4926,7 +5478,7 @@ Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
// Check if the current {object} needs to be access checked.
Node* object_bitfield = LoadMapBitField(object_map);
- GotoUnless(
+ GotoIfNot(
Word32Equal(Word32And(object_bitfield,
Int32Constant(1 << Map::kIsAccessCheckNeeded)),
Int32Constant(0)),
@@ -4971,10 +5523,10 @@ Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
return var_result.value();
}
-compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
- ElementsKind kind,
- ParameterMode mode,
- int base_size) {
+Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
+ ElementsKind kind,
+ ParameterMode mode,
+ int base_size) {
int element_size_shift = ElementsKindToShiftSize(kind);
int element_size = 1 << element_size_shift;
int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
@@ -4986,10 +5538,6 @@ compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
constant_index = ToSmiConstant(index_node, smi_index);
if (constant_index) index = smi_index->value();
index_node = BitcastTaggedToWord(index_node);
- } else if (mode == INTEGER_PARAMETERS) {
- int32_t temp = 0;
- constant_index = ToInt32Constant(index_node, temp);
- index = static_cast<intptr_t>(temp);
} else {
DCHECK(mode == INTPTR_PARAMETERS);
constant_index = ToIntPtrConstant(index_node, index);
@@ -4997,9 +5545,6 @@ compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
if (constant_index) {
return IntPtrConstant(base_size + element_size * index);
}
- if (Is64() && mode == INTEGER_PARAMETERS) {
- index_node = ChangeInt32ToInt64(index_node);
- }
Node* shifted_index =
(element_size_shift == 0)
@@ -5007,32 +5552,28 @@ compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
: ((element_size_shift > 0)
? WordShl(index_node, IntPtrConstant(element_size_shift))
: WordShr(index_node, IntPtrConstant(-element_size_shift)));
- return IntPtrAddFoldConstants(IntPtrConstant(base_size), shifted_index);
+ return IntPtrAdd(IntPtrConstant(base_size), shifted_index);
}
-compiler::Node* CodeStubAssembler::LoadTypeFeedbackVectorForStub() {
+Node* CodeStubAssembler::LoadFeedbackVectorForStub() {
Node* function =
LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
- Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
- return LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
+ Node* cell = LoadObjectField(function, JSFunction::kFeedbackVectorOffset);
+ return LoadObjectField(cell, Cell::kValueOffset);
}
-void CodeStubAssembler::UpdateFeedback(compiler::Node* feedback,
- compiler::Node* type_feedback_vector,
- compiler::Node* slot_id) {
+void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* feedback_vector,
+ Node* slot_id) {
// This method is used for binary op and compare feedback. These
// vector nodes are initialized with a smi 0, so we can simply OR
// our new feedback in place.
- // TODO(interpreter): Consider passing the feedback as Smi already to avoid
- // the tagging completely.
- Node* previous_feedback =
- LoadFixedArrayElement(type_feedback_vector, slot_id);
- Node* combined_feedback = SmiOr(previous_feedback, SmiFromWord32(feedback));
- StoreFixedArrayElement(type_feedback_vector, slot_id, combined_feedback,
+ Node* previous_feedback = LoadFixedArrayElement(feedback_vector, slot_id);
+ Node* combined_feedback = SmiOr(previous_feedback, feedback);
+ StoreFixedArrayElement(feedback_vector, slot_id, combined_feedback,
SKIP_WRITE_BARRIER);
}
-compiler::Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) {
+Node* CodeStubAssembler::LoadReceiverMap(Node* receiver) {
Variable var_receiver_map(this, MachineRepresentation::kTagged);
Label load_smi_map(this, Label::kDeferred), load_receiver_map(this),
if_result(this);
@@ -5052,252 +5593,16 @@ compiler::Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) {
return var_receiver_map.value();
}
-compiler::Node* CodeStubAssembler::TryMonomorphicCase(
- compiler::Node* slot, compiler::Node* vector, compiler::Node* receiver_map,
- Label* if_handler, Variable* var_handler, Label* if_miss) {
- Comment("TryMonomorphicCase");
- DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
-
- // TODO(ishell): add helper class that hides offset computations for a series
- // of loads.
- int32_t header_size = FixedArray::kHeaderSize - kHeapObjectTag;
- // Adding |header_size| with a separate IntPtrAdd rather than passing it
- // into ElementOffsetFromIndex() allows it to be folded into a single
- // [base, index, offset] indirect memory access on x64.
- Node* offset =
- ElementOffsetFromIndex(slot, FAST_HOLEY_ELEMENTS, SMI_PARAMETERS);
- Node* feedback = Load(MachineType::AnyTagged(), vector,
- IntPtrAdd(offset, IntPtrConstant(header_size)));
-
- // Try to quickly handle the monomorphic case without knowing for sure
- // if we have a weak cell in feedback. We do know it's safe to look
- // at WeakCell::kValueOffset.
- GotoIf(WordNotEqual(receiver_map, LoadWeakCellValueUnchecked(feedback)),
- if_miss);
-
- Node* handler =
- Load(MachineType::AnyTagged(), vector,
- IntPtrAdd(offset, IntPtrConstant(header_size + kPointerSize)));
-
- var_handler->Bind(handler);
- Goto(if_handler);
- return feedback;
-}
-
-void CodeStubAssembler::HandlePolymorphicCase(
- compiler::Node* receiver_map, compiler::Node* feedback, Label* if_handler,
- Variable* var_handler, Label* if_miss, int unroll_count) {
- Comment("HandlePolymorphicCase");
- DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
-
- // Iterate {feedback} array.
- const int kEntrySize = 2;
-
- for (int i = 0; i < unroll_count; i++) {
- Label next_entry(this);
- Node* cached_map = LoadWeakCellValue(LoadFixedArrayElement(
- feedback, IntPtrConstant(i * kEntrySize), 0, INTPTR_PARAMETERS));
- GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);
-
- // Found, now call handler.
- Node* handler = LoadFixedArrayElement(
- feedback, IntPtrConstant(i * kEntrySize + 1), 0, INTPTR_PARAMETERS);
- var_handler->Bind(handler);
- Goto(if_handler);
-
- Bind(&next_entry);
- }
-
- // Loop from {unroll_count}*kEntrySize to {length}.
- Node* init = IntPtrConstant(unroll_count * kEntrySize);
- Node* length = LoadAndUntagFixedArrayBaseLength(feedback);
- BuildFastLoop(
- MachineType::PointerRepresentation(), init, length,
- [receiver_map, feedback, if_handler, var_handler](CodeStubAssembler* csa,
- Node* index) {
- Node* cached_map = csa->LoadWeakCellValue(
- csa->LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
-
- Label next_entry(csa);
- csa->GotoIf(csa->WordNotEqual(receiver_map, cached_map), &next_entry);
-
- // Found, now call handler.
- Node* handler = csa->LoadFixedArrayElement(
- feedback, index, kPointerSize, INTPTR_PARAMETERS);
- var_handler->Bind(handler);
- csa->Goto(if_handler);
-
- csa->Bind(&next_entry);
- },
- kEntrySize, IndexAdvanceMode::kPost);
- // The loop falls through if no handler was found.
- Goto(if_miss);
-}
-
-void CodeStubAssembler::HandleKeyedStorePolymorphicCase(
- compiler::Node* receiver_map, compiler::Node* feedback, Label* if_handler,
- Variable* var_handler, Label* if_transition_handler,
- Variable* var_transition_map_cell, Label* if_miss) {
- DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
- DCHECK_EQ(MachineRepresentation::kTagged, var_transition_map_cell->rep());
-
- const int kEntrySize = 3;
-
- Node* init = IntPtrConstant(0);
- Node* length = LoadAndUntagFixedArrayBaseLength(feedback);
- BuildFastLoop(
- MachineType::PointerRepresentation(), init, length,
- [receiver_map, feedback, if_handler, var_handler, if_transition_handler,
- var_transition_map_cell](CodeStubAssembler* csa, Node* index) {
- Node* cached_map = csa->LoadWeakCellValue(
- csa->LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
- Label next_entry(csa);
- csa->GotoIf(csa->WordNotEqual(receiver_map, cached_map), &next_entry);
-
- Node* maybe_transition_map_cell = csa->LoadFixedArrayElement(
- feedback, index, kPointerSize, INTPTR_PARAMETERS);
-
- var_handler->Bind(csa->LoadFixedArrayElement(
- feedback, index, 2 * kPointerSize, INTPTR_PARAMETERS));
- csa->GotoIf(
- csa->WordEqual(maybe_transition_map_cell,
- csa->LoadRoot(Heap::kUndefinedValueRootIndex)),
- if_handler);
- var_transition_map_cell->Bind(maybe_transition_map_cell);
- csa->Goto(if_transition_handler);
-
- csa->Bind(&next_entry);
- },
- kEntrySize, IndexAdvanceMode::kPost);
- // The loop falls through if no handler was found.
- Goto(if_miss);
-}
-
-compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
- compiler::Node* map) {
- // See v8::internal::StubCache::PrimaryOffset().
- STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift);
- // Compute the hash of the name (use entire hash field).
- Node* hash_field = LoadNameHashField(name);
- CSA_ASSERT(this,
- Word32Equal(Word32And(hash_field,
- Int32Constant(Name::kHashNotComputedMask)),
- Int32Constant(0)));
-
- // Using only the low bits in 64-bit mode is unlikely to increase the
- // risk of collision even if the heap is spread over an area larger than
- // 4Gb (and not at all if it isn't).
- Node* hash = Int32Add(hash_field, map);
- // Base the offset on a simple combination of name and map.
- hash = Word32Xor(hash, Int32Constant(StubCache::kPrimaryMagic));
- uint32_t mask = (StubCache::kPrimaryTableSize - 1)
- << StubCache::kCacheIndexShift;
- return ChangeUint32ToWord(Word32And(hash, Int32Constant(mask)));
-}
-
-compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset(
- compiler::Node* name, compiler::Node* seed) {
- // See v8::internal::StubCache::SecondaryOffset().
-
- // Use the seed from the primary cache in the secondary cache.
- Node* hash = Int32Sub(seed, name);
- hash = Int32Add(hash, Int32Constant(StubCache::kSecondaryMagic));
- int32_t mask = (StubCache::kSecondaryTableSize - 1)
- << StubCache::kCacheIndexShift;
- return ChangeUint32ToWord(Word32And(hash, Int32Constant(mask)));
-}
-
-enum CodeStubAssembler::StubCacheTable : int {
- kPrimary = static_cast<int>(StubCache::kPrimary),
- kSecondary = static_cast<int>(StubCache::kSecondary)
-};
-
-void CodeStubAssembler::TryProbeStubCacheTable(
- StubCache* stub_cache, StubCacheTable table_id,
- compiler::Node* entry_offset, compiler::Node* name, compiler::Node* map,
- Label* if_handler, Variable* var_handler, Label* if_miss) {
- StubCache::Table table = static_cast<StubCache::Table>(table_id);
-#ifdef DEBUG
- if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
- Goto(if_miss);
- return;
- } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
- Goto(if_miss);
- return;
- }
-#endif
- // The {table_offset} holds the entry offset times four (due to masking
- // and shifting optimizations).
- const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift;
- entry_offset = IntPtrMul(entry_offset, IntPtrConstant(kMultiplier));
-
- // Check that the key in the entry matches the name.
- Node* key_base =
- ExternalConstant(ExternalReference(stub_cache->key_reference(table)));
- Node* entry_key = Load(MachineType::Pointer(), key_base, entry_offset);
- GotoIf(WordNotEqual(name, entry_key), if_miss);
-
- // Get the map entry from the cache.
- DCHECK_EQ(kPointerSize * 2, stub_cache->map_reference(table).address() -
- stub_cache->key_reference(table).address());
- Node* entry_map =
- Load(MachineType::Pointer(), key_base,
- IntPtrAdd(entry_offset, IntPtrConstant(kPointerSize * 2)));
- GotoIf(WordNotEqual(map, entry_map), if_miss);
-
- DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() -
- stub_cache->key_reference(table).address());
- Node* handler = Load(MachineType::TaggedPointer(), key_base,
- IntPtrAdd(entry_offset, IntPtrConstant(kPointerSize)));
-
- // We found the handler.
- var_handler->Bind(handler);
- Goto(if_handler);
-}
-
-void CodeStubAssembler::TryProbeStubCache(
- StubCache* stub_cache, compiler::Node* receiver, compiler::Node* name,
- Label* if_handler, Variable* var_handler, Label* if_miss) {
- Label try_secondary(this), miss(this);
-
- Counters* counters = isolate()->counters();
- IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
-
- // Check that the {receiver} isn't a smi.
- GotoIf(TaggedIsSmi(receiver), &miss);
-
- Node* receiver_map = LoadMap(receiver);
-
- // Probe the primary table.
- Node* primary_offset = StubCachePrimaryOffset(name, receiver_map);
- TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name,
- receiver_map, if_handler, var_handler, &try_secondary);
-
- Bind(&try_secondary);
- {
- // Probe the secondary table.
- Node* secondary_offset = StubCacheSecondaryOffset(name, primary_offset);
- TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name,
- receiver_map, if_handler, var_handler, &miss);
- }
-
- Bind(&miss);
- {
- IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
- Goto(if_miss);
- }
-}
-
Node* CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
Variable var_intptr_key(this, MachineType::PointerRepresentation());
Label done(this, &var_intptr_key), key_is_smi(this);
GotoIf(TaggedIsSmi(key), &key_is_smi);
// Try to convert a heap number to a Smi.
- GotoUnless(WordEqual(LoadMap(key), HeapNumberMapConstant()), miss);
+ GotoIfNot(IsHeapNumberMap(LoadMap(key)), miss);
{
Node* value = LoadHeapNumberValue(key);
Node* int_value = RoundFloat64ToInt32(value);
- GotoUnless(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
+ GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
var_intptr_key.Bind(ChangeInt32ToIntPtr(int_value));
Goto(&done);
}
@@ -5312,1377 +5617,6 @@ Node* CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
return var_intptr_key.value();
}
-void CodeStubAssembler::EmitFastElementsBoundsCheck(Node* object,
- Node* elements,
- Node* intptr_index,
- Node* is_jsarray_condition,
- Label* miss) {
- Variable var_length(this, MachineType::PointerRepresentation());
- Comment("Fast elements bounds check");
- Label if_array(this), length_loaded(this, &var_length);
- GotoIf(is_jsarray_condition, &if_array);
- {
- var_length.Bind(SmiUntag(LoadFixedArrayBaseLength(elements)));
- Goto(&length_loaded);
- }
- Bind(&if_array);
- {
- var_length.Bind(SmiUntag(LoadJSArrayLength(object)));
- Goto(&length_loaded);
- }
- Bind(&length_loaded);
- GotoUnless(UintPtrLessThan(intptr_index, var_length.value()), miss);
-}
-
-void CodeStubAssembler::EmitElementLoad(Node* object, Node* elements,
- Node* elements_kind, Node* intptr_index,
- Node* is_jsarray_condition,
- Label* if_hole, Label* rebox_double,
- Variable* var_double_value,
- Label* unimplemented_elements_kind,
- Label* out_of_bounds, Label* miss) {
- Label if_typed_array(this), if_fast_packed(this), if_fast_holey(this),
- if_fast_double(this), if_fast_holey_double(this), if_nonfast(this),
- if_dictionary(this);
- GotoIf(
- IntPtrGreaterThan(elements_kind, IntPtrConstant(LAST_FAST_ELEMENTS_KIND)),
- &if_nonfast);
-
- EmitFastElementsBoundsCheck(object, elements, intptr_index,
- is_jsarray_condition, out_of_bounds);
- int32_t kinds[] = {// Handled by if_fast_packed.
- FAST_SMI_ELEMENTS, FAST_ELEMENTS,
- // Handled by if_fast_holey.
- FAST_HOLEY_SMI_ELEMENTS, FAST_HOLEY_ELEMENTS,
- // Handled by if_fast_double.
- FAST_DOUBLE_ELEMENTS,
- // Handled by if_fast_holey_double.
- FAST_HOLEY_DOUBLE_ELEMENTS};
- Label* labels[] = {// FAST_{SMI,}_ELEMENTS
- &if_fast_packed, &if_fast_packed,
- // FAST_HOLEY_{SMI,}_ELEMENTS
- &if_fast_holey, &if_fast_holey,
- // FAST_DOUBLE_ELEMENTS
- &if_fast_double,
- // FAST_HOLEY_DOUBLE_ELEMENTS
- &if_fast_holey_double};
- Switch(elements_kind, unimplemented_elements_kind, kinds, labels,
- arraysize(kinds));
-
- Bind(&if_fast_packed);
- {
- Comment("fast packed elements");
- Return(LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS));
- }
-
- Bind(&if_fast_holey);
- {
- Comment("fast holey elements");
- Node* element =
- LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS);
- GotoIf(WordEqual(element, TheHoleConstant()), if_hole);
- Return(element);
- }
-
- Bind(&if_fast_double);
- {
- Comment("packed double elements");
- var_double_value->Bind(LoadFixedDoubleArrayElement(
- elements, intptr_index, MachineType::Float64(), 0, INTPTR_PARAMETERS));
- Goto(rebox_double);
- }
-
- Bind(&if_fast_holey_double);
- {
- Comment("holey double elements");
- Node* value = LoadFixedDoubleArrayElement(elements, intptr_index,
- MachineType::Float64(), 0,
- INTPTR_PARAMETERS, if_hole);
- var_double_value->Bind(value);
- Goto(rebox_double);
- }
-
- Bind(&if_nonfast);
- {
- STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
- GotoIf(IntPtrGreaterThanOrEqual(
- elements_kind,
- IntPtrConstant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
- &if_typed_array);
- GotoIf(IntPtrEqual(elements_kind, IntPtrConstant(DICTIONARY_ELEMENTS)),
- &if_dictionary);
- Goto(unimplemented_elements_kind);
- }
-
- Bind(&if_dictionary);
- {
- Comment("dictionary elements");
- GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), out_of_bounds);
- Variable var_entry(this, MachineType::PointerRepresentation());
- Label if_found(this);
- NumberDictionaryLookup<SeededNumberDictionary>(
- elements, intptr_index, &if_found, &var_entry, if_hole);
- Bind(&if_found);
- // Check that the value is a data property.
- Node* details_index = EntryToIndex<SeededNumberDictionary>(
- var_entry.value(), SeededNumberDictionary::kEntryDetailsIndex);
- Node* details = SmiToWord32(
- LoadFixedArrayElement(elements, details_index, 0, INTPTR_PARAMETERS));
- Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
- // TODO(jkummerow): Support accessors without missing?
- GotoUnless(Word32Equal(kind, Int32Constant(kData)), miss);
- // Finally, load the value.
- Node* value_index = EntryToIndex<SeededNumberDictionary>(
- var_entry.value(), SeededNumberDictionary::kEntryValueIndex);
- Return(LoadFixedArrayElement(elements, value_index, 0, INTPTR_PARAMETERS));
- }
-
- Bind(&if_typed_array);
- {
- Comment("typed elements");
- // Check if buffer has been neutered.
- Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
- Node* bitfield = LoadObjectField(buffer, JSArrayBuffer::kBitFieldOffset,
- MachineType::Uint32());
- Node* neutered_bit =
- Word32And(bitfield, Int32Constant(JSArrayBuffer::WasNeutered::kMask));
- GotoUnless(Word32Equal(neutered_bit, Int32Constant(0)), miss);
-
- // Bounds check.
- Node* length =
- SmiUntag(LoadObjectField(object, JSTypedArray::kLengthOffset));
- GotoUnless(UintPtrLessThan(intptr_index, length), out_of_bounds);
-
- // Backing store = external_pointer + base_pointer.
- Node* external_pointer =
- LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
- MachineType::Pointer());
- Node* base_pointer =
- LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
- Node* backing_store = IntPtrAdd(external_pointer, base_pointer);
-
- Label uint8_elements(this), int8_elements(this), uint16_elements(this),
- int16_elements(this), uint32_elements(this), int32_elements(this),
- float32_elements(this), float64_elements(this);
- Label* elements_kind_labels[] = {
- &uint8_elements, &uint8_elements, &int8_elements,
- &uint16_elements, &int16_elements, &uint32_elements,
- &int32_elements, &float32_elements, &float64_elements};
- int32_t elements_kinds[] = {
- UINT8_ELEMENTS, UINT8_CLAMPED_ELEMENTS, INT8_ELEMENTS,
- UINT16_ELEMENTS, INT16_ELEMENTS, UINT32_ELEMENTS,
- INT32_ELEMENTS, FLOAT32_ELEMENTS, FLOAT64_ELEMENTS};
- const size_t kTypedElementsKindCount =
- LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND -
- FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND + 1;
- DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kinds));
- DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kind_labels));
- Switch(elements_kind, miss, elements_kinds, elements_kind_labels,
- kTypedElementsKindCount);
- Bind(&uint8_elements);
- {
- Comment("UINT8_ELEMENTS"); // Handles UINT8_CLAMPED_ELEMENTS too.
- Return(SmiTag(Load(MachineType::Uint8(), backing_store, intptr_index)));
- }
- Bind(&int8_elements);
- {
- Comment("INT8_ELEMENTS");
- Return(SmiTag(Load(MachineType::Int8(), backing_store, intptr_index)));
- }
- Bind(&uint16_elements);
- {
- Comment("UINT16_ELEMENTS");
- Node* index = WordShl(intptr_index, IntPtrConstant(1));
- Return(SmiTag(Load(MachineType::Uint16(), backing_store, index)));
- }
- Bind(&int16_elements);
- {
- Comment("INT16_ELEMENTS");
- Node* index = WordShl(intptr_index, IntPtrConstant(1));
- Return(SmiTag(Load(MachineType::Int16(), backing_store, index)));
- }
- Bind(&uint32_elements);
- {
- Comment("UINT32_ELEMENTS");
- Node* index = WordShl(intptr_index, IntPtrConstant(2));
- Node* element = Load(MachineType::Uint32(), backing_store, index);
- Return(ChangeUint32ToTagged(element));
- }
- Bind(&int32_elements);
- {
- Comment("INT32_ELEMENTS");
- Node* index = WordShl(intptr_index, IntPtrConstant(2));
- Node* element = Load(MachineType::Int32(), backing_store, index);
- Return(ChangeInt32ToTagged(element));
- }
- Bind(&float32_elements);
- {
- Comment("FLOAT32_ELEMENTS");
- Node* index = WordShl(intptr_index, IntPtrConstant(2));
- Node* element = Load(MachineType::Float32(), backing_store, index);
- var_double_value->Bind(ChangeFloat32ToFloat64(element));
- Goto(rebox_double);
- }
- Bind(&float64_elements);
- {
- Comment("FLOAT64_ELEMENTS");
- Node* index = WordShl(intptr_index, IntPtrConstant(3));
- Node* element = Load(MachineType::Float64(), backing_store, index);
- var_double_value->Bind(element);
- Goto(rebox_double);
- }
- }
-}
-
-void CodeStubAssembler::HandleLoadICHandlerCase(
- const LoadICParameters* p, Node* handler, Label* miss,
- ElementSupport support_elements) {
- Comment("have_handler");
- Variable var_holder(this, MachineRepresentation::kTagged);
- var_holder.Bind(p->receiver);
- Variable var_smi_handler(this, MachineRepresentation::kTagged);
- var_smi_handler.Bind(handler);
-
- Variable* vars[] = {&var_holder, &var_smi_handler};
- Label if_smi_handler(this, 2, vars);
- Label try_proto_handler(this), call_handler(this);
-
- Branch(TaggedIsSmi(handler), &if_smi_handler, &try_proto_handler);
-
- // |handler| is a Smi, encoding what to do. See SmiHandler methods
- // for the encoding format.
- Bind(&if_smi_handler);
- {
- HandleLoadICSmiHandlerCase(p, var_holder.value(), var_smi_handler.value(),
- miss, support_elements);
- }
-
- Bind(&try_proto_handler);
- {
- GotoIf(IsCodeMap(LoadMap(handler)), &call_handler);
- HandleLoadICProtoHandler(p, handler, &var_holder, &var_smi_handler,
- &if_smi_handler, miss);
- }
-
- Bind(&call_handler);
- {
- typedef LoadWithVectorDescriptor Descriptor;
- TailCallStub(Descriptor(isolate()), handler, p->context,
- Arg(Descriptor::kReceiver, p->receiver),
- Arg(Descriptor::kName, p->name),
- Arg(Descriptor::kSlot, p->slot),
- Arg(Descriptor::kVector, p->vector));
- }
-}
-
-void CodeStubAssembler::HandleLoadICSmiHandlerCase(
- const LoadICParameters* p, Node* holder, Node* smi_handler, Label* miss,
- ElementSupport support_elements) {
- Variable var_double_value(this, MachineRepresentation::kFloat64);
- Label rebox_double(this, &var_double_value);
-
- Node* handler_word = SmiUntag(smi_handler);
- Node* handler_kind = DecodeWord<LoadHandler::KindBits>(handler_word);
- if (support_elements == kSupportElements) {
- Label property(this);
- GotoUnless(
- WordEqual(handler_kind, IntPtrConstant(LoadHandler::kForElements)),
- &property);
-
- Comment("element_load");
- Node* intptr_index = TryToIntptr(p->name, miss);
- Node* elements = LoadElements(holder);
- Node* is_jsarray_condition =
- IsSetWord<LoadHandler::IsJsArrayBits>(handler_word);
- Node* elements_kind =
- DecodeWord<LoadHandler::ElementsKindBits>(handler_word);
- Label if_hole(this), unimplemented_elements_kind(this);
- Label* out_of_bounds = miss;
- EmitElementLoad(holder, elements, elements_kind, intptr_index,
- is_jsarray_condition, &if_hole, &rebox_double,
- &var_double_value, &unimplemented_elements_kind,
- out_of_bounds, miss);
-
- Bind(&unimplemented_elements_kind);
- {
- // Smi handlers should only be installed for supported elements kinds.
- // Crash if we get here.
- DebugBreak();
- Goto(miss);
- }
-
- Bind(&if_hole);
- {
- Comment("convert hole");
- GotoUnless(IsSetWord<LoadHandler::ConvertHoleBits>(handler_word), miss);
- Node* protector_cell = LoadRoot(Heap::kArrayProtectorRootIndex);
- DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
- GotoUnless(
- WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
- SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
- miss);
- Return(UndefinedConstant());
- }
-
- Bind(&property);
- Comment("property_load");
- }
-
- Label constant(this), field(this);
- Branch(WordEqual(handler_kind, IntPtrConstant(LoadHandler::kForFields)),
- &field, &constant);
-
- Bind(&field);
- {
- Comment("field_load");
- Node* offset = DecodeWord<LoadHandler::FieldOffsetBits>(handler_word);
-
- Label inobject(this), out_of_object(this);
- Branch(IsSetWord<LoadHandler::IsInobjectBits>(handler_word), &inobject,
- &out_of_object);
-
- Bind(&inobject);
- {
- Label is_double(this);
- GotoIf(IsSetWord<LoadHandler::IsDoubleBits>(handler_word), &is_double);
- Return(LoadObjectField(holder, offset));
-
- Bind(&is_double);
- if (FLAG_unbox_double_fields) {
- var_double_value.Bind(
- LoadObjectField(holder, offset, MachineType::Float64()));
- } else {
- Node* mutable_heap_number = LoadObjectField(holder, offset);
- var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
- }
- Goto(&rebox_double);
- }
-
- Bind(&out_of_object);
- {
- Label is_double(this);
- Node* properties = LoadProperties(holder);
- Node* value = LoadObjectField(properties, offset);
- GotoIf(IsSetWord<LoadHandler::IsDoubleBits>(handler_word), &is_double);
- Return(value);
-
- Bind(&is_double);
- var_double_value.Bind(LoadHeapNumberValue(value));
- Goto(&rebox_double);
- }
-
- Bind(&rebox_double);
- Return(AllocateHeapNumberWithValue(var_double_value.value()));
- }
-
- Bind(&constant);
- {
- Comment("constant_load");
- Node* descriptors = LoadMapDescriptors(LoadMap(holder));
- Node* descriptor =
- DecodeWord<LoadHandler::DescriptorValueIndexBits>(handler_word);
- CSA_ASSERT(this,
- UintPtrLessThan(descriptor,
- LoadAndUntagFixedArrayBaseLength(descriptors)));
- Node* value =
- LoadFixedArrayElement(descriptors, descriptor, 0, INTPTR_PARAMETERS);
-
- Label if_accessor_info(this);
- GotoIf(IsSetWord<LoadHandler::IsAccessorInfoBits>(handler_word),
- &if_accessor_info);
- Return(value);
-
- Bind(&if_accessor_info);
- Callable callable = CodeFactory::ApiGetter(isolate());
- TailCallStub(callable, p->context, p->receiver, holder, value);
- }
-}
-
-void CodeStubAssembler::HandleLoadICProtoHandler(
- const LoadICParameters* p, Node* handler, Variable* var_holder,
- Variable* var_smi_handler, Label* if_smi_handler, Label* miss) {
- DCHECK_EQ(MachineRepresentation::kTagged, var_holder->rep());
- DCHECK_EQ(MachineRepresentation::kTagged, var_smi_handler->rep());
-
- // IC dispatchers rely on these assumptions to be held.
- STATIC_ASSERT(FixedArray::kLengthOffset == LoadHandler::kHolderCellOffset);
- DCHECK_EQ(FixedArray::OffsetOfElementAt(LoadHandler::kSmiHandlerIndex),
- LoadHandler::kSmiHandlerOffset);
- DCHECK_EQ(FixedArray::OffsetOfElementAt(LoadHandler::kValidityCellIndex),
- LoadHandler::kValidityCellOffset);
-
- // Both FixedArray and Tuple3 handlers have validity cell at the same offset.
- Label validity_cell_check_done(this);
- Node* validity_cell =
- LoadObjectField(handler, LoadHandler::kValidityCellOffset);
- GotoIf(WordEqual(validity_cell, IntPtrConstant(0)),
- &validity_cell_check_done);
- Node* cell_value = LoadObjectField(validity_cell, Cell::kValueOffset);
- GotoIf(WordNotEqual(cell_value,
- SmiConstant(Smi::FromInt(Map::kPrototypeChainValid))),
- miss);
- Goto(&validity_cell_check_done);
-
- Bind(&validity_cell_check_done);
- Node* smi_handler = LoadObjectField(handler, LoadHandler::kSmiHandlerOffset);
- CSA_ASSERT(this, TaggedIsSmi(smi_handler));
- Node* handler_flags = SmiUntag(smi_handler);
-
- Label check_prototypes(this);
- GotoUnless(
- IsSetWord<LoadHandler::DoNegativeLookupOnReceiverBits>(handler_flags),
- &check_prototypes);
- {
- CSA_ASSERT(this, Word32BinaryNot(
- HasInstanceType(p->receiver, JS_GLOBAL_OBJECT_TYPE)));
- // We have a dictionary receiver, do a negative lookup check.
- NameDictionaryNegativeLookup(p->receiver, p->name, miss);
- Goto(&check_prototypes);
- }
-
- Bind(&check_prototypes);
- Node* maybe_holder_cell =
- LoadObjectField(handler, LoadHandler::kHolderCellOffset);
- Label array_handler(this), tuple_handler(this);
- Branch(TaggedIsSmi(maybe_holder_cell), &array_handler, &tuple_handler);
-
- Bind(&tuple_handler);
- {
- Label load_existent(this);
- GotoIf(WordNotEqual(maybe_holder_cell, NullConstant()), &load_existent);
- // This is a handler for a load of a non-existent value.
- Return(UndefinedConstant());
-
- Bind(&load_existent);
- Node* holder = LoadWeakCellValue(maybe_holder_cell);
- // The |holder| is guaranteed to be alive at this point since we passed
- // both the receiver map check and the validity cell check.
- CSA_ASSERT(this, WordNotEqual(holder, IntPtrConstant(0)));
-
- var_holder->Bind(holder);
- var_smi_handler->Bind(smi_handler);
- Goto(if_smi_handler);
- }
-
- Bind(&array_handler);
- {
- typedef LoadICProtoArrayDescriptor Descriptor;
- LoadICProtoArrayStub stub(isolate());
- Node* target = HeapConstant(stub.GetCode());
- TailCallStub(Descriptor(isolate()), target, p->context,
- Arg(Descriptor::kReceiver, p->receiver),
- Arg(Descriptor::kName, p->name),
- Arg(Descriptor::kSlot, p->slot),
- Arg(Descriptor::kVector, p->vector),
- Arg(Descriptor::kHandler, handler));
- }
-}
-
-void CodeStubAssembler::LoadICProtoArray(const LoadICParameters* p,
- Node* handler) {
- Label miss(this);
- CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(handler)));
- CSA_ASSERT(this, IsFixedArrayMap(LoadMap(handler)));
-
- Node* smi_handler = LoadObjectField(handler, LoadHandler::kSmiHandlerOffset);
- Node* handler_flags = SmiUntag(smi_handler);
-
- Node* handler_length = LoadAndUntagFixedArrayBaseLength(handler);
-
- Node* holder = EmitLoadICProtoArrayCheck(p, handler, handler_length,
- handler_flags, &miss);
-
- HandleLoadICSmiHandlerCase(p, holder, smi_handler, &miss, kOnlyProperties);
-
- Bind(&miss);
- {
- TailCallRuntime(Runtime::kLoadIC_Miss, p->context, p->receiver, p->name,
- p->slot, p->vector);
- }
-}
-
-Node* CodeStubAssembler::EmitLoadICProtoArrayCheck(const LoadICParameters* p,
- Node* handler,
- Node* handler_length,
- Node* handler_flags,
- Label* miss) {
- Variable start_index(this, MachineType::PointerRepresentation());
- start_index.Bind(IntPtrConstant(LoadHandler::kFirstPrototypeIndex));
-
- Label can_access(this);
- GotoUnless(IsSetWord<LoadHandler::DoAccessCheckOnReceiverBits>(handler_flags),
- &can_access);
- {
- // Skip this entry of a handler.
- start_index.Bind(IntPtrConstant(LoadHandler::kFirstPrototypeIndex + 1));
-
- int offset =
- FixedArray::OffsetOfElementAt(LoadHandler::kFirstPrototypeIndex);
- Node* expected_native_context =
- LoadWeakCellValue(LoadObjectField(handler, offset), miss);
- CSA_ASSERT(this, IsNativeContext(expected_native_context));
-
- Node* native_context = LoadNativeContext(p->context);
- GotoIf(WordEqual(expected_native_context, native_context), &can_access);
- // If the receiver is not a JSGlobalProxy then we miss.
- GotoUnless(IsJSGlobalProxy(p->receiver), miss);
- // For JSGlobalProxy receiver try to compare security tokens of current
- // and expected native contexts.
- Node* expected_token = LoadContextElement(expected_native_context,
- Context::SECURITY_TOKEN_INDEX);
- Node* current_token =
- LoadContextElement(native_context, Context::SECURITY_TOKEN_INDEX);
- Branch(WordEqual(expected_token, current_token), &can_access, miss);
- }
- Bind(&can_access);
-
- BuildFastLoop(
- MachineType::PointerRepresentation(), start_index.value(), handler_length,
- [this, p, handler, miss](CodeStubAssembler*, Node* current) {
- Node* prototype_cell =
- LoadFixedArrayElement(handler, current, 0, INTPTR_PARAMETERS);
- CheckPrototype(prototype_cell, p->name, miss);
- },
- 1, IndexAdvanceMode::kPost);
-
- Node* maybe_holder_cell = LoadFixedArrayElement(
- handler, IntPtrConstant(LoadHandler::kHolderCellIndex), 0,
- INTPTR_PARAMETERS);
- Label load_existent(this);
- GotoIf(WordNotEqual(maybe_holder_cell, NullConstant()), &load_existent);
- // This is a handler for a load of a non-existent value.
- Return(UndefinedConstant());
-
- Bind(&load_existent);
- Node* holder = LoadWeakCellValue(maybe_holder_cell);
- // The |holder| is guaranteed to be alive at this point since we passed
- // the receiver map check, the validity cell check and the prototype chain
- // check.
- CSA_ASSERT(this, WordNotEqual(holder, IntPtrConstant(0)));
- return holder;
-}
-
-void CodeStubAssembler::CheckPrototype(Node* prototype_cell, Node* name,
- Label* miss) {
- Node* maybe_prototype = LoadWeakCellValue(prototype_cell, miss);
-
- Label done(this);
- Label if_property_cell(this), if_dictionary_object(this);
-
- // |maybe_prototype| is either a PropertyCell or a slow-mode prototype.
- Branch(WordEqual(LoadMap(maybe_prototype),
- LoadRoot(Heap::kGlobalPropertyCellMapRootIndex)),
- &if_property_cell, &if_dictionary_object);
-
- Bind(&if_dictionary_object);
- {
- CSA_ASSERT(this, IsDictionaryMap(LoadMap(maybe_prototype)));
- NameDictionaryNegativeLookup(maybe_prototype, name, miss);
- Goto(&done);
- }
-
- Bind(&if_property_cell);
- {
- // Ensure the property cell still contains the hole.
- Node* value = LoadObjectField(maybe_prototype, PropertyCell::kValueOffset);
- GotoIf(WordNotEqual(value, LoadRoot(Heap::kTheHoleValueRootIndex)), miss);
- Goto(&done);
- }
-
- Bind(&done);
-}
-
-void CodeStubAssembler::NameDictionaryNegativeLookup(Node* object, Node* name,
- Label* miss) {
- CSA_ASSERT(this, IsDictionaryMap(LoadMap(object)));
- Node* properties = LoadProperties(object);
- // Ensure the property does not exist in a dictionary-mode object.
- Variable var_name_index(this, MachineType::PointerRepresentation());
- Label done(this);
- NameDictionaryLookup<NameDictionary>(properties, name, miss, &var_name_index,
- &done);
- Bind(&done);
-}
-
-void CodeStubAssembler::LoadIC(const LoadICParameters* p) {
- Variable var_handler(this, MachineRepresentation::kTagged);
- // TODO(ishell): defer blocks when it works.
- Label if_handler(this, &var_handler), try_polymorphic(this),
- try_megamorphic(this /*, Label::kDeferred*/),
- miss(this /*, Label::kDeferred*/);
-
- Node* receiver_map = LoadReceiverMap(p->receiver);
-
- // Check monomorphic case.
- Node* feedback =
- TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
- &var_handler, &try_polymorphic);
- Bind(&if_handler);
- {
- HandleLoadICHandlerCase(p, var_handler.value(), &miss);
- }
-
- Bind(&try_polymorphic);
- {
- // Check polymorphic case.
- Comment("LoadIC_try_polymorphic");
- GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
- &try_megamorphic);
- HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
- &miss, 2);
- }
-
- Bind(&try_megamorphic);
- {
- // Check megamorphic case.
- GotoUnless(
- WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
- &miss);
-
- TryProbeStubCache(isolate()->load_stub_cache(), p->receiver, p->name,
- &if_handler, &var_handler, &miss);
- }
- Bind(&miss);
- {
- TailCallRuntime(Runtime::kLoadIC_Miss, p->context, p->receiver, p->name,
- p->slot, p->vector);
- }
-}
-
-void CodeStubAssembler::KeyedLoadIC(const LoadICParameters* p) {
- Variable var_handler(this, MachineRepresentation::kTagged);
- // TODO(ishell): defer blocks when it works.
- Label if_handler(this, &var_handler), try_polymorphic(this),
- try_megamorphic(this /*, Label::kDeferred*/),
- try_polymorphic_name(this /*, Label::kDeferred*/),
- miss(this /*, Label::kDeferred*/);
-
- Node* receiver_map = LoadReceiverMap(p->receiver);
-
- // Check monomorphic case.
- Node* feedback =
- TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
- &var_handler, &try_polymorphic);
- Bind(&if_handler);
- {
- HandleLoadICHandlerCase(p, var_handler.value(), &miss, kSupportElements);
- }
-
- Bind(&try_polymorphic);
- {
- // Check polymorphic case.
- Comment("KeyedLoadIC_try_polymorphic");
- GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
- &try_megamorphic);
- HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
- &miss, 2);
- }
-
- Bind(&try_megamorphic);
- {
- // Check megamorphic case.
- Comment("KeyedLoadIC_try_megamorphic");
- GotoUnless(
- WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
- &try_polymorphic_name);
- // TODO(jkummerow): Inline this? Or some of it?
- TailCallStub(CodeFactory::KeyedLoadIC_Megamorphic(isolate()), p->context,
- p->receiver, p->name, p->slot, p->vector);
- }
- Bind(&try_polymorphic_name);
- {
- // We might have a name in feedback, and a fixed array in the next slot.
- Comment("KeyedLoadIC_try_polymorphic_name");
- GotoUnless(WordEqual(feedback, p->name), &miss);
- // If the name comparison succeeded, we know we have a fixed array with
- // at least one map/handler pair.
- Node* offset = ElementOffsetFromIndex(
- p->slot, FAST_HOLEY_ELEMENTS, SMI_PARAMETERS,
- FixedArray::kHeaderSize + kPointerSize - kHeapObjectTag);
- Node* array = Load(MachineType::AnyTagged(), p->vector, offset);
- HandlePolymorphicCase(receiver_map, array, &if_handler, &var_handler, &miss,
- 1);
- }
- Bind(&miss);
- {
- Comment("KeyedLoadIC_miss");
- TailCallRuntime(Runtime::kKeyedLoadIC_Miss, p->context, p->receiver,
- p->name, p->slot, p->vector);
- }
-}
-
-void CodeStubAssembler::KeyedLoadICGeneric(const LoadICParameters* p) {
- Variable var_index(this, MachineType::PointerRepresentation());
- Variable var_details(this, MachineRepresentation::kWord32);
- Variable var_value(this, MachineRepresentation::kTagged);
- Label if_index(this), if_unique_name(this), if_element_hole(this),
- if_oob(this), slow(this), stub_cache_miss(this),
- if_property_dictionary(this), if_found_on_receiver(this);
-
- Node* receiver = p->receiver;
- GotoIf(TaggedIsSmi(receiver), &slow);
- Node* receiver_map = LoadMap(receiver);
- Node* instance_type = LoadMapInstanceType(receiver_map);
- // Receivers requiring non-standard element accesses (interceptors, access
- // checks, strings and string wrappers, proxies) are handled in the runtime.
- GotoIf(Int32LessThanOrEqual(instance_type,
- Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
- &slow);
-
- Node* key = p->name;
- TryToName(key, &if_index, &var_index, &if_unique_name, &slow);
-
- Bind(&if_index);
- {
- Comment("integer index");
- Node* index = var_index.value();
- Node* elements = LoadElements(receiver);
- Node* elements_kind = LoadMapElementsKind(receiver_map);
- Node* is_jsarray_condition =
- Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE));
- Variable var_double_value(this, MachineRepresentation::kFloat64);
- Label rebox_double(this, &var_double_value);
-
- // Unimplemented elements kinds fall back to a runtime call.
- Label* unimplemented_elements_kind = &slow;
- IncrementCounter(isolate()->counters()->ic_keyed_load_generic_smi(), 1);
- EmitElementLoad(receiver, elements, elements_kind, index,
- is_jsarray_condition, &if_element_hole, &rebox_double,
- &var_double_value, unimplemented_elements_kind, &if_oob,
- &slow);
-
- Bind(&rebox_double);
- Return(AllocateHeapNumberWithValue(var_double_value.value()));
- }
-
- Bind(&if_oob);
- {
- Comment("out of bounds");
- Node* index = var_index.value();
- // Negative keys can't take the fast OOB path.
- GotoIf(IntPtrLessThan(index, IntPtrConstant(0)), &slow);
- // Positive OOB indices are effectively the same as hole loads.
- Goto(&if_element_hole);
- }
-
- Bind(&if_element_hole);
- {
- Comment("found the hole");
- Label return_undefined(this);
- BranchIfPrototypesHaveNoElements(receiver_map, &return_undefined, &slow);
-
- Bind(&return_undefined);
- Return(UndefinedConstant());
- }
-
- Node* properties = nullptr;
- Bind(&if_unique_name);
- {
- Comment("key is unique name");
- // Check if the receiver has fast or slow properties.
- properties = LoadProperties(receiver);
- Node* properties_map = LoadMap(properties);
- GotoIf(WordEqual(properties_map, LoadRoot(Heap::kHashTableMapRootIndex)),
- &if_property_dictionary);
-
- // Try looking up the property on the receiver; if unsuccessful, look
- // for a handler in the stub cache.
- Comment("DescriptorArray lookup");
-
- // Skip linear search if there are too many descriptors.
- // TODO(jkummerow): Consider implementing binary search.
- // See also TryLookupProperty() which has the same limitation.
- const int32_t kMaxLinear = 210;
- Label stub_cache(this);
- Node* bitfield3 = LoadMapBitField3(receiver_map);
- Node* nof =
- DecodeWordFromWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
- GotoIf(UintPtrGreaterThan(nof, IntPtrConstant(kMaxLinear)), &stub_cache);
- Node* descriptors = LoadMapDescriptors(receiver_map);
- Variable var_name_index(this, MachineType::PointerRepresentation());
- Label if_descriptor_found(this);
- DescriptorLookupLinear(key, descriptors, nof, &if_descriptor_found,
- &var_name_index, &stub_cache);
-
- Bind(&if_descriptor_found);
- {
- LoadPropertyFromFastObject(receiver, receiver_map, descriptors,
- var_name_index.value(), &var_details,
- &var_value);
- Goto(&if_found_on_receiver);
- }
-
- Bind(&stub_cache);
- {
- Comment("stub cache probe for fast property load");
- Variable var_handler(this, MachineRepresentation::kTagged);
- Label found_handler(this, &var_handler), stub_cache_miss(this);
- TryProbeStubCache(isolate()->load_stub_cache(), receiver, key,
- &found_handler, &var_handler, &stub_cache_miss);
- Bind(&found_handler);
- { HandleLoadICHandlerCase(p, var_handler.value(), &slow); }
-
- Bind(&stub_cache_miss);
- {
- Comment("KeyedLoadGeneric_miss");
- TailCallRuntime(Runtime::kKeyedLoadIC_Miss, p->context, p->receiver,
- p->name, p->slot, p->vector);
- }
- }
- }
-
- Bind(&if_property_dictionary);
- {
- Comment("dictionary property load");
- // We checked for LAST_CUSTOM_ELEMENTS_RECEIVER before, which rules out
- // seeing global objects here (which would need special handling).
-
- Variable var_name_index(this, MachineType::PointerRepresentation());
- Label dictionary_found(this, &var_name_index);
- NameDictionaryLookup<NameDictionary>(properties, key, &dictionary_found,
- &var_name_index, &slow);
- Bind(&dictionary_found);
- {
- LoadPropertyFromNameDictionary(properties, var_name_index.value(),
- &var_details, &var_value);
- Goto(&if_found_on_receiver);
- }
- }
-
- Bind(&if_found_on_receiver);
- {
- Node* value = CallGetterIfAccessor(var_value.value(), var_details.value(),
- p->context, receiver, &slow);
- IncrementCounter(isolate()->counters()->ic_keyed_load_generic_symbol(), 1);
- Return(value);
- }
-
- Bind(&slow);
- {
- Comment("KeyedLoadGeneric_slow");
- IncrementCounter(isolate()->counters()->ic_keyed_load_generic_slow(), 1);
- // TODO(jkummerow): Should we use the GetProperty TF stub instead?
- TailCallRuntime(Runtime::kKeyedGetProperty, p->context, p->receiver,
- p->name);
- }
-}
-
-void CodeStubAssembler::HandleStoreFieldAndReturn(Node* handler_word,
- Node* holder,
- Representation representation,
- Node* value, Node* transition,
- Label* miss) {
- bool transition_to_field = transition != nullptr;
- Node* prepared_value = PrepareValueForWrite(value, representation, miss);
-
- if (transition_to_field) {
- Label storage_extended(this);
- GotoUnless(IsSetWord<StoreHandler::ExtendStorageBits>(handler_word),
- &storage_extended);
- Comment("[ Extend storage");
- ExtendPropertiesBackingStore(holder);
- Comment("] Extend storage");
- Goto(&storage_extended);
-
- Bind(&storage_extended);
- }
-
- Node* offset = DecodeWord<StoreHandler::FieldOffsetBits>(handler_word);
- Label if_inobject(this), if_out_of_object(this);
- Branch(IsSetWord<StoreHandler::IsInobjectBits>(handler_word), &if_inobject,
- &if_out_of_object);
-
- Bind(&if_inobject);
- {
- StoreNamedField(holder, offset, true, representation, prepared_value,
- transition_to_field);
- if (transition_to_field) {
- StoreObjectField(holder, JSObject::kMapOffset, transition);
- }
- Return(value);
- }
-
- Bind(&if_out_of_object);
- {
- StoreNamedField(holder, offset, false, representation, prepared_value,
- transition_to_field);
- if (transition_to_field) {
- StoreObjectField(holder, JSObject::kMapOffset, transition);
- }
- Return(value);
- }
-}
-
-void CodeStubAssembler::HandleStoreICSmiHandlerCase(Node* handler_word,
- Node* holder, Node* value,
- Node* transition,
- Label* miss) {
- Comment(transition ? "transitioning field store" : "field store");
-
-#ifdef DEBUG
- Node* handler_kind = DecodeWord<StoreHandler::KindBits>(handler_word);
- if (transition) {
- CSA_ASSERT(
- this,
- WordOr(WordEqual(handler_kind,
- IntPtrConstant(StoreHandler::kTransitionToField)),
- WordEqual(handler_kind,
- IntPtrConstant(StoreHandler::kTransitionToConstant))));
- } else {
- CSA_ASSERT(this, WordEqual(handler_kind,
- IntPtrConstant(StoreHandler::kStoreField)));
- }
-#endif
-
- Node* field_representation =
- DecodeWord<StoreHandler::FieldRepresentationBits>(handler_word);
-
- Label if_smi_field(this), if_double_field(this), if_heap_object_field(this),
- if_tagged_field(this);
-
- GotoIf(WordEqual(field_representation, IntPtrConstant(StoreHandler::kTagged)),
- &if_tagged_field);
- GotoIf(WordEqual(field_representation,
- IntPtrConstant(StoreHandler::kHeapObject)),
- &if_heap_object_field);
- GotoIf(WordEqual(field_representation, IntPtrConstant(StoreHandler::kDouble)),
- &if_double_field);
- CSA_ASSERT(this, WordEqual(field_representation,
- IntPtrConstant(StoreHandler::kSmi)));
- Goto(&if_smi_field);
-
- Bind(&if_tagged_field);
- {
- Comment("store tagged field");
- HandleStoreFieldAndReturn(handler_word, holder, Representation::Tagged(),
- value, transition, miss);
- }
-
- Bind(&if_double_field);
- {
- Comment("store double field");
- HandleStoreFieldAndReturn(handler_word, holder, Representation::Double(),
- value, transition, miss);
- }
-
- Bind(&if_heap_object_field);
- {
- Comment("store heap object field");
- // Generate full field type check here and then store value as Tagged.
- Node* prepared_value =
- PrepareValueForWrite(value, Representation::HeapObject(), miss);
- Node* value_index_in_descriptor =
- DecodeWord<StoreHandler::DescriptorValueIndexBits>(handler_word);
- Node* descriptors =
- LoadMapDescriptors(transition ? transition : LoadMap(holder));
- Node* maybe_field_type = LoadFixedArrayElement(
- descriptors, value_index_in_descriptor, 0, INTPTR_PARAMETERS);
- Label do_store(this);
- GotoIf(TaggedIsSmi(maybe_field_type), &do_store);
- // Check that value type matches the field type.
- {
- Node* field_type = LoadWeakCellValue(maybe_field_type, miss);
- Branch(WordEqual(LoadMap(prepared_value), field_type), &do_store, miss);
- }
- Bind(&do_store);
- HandleStoreFieldAndReturn(handler_word, holder, Representation::Tagged(),
- prepared_value, transition, miss);
- }
-
- Bind(&if_smi_field);
- {
- Comment("store smi field");
- HandleStoreFieldAndReturn(handler_word, holder, Representation::Smi(),
- value, transition, miss);
- }
-}
-
-void CodeStubAssembler::HandleStoreICHandlerCase(const StoreICParameters* p,
- Node* handler, Label* miss) {
- Label if_smi_handler(this);
- Label try_proto_handler(this), call_handler(this);
-
- Branch(TaggedIsSmi(handler), &if_smi_handler, &try_proto_handler);
-
- // |handler| is a Smi, encoding what to do. See SmiHandler methods
- // for the encoding format.
- Bind(&if_smi_handler);
- {
- Node* holder = p->receiver;
- Node* handler_word = SmiUntag(handler);
-
- // Handle non-transitioning field stores.
- HandleStoreICSmiHandlerCase(handler_word, holder, p->value, nullptr, miss);
- }
-
- Bind(&try_proto_handler);
- {
- GotoIf(IsCodeMap(LoadMap(handler)), &call_handler);
- HandleStoreICProtoHandler(p, handler, miss);
- }
-
- // |handler| is a heap object. Must be code, call it.
- Bind(&call_handler);
- {
- StoreWithVectorDescriptor descriptor(isolate());
- TailCallStub(descriptor, handler, p->context, p->receiver, p->name,
- p->value, p->slot, p->vector);
- }
-}
-
-void CodeStubAssembler::HandleStoreICProtoHandler(const StoreICParameters* p,
- Node* handler, Label* miss) {
- // IC dispatchers rely on these assumptions to be held.
- STATIC_ASSERT(FixedArray::kLengthOffset ==
- StoreHandler::kTransitionCellOffset);
- DCHECK_EQ(FixedArray::OffsetOfElementAt(StoreHandler::kSmiHandlerIndex),
- StoreHandler::kSmiHandlerOffset);
- DCHECK_EQ(FixedArray::OffsetOfElementAt(StoreHandler::kValidityCellIndex),
- StoreHandler::kValidityCellOffset);
-
- // Both FixedArray and Tuple3 handlers have validity cell at the same offset.
- Label validity_cell_check_done(this);
- Node* validity_cell =
- LoadObjectField(handler, StoreHandler::kValidityCellOffset);
- GotoIf(WordEqual(validity_cell, IntPtrConstant(0)),
- &validity_cell_check_done);
- Node* cell_value = LoadObjectField(validity_cell, Cell::kValueOffset);
- GotoIf(WordNotEqual(cell_value,
- SmiConstant(Smi::FromInt(Map::kPrototypeChainValid))),
- miss);
- Goto(&validity_cell_check_done);
-
- Bind(&validity_cell_check_done);
- Node* smi_handler = LoadObjectField(handler, StoreHandler::kSmiHandlerOffset);
- CSA_ASSERT(this, TaggedIsSmi(smi_handler));
-
- Node* maybe_transition_cell =
- LoadObjectField(handler, StoreHandler::kTransitionCellOffset);
- Label array_handler(this), tuple_handler(this);
- Branch(TaggedIsSmi(maybe_transition_cell), &array_handler, &tuple_handler);
-
- Variable var_transition(this, MachineRepresentation::kTagged);
- Label if_transition(this), if_transition_to_constant(this);
- Bind(&tuple_handler);
- {
- Node* transition = LoadWeakCellValue(maybe_transition_cell, miss);
- var_transition.Bind(transition);
- Goto(&if_transition);
- }
-
- Bind(&array_handler);
- {
- Node* length = SmiUntag(maybe_transition_cell);
- BuildFastLoop(MachineType::PointerRepresentation(),
- IntPtrConstant(StoreHandler::kFirstPrototypeIndex), length,
- [this, p, handler, miss](CodeStubAssembler*, Node* current) {
- Node* prototype_cell = LoadFixedArrayElement(
- handler, current, 0, INTPTR_PARAMETERS);
- CheckPrototype(prototype_cell, p->name, miss);
- },
- 1, IndexAdvanceMode::kPost);
-
- Node* maybe_transition_cell = LoadFixedArrayElement(
- handler, IntPtrConstant(StoreHandler::kTransitionCellIndex), 0,
- INTPTR_PARAMETERS);
- Node* transition = LoadWeakCellValue(maybe_transition_cell, miss);
- var_transition.Bind(transition);
- Goto(&if_transition);
- }
-
- Bind(&if_transition);
- {
- Node* holder = p->receiver;
- Node* transition = var_transition.value();
- Node* handler_word = SmiUntag(smi_handler);
-
- GotoIf(IsSetWord32<Map::Deprecated>(LoadMapBitField3(transition)), miss);
-
- Node* handler_kind = DecodeWord<StoreHandler::KindBits>(handler_word);
- GotoIf(WordEqual(handler_kind,
- IntPtrConstant(StoreHandler::kTransitionToConstant)),
- &if_transition_to_constant);
-
- // Handle transitioning field stores.
- HandleStoreICSmiHandlerCase(handler_word, holder, p->value, transition,
- miss);
-
- Bind(&if_transition_to_constant);
- {
- // Check that constant matches value.
- Node* value_index_in_descriptor =
- DecodeWord<StoreHandler::DescriptorValueIndexBits>(handler_word);
- Node* descriptors = LoadMapDescriptors(transition);
- Node* constant = LoadFixedArrayElement(
- descriptors, value_index_in_descriptor, 0, INTPTR_PARAMETERS);
- GotoIf(WordNotEqual(p->value, constant), miss);
-
- StoreObjectField(p->receiver, JSObject::kMapOffset, transition);
- Return(p->value);
- }
- }
-}
-
-void CodeStubAssembler::StoreIC(const StoreICParameters* p) {
- Variable var_handler(this, MachineRepresentation::kTagged);
- // TODO(ishell): defer blocks when it works.
- Label if_handler(this, &var_handler), try_polymorphic(this),
- try_megamorphic(this /*, Label::kDeferred*/),
- miss(this /*, Label::kDeferred*/);
-
- Node* receiver_map = LoadReceiverMap(p->receiver);
-
- // Check monomorphic case.
- Node* feedback =
- TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
- &var_handler, &try_polymorphic);
- Bind(&if_handler);
- {
- Comment("StoreIC_if_handler");
- HandleStoreICHandlerCase(p, var_handler.value(), &miss);
- }
-
- Bind(&try_polymorphic);
- {
- // Check polymorphic case.
- Comment("StoreIC_try_polymorphic");
- GotoUnless(
- WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
- &try_megamorphic);
- HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
- &miss, 2);
- }
-
- Bind(&try_megamorphic);
- {
- // Check megamorphic case.
- GotoUnless(
- WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
- &miss);
-
- TryProbeStubCache(isolate()->store_stub_cache(), p->receiver, p->name,
- &if_handler, &var_handler, &miss);
- }
- Bind(&miss);
- {
- TailCallRuntime(Runtime::kStoreIC_Miss, p->context, p->value, p->slot,
- p->vector, p->receiver, p->name);
- }
-}
-
-void CodeStubAssembler::KeyedStoreIC(const StoreICParameters* p,
- LanguageMode language_mode) {
- Variable var_handler(this, MachineRepresentation::kTagged);
- // This is to make |miss| label see the var_handler bound on all paths.
- var_handler.Bind(IntPtrConstant(0));
-
- // TODO(ishell): defer blocks when it works.
- Label if_handler(this, &var_handler), try_polymorphic(this),
- try_megamorphic(this /*, Label::kDeferred*/),
- try_polymorphic_name(this /*, Label::kDeferred*/),
- miss(this /*, Label::kDeferred*/);
-
- Node* receiver_map = LoadReceiverMap(p->receiver);
-
- // Check monomorphic case.
- Node* feedback =
- TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
- &var_handler, &try_polymorphic);
- Bind(&if_handler);
- {
- Comment("KeyedStoreIC_if_handler");
- HandleStoreICHandlerCase(p, var_handler.value(), &miss);
- }
-
- Bind(&try_polymorphic);
- {
- // CheckPolymorphic case.
- Comment("KeyedStoreIC_try_polymorphic");
- GotoUnless(
- WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
- &try_megamorphic);
- Label if_transition_handler(this);
- Variable var_transition_map_cell(this, MachineRepresentation::kTagged);
- HandleKeyedStorePolymorphicCase(receiver_map, feedback, &if_handler,
- &var_handler, &if_transition_handler,
- &var_transition_map_cell, &miss);
- Bind(&if_transition_handler);
- Comment("KeyedStoreIC_polymorphic_transition");
- Node* transition_map =
- LoadWeakCellValue(var_transition_map_cell.value(), &miss);
- StoreTransitionDescriptor descriptor(isolate());
- TailCallStub(descriptor, var_handler.value(), p->context, p->receiver,
- p->name, transition_map, p->value, p->slot, p->vector);
- }
-
- Bind(&try_megamorphic);
- {
- // Check megamorphic case.
- Comment("KeyedStoreIC_try_megamorphic");
- GotoUnless(
- WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
- &try_polymorphic_name);
- TailCallStub(
- CodeFactory::KeyedStoreIC_Megamorphic(isolate(), language_mode),
- p->context, p->receiver, p->name, p->value, p->slot, p->vector);
- }
-
- Bind(&try_polymorphic_name);
- {
- // We might have a name in feedback, and a fixed array in the next slot.
- Comment("KeyedStoreIC_try_polymorphic_name");
- GotoUnless(WordEqual(feedback, p->name), &miss);
- // If the name comparison succeeded, we know we have a FixedArray with
- // at least one map/handler pair.
- Node* offset = ElementOffsetFromIndex(
- p->slot, FAST_HOLEY_ELEMENTS, SMI_PARAMETERS,
- FixedArray::kHeaderSize + kPointerSize - kHeapObjectTag);
- Node* array = Load(MachineType::AnyTagged(), p->vector, offset);
- HandlePolymorphicCase(receiver_map, array, &if_handler, &var_handler, &miss,
- 1);
- }
-
- Bind(&miss);
- {
- Comment("KeyedStoreIC_miss");
- TailCallRuntime(Runtime::kKeyedStoreIC_Miss, p->context, p->value, p->slot,
- p->vector, p->receiver, p->name);
- }
-}
-
-void CodeStubAssembler::LoadGlobalIC(const LoadICParameters* p) {
- Label try_handler(this), miss(this);
- Node* weak_cell =
- LoadFixedArrayElement(p->vector, p->slot, 0, SMI_PARAMETERS);
- CSA_ASSERT(this, HasInstanceType(weak_cell, WEAK_CELL_TYPE));
-
- // Load value or try handler case if the {weak_cell} is cleared.
- Node* property_cell = LoadWeakCellValue(weak_cell, &try_handler);
- CSA_ASSERT(this, HasInstanceType(property_cell, PROPERTY_CELL_TYPE));
-
- Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
- GotoIf(WordEqual(value, TheHoleConstant()), &miss);
- Return(value);
-
- Bind(&try_handler);
- {
- Node* handler =
- LoadFixedArrayElement(p->vector, p->slot, kPointerSize, SMI_PARAMETERS);
- GotoIf(WordEqual(handler, LoadRoot(Heap::kuninitialized_symbolRootIndex)),
- &miss);
-
- // In this case {handler} must be a Code object.
- CSA_ASSERT(this, HasInstanceType(handler, CODE_TYPE));
- LoadWithVectorDescriptor descriptor(isolate());
- Node* native_context = LoadNativeContext(p->context);
- Node* receiver =
- LoadContextElement(native_context, Context::EXTENSION_INDEX);
- Node* fake_name = IntPtrConstant(0);
- TailCallStub(descriptor, handler, p->context, receiver, fake_name, p->slot,
- p->vector);
- }
- Bind(&miss);
- {
- TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot,
- p->vector);
- }
-}
-
-void CodeStubAssembler::ExtendPropertiesBackingStore(compiler::Node* object) {
- Node* properties = LoadProperties(object);
- Node* length = LoadFixedArrayBaseLength(properties);
-
- ParameterMode mode = OptimalParameterMode();
- length = UntagParameter(length, mode);
-
- Node* delta = IntPtrOrSmiConstant(JSObject::kFieldsAdded, mode);
- Node* new_capacity = IntPtrAdd(length, delta);
-
- // Grow properties array.
- ElementsKind kind = FAST_ELEMENTS;
- DCHECK(kMaxNumberOfDescriptors + JSObject::kFieldsAdded <
- FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind));
- // The size of a new properties backing store is guaranteed to be small
- // enough that the new backing store will be allocated in new space.
- CSA_ASSERT(this, UintPtrLessThan(new_capacity,
- IntPtrConstant(kMaxNumberOfDescriptors +
- JSObject::kFieldsAdded)));
-
- Node* new_properties = AllocateFixedArray(kind, new_capacity, mode);
-
- FillFixedArrayWithValue(kind, new_properties, length, new_capacity,
- Heap::kUndefinedValueRootIndex, mode);
-
- // |new_properties| is guaranteed to be in new space, so we can skip
- // the write barrier.
- CopyFixedArrayElements(kind, properties, new_properties, length,
- SKIP_WRITE_BARRIER, mode);
-
- StoreObjectField(object, JSObject::kPropertiesOffset, new_properties);
-}
-
-Node* CodeStubAssembler::PrepareValueForWrite(Node* value,
- Representation representation,
- Label* bailout) {
- if (representation.IsDouble()) {
- value = TryTaggedToFloat64(value, bailout);
- } else if (representation.IsHeapObject()) {
- // Field type is checked by the handler, here we only check if the value
- // is a heap object.
- GotoIf(TaggedIsSmi(value), bailout);
- } else if (representation.IsSmi()) {
- GotoUnless(TaggedIsSmi(value), bailout);
- } else {
- DCHECK(representation.IsTagged());
- }
- return value;
-}
-
-void CodeStubAssembler::StoreNamedField(Node* object, FieldIndex index,
- Representation representation,
- Node* value, bool transition_to_field) {
- DCHECK_EQ(index.is_double(), representation.IsDouble());
-
- StoreNamedField(object, IntPtrConstant(index.offset()), index.is_inobject(),
- representation, value, transition_to_field);
-}
-
-void CodeStubAssembler::StoreNamedField(Node* object, Node* offset,
- bool is_inobject,
- Representation representation,
- Node* value, bool transition_to_field) {
- bool store_value_as_double = representation.IsDouble();
- Node* property_storage = object;
- if (!is_inobject) {
- property_storage = LoadProperties(object);
- }
-
- if (representation.IsDouble()) {
- if (!FLAG_unbox_double_fields || !is_inobject) {
- if (transition_to_field) {
- Node* heap_number = AllocateHeapNumberWithValue(value, MUTABLE);
- // Store the new mutable heap number into the object.
- value = heap_number;
- store_value_as_double = false;
- } else {
- // Load the heap number.
- property_storage = LoadObjectField(property_storage, offset);
- // Store the double value into it.
- offset = IntPtrConstant(HeapNumber::kValueOffset);
- }
- }
- }
-
- if (store_value_as_double) {
- StoreObjectFieldNoWriteBarrier(property_storage, offset, value,
- MachineRepresentation::kFloat64);
- } else if (representation.IsSmi()) {
- StoreObjectFieldNoWriteBarrier(property_storage, offset, value);
- } else {
- StoreObjectField(property_storage, offset, value);
- }
-}
-
Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
Node* value, Label* bailout) {
// Mapped arguments are actual arguments. Unmapped arguments are values added
@@ -6713,7 +5647,7 @@ Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
bool is_load = value == nullptr;
- GotoUnless(TaggedIsSmi(key), bailout);
+ GotoIfNot(TaggedIsSmi(key), bailout);
key = SmiUntag(key);
GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
@@ -6730,37 +5664,33 @@ Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
- Node* mapped_index = LoadFixedArrayElement(
- elements, IntPtrAdd(key, intptr_two), 0, INTPTR_PARAMETERS);
+ Node* mapped_index =
+ LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
Bind(&if_mapped);
{
CSA_ASSERT(this, TaggedIsSmi(mapped_index));
mapped_index = SmiUntag(mapped_index);
- Node* the_context = LoadFixedArrayElement(elements, IntPtrConstant(0), 0,
- INTPTR_PARAMETERS);
+ Node* the_context = LoadFixedArrayElement(elements, 0);
// Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
// methods for accessing Context.
STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
FixedArray::OffsetOfElementAt(0));
if (is_load) {
- Node* result = LoadFixedArrayElement(the_context, mapped_index, 0,
- INTPTR_PARAMETERS);
+ Node* result = LoadFixedArrayElement(the_context, mapped_index);
CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
var_result.Bind(result);
} else {
- StoreFixedArrayElement(the_context, mapped_index, value,
- UPDATE_WRITE_BARRIER, INTPTR_PARAMETERS);
+ StoreFixedArrayElement(the_context, mapped_index, value);
}
Goto(&end);
}
Bind(&if_unmapped);
{
- Node* backing_store = LoadFixedArrayElement(elements, IntPtrConstant(1), 0,
- INTPTR_PARAMETERS);
+ Node* backing_store = LoadFixedArrayElement(elements, 1);
GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
bailout);
@@ -6770,13 +5700,11 @@ Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
// The key falls into unmapped range.
if (is_load) {
- Node* result =
- LoadFixedArrayElement(backing_store, key, 0, INTPTR_PARAMETERS);
+ Node* result = LoadFixedArrayElement(backing_store, key);
GotoIf(WordEqual(result, TheHoleConstant()), bailout);
var_result.Bind(result);
} else {
- StoreFixedArrayElement(backing_store, key, value, UPDATE_WRITE_BARRIER,
- INTPTR_PARAMETERS);
+ StoreFixedArrayElement(backing_store, key, value);
}
Goto(&end);
}
@@ -6844,7 +5772,7 @@ void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
value = Float64SilenceNaN(value);
StoreFixedDoubleArrayElement(elements, index, value, mode);
} else {
- StoreFixedArrayElement(elements, index, value, barrier_mode, mode);
+ StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
}
}
@@ -6852,8 +5780,7 @@ Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
Label done(this);
Node* int32_zero = Int32Constant(0);
Node* int32_255 = Int32Constant(255);
- Variable var_value(this, MachineRepresentation::kWord32);
- var_value.Bind(int32_value);
+ Variable var_value(this, MachineRepresentation::kWord32, int32_value);
GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
var_value.Bind(int32_zero);
GotoIf(Int32LessThan(int32_value, int32_zero), &done);
@@ -6865,8 +5792,7 @@ Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
Label done(this);
- Variable var_value(this, MachineRepresentation::kWord32);
- var_value.Bind(Int32Constant(0));
+ Variable var_value(this, MachineRepresentation::kWord32, Int32Constant(0));
GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
var_value.Bind(Int32Constant(255));
GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
@@ -6909,7 +5835,7 @@ Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
Label done(this, &var_result), if_smi(this);
GotoIf(TaggedIsSmi(input), &if_smi);
// Try to convert a heap number to a Smi.
- GotoUnless(IsHeapNumberMap(LoadMap(input)), bailout);
+ GotoIfNot(IsHeapNumberMap(LoadMap(input)), bailout);
{
Node* value = LoadHeapNumberValue(input);
if (rep == MachineRepresentation::kWord32) {
@@ -6979,24 +5905,20 @@ void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
// Check if buffer has been neutered.
Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
- Node* bitfield = LoadObjectField(buffer, JSArrayBuffer::kBitFieldOffset,
- MachineType::Uint32());
- Node* neutered_bit =
- Word32And(bitfield, Int32Constant(JSArrayBuffer::WasNeutered::kMask));
- GotoUnless(Word32Equal(neutered_bit, Int32Constant(0)), bailout);
+ GotoIf(IsDetachedBuffer(buffer), bailout);
// Bounds check.
- Node* length = UntagParameter(
+ Node* length = TaggedToParameter(
LoadObjectField(object, JSTypedArray::kLengthOffset), parameter_mode);
if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
// Skip the store if we write beyond the length.
- GotoUnless(IntPtrLessThan(key, length), &done);
+ GotoIfNot(IntPtrLessThan(key, length), &done);
// ... but bailout if the key is negative.
} else {
DCHECK_EQ(STANDARD_STORE, store_mode);
}
- GotoUnless(UintPtrLessThan(key, length), bailout);
+ GotoIfNot(UintPtrLessThan(key, length), bailout);
// Backing store = external_pointer + base_pointer.
Node* external_pointer =
@@ -7004,7 +5926,8 @@ void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
MachineType::Pointer());
Node* base_pointer =
LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
- Node* backing_store = IntPtrAdd(external_pointer, base_pointer);
+ Node* backing_store =
+ IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer));
StoreElement(backing_store, elements_kind, key, value, parameter_mode);
Goto(&done);
@@ -7016,13 +5939,13 @@ void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
Node* length = is_jsarray ? LoadObjectField(object, JSArray::kLengthOffset)
: LoadFixedArrayBaseLength(elements);
- length = UntagParameter(length, parameter_mode);
+ length = TaggedToParameter(length, parameter_mode);
// In case value is stored into a fast smi array, assure that the value is
// a smi before manipulating the backing store. Otherwise the backing store
// may be left in an invalid state.
if (IsFastSmiElementsKind(elements_kind)) {
- GotoUnless(TaggedIsSmi(value), bailout);
+ GotoIfNot(TaggedIsSmi(value), bailout);
} else if (IsFastDoubleElementsKind(elements_kind)) {
value = TryTaggedToFloat64(value, bailout);
}
@@ -7031,7 +5954,7 @@ void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
elements = CheckForCapacityGrow(object, elements, elements_kind, length,
key, parameter_mode, is_jsarray, bailout);
} else {
- GotoUnless(UintPtrLessThan(key, length), bailout);
+ GotoIfNot(UintPtrLessThan(key, length), bailout);
if ((store_mode == STORE_NO_TRANSITION_HANDLE_COW) &&
IsFastSmiOrObjectElementsKind(elements_kind)) {
@@ -7061,7 +5984,7 @@ Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
Bind(&grow_case);
{
Node* current_capacity =
- UntagParameter(LoadFixedArrayBaseLength(elements), mode);
+ TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
checked_elements.Bind(elements);
@@ -7079,14 +6002,14 @@ Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
if (is_js_array) {
Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
- TagParameter(new_length, mode));
+ ParameterToTagged(new_length, mode));
}
Goto(&done);
}
Bind(&no_grow_case);
{
- GotoUnless(UintPtrLessThan(key, length), bailout);
+ GotoIfNot(UintPtrLessThan(key, length), bailout);
checked_elements.Bind(elements);
Goto(&done);
}
@@ -7099,15 +6022,15 @@ Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
ElementsKind kind, Node* length,
ParameterMode mode,
Label* bailout) {
- Variable new_elements_var(this, MachineRepresentation::kTagged);
+ Variable new_elements_var(this, MachineRepresentation::kTagged, elements);
Label done(this);
- new_elements_var.Bind(elements);
- GotoUnless(
+ GotoIfNot(
WordEqual(LoadMap(elements), LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
&done);
{
- Node* capacity = UntagParameter(LoadFixedArrayBaseLength(elements), mode);
+ Node* capacity =
+ TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
length, capacity, mode, bailout);
@@ -7119,9 +6042,11 @@ Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
return new_elements_var.value();
}
-void CodeStubAssembler::TransitionElementsKind(
- compiler::Node* object, compiler::Node* map, ElementsKind from_kind,
- ElementsKind to_kind, bool is_jsarray, Label* bailout) {
+void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
+ ElementsKind from_kind,
+ ElementsKind to_kind,
+ bool is_jsarray,
+ Label* bailout) {
DCHECK(!IsFastHoleyElementsKind(from_kind) ||
IsFastHoleyElementsKind(to_kind));
if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
@@ -7151,7 +6076,7 @@ void CodeStubAssembler::TransitionElementsKind(
Bind(&done);
}
- StoreObjectField(object, JSObject::kMapOffset, map);
+ StoreMap(object, map);
}
void CodeStubAssembler::TrapAllocationMemento(Node* object,
@@ -7167,7 +6092,8 @@ void CodeStubAssembler::TrapAllocationMemento(Node* object,
kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
// Bail out if the object is not in new space.
- Node* object_page = PageFromAddress(object);
+ Node* object_word = BitcastTaggedToWord(object);
+ Node* object_page = PageFromAddress(object_word);
{
Node* page_flags = Load(MachineType::IntPtr(), object_page,
IntPtrConstant(Page::kFlagsOffset));
@@ -7178,7 +6104,7 @@ void CodeStubAssembler::TrapAllocationMemento(Node* object,
}
Node* memento_last_word = IntPtrAdd(
- object, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
+ object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
Node* memento_last_word_page = PageFromAddress(memento_last_word);
Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);
@@ -7227,11 +6153,10 @@ Node* CodeStubAssembler::EnumLength(Node* map) {
void CodeStubAssembler::CheckEnumCache(Node* receiver, Label* use_cache,
Label* use_runtime) {
- Variable current_js_object(this, MachineRepresentation::kTagged);
- current_js_object.Bind(receiver);
+ Variable current_js_object(this, MachineRepresentation::kTagged, receiver);
- Variable current_map(this, MachineRepresentation::kTagged);
- current_map.Bind(LoadMap(current_js_object.value()));
+ Variable current_map(this, MachineRepresentation::kTagged,
+ LoadMap(current_js_object.value()));
// These variables are updated in the loop below.
Variable* loop_vars[2] = {&current_js_object, &current_map};
@@ -7291,15 +6216,13 @@ Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
Node* size = IntPtrConstant(AllocationSite::kSize);
Node* site = Allocate(size, CodeStubAssembler::kPretenured);
- // Store the map
- StoreObjectFieldRoot(site, AllocationSite::kMapOffset,
- Heap::kAllocationSiteMapRootIndex);
- Node* kind = SmiConstant(Smi::FromInt(GetInitialFastElementsKind()));
+ StoreMap(site, AllocationSiteMapConstant());
+ Node* kind = SmiConstant(GetInitialFastElementsKind());
StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
kind);
// Unlike literals, constructed arrays don't have nested sites
- Node* zero = IntPtrConstant(0);
+ Node* zero = SmiConstant(0);
StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
// Pretenuring calculation field.
@@ -7327,7 +6250,7 @@ Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
- StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER,
+ StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
CodeStubAssembler::SMI_PARAMETERS);
return site;
}
@@ -7339,26 +6262,28 @@ Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
// Initialize the WeakCell.
- StoreObjectFieldRoot(cell, WeakCell::kMapOffset, Heap::kWeakCellMapRootIndex);
+ DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
+ StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
StoreObjectField(cell, WeakCell::kValueOffset, value);
StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
Heap::kTheHoleValueRootIndex);
// Store the WeakCell in the feedback vector.
- StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER,
+ StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0,
CodeStubAssembler::SMI_PARAMETERS);
return cell;
}
-void CodeStubAssembler::BuildFastLoop(
- const CodeStubAssembler::VariableList& vars,
- MachineRepresentation index_rep, Node* start_index, Node* end_index,
- std::function<void(CodeStubAssembler* assembler, Node* index)> body,
- int increment, IndexAdvanceMode mode) {
- Variable var(this, index_rep);
+Node* CodeStubAssembler::BuildFastLoop(
+ const CodeStubAssembler::VariableList& vars, Node* start_index,
+ Node* end_index, const FastLoopBody& body, int increment,
+ ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
+ MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
+ ? MachineType::PointerRepresentation()
+ : MachineRepresentation::kTaggedSigned;
+ Variable var(this, index_rep, start_index);
VariableList vars_copy(vars, zone());
vars_copy.Add(&var, zone());
- var.Bind(start_index);
Label loop(this, vars_copy);
Label after_loop(this);
// Introduce an explicit second check of the termination condition before the
@@ -7371,25 +6296,23 @@ void CodeStubAssembler::BuildFastLoop(
Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
Bind(&loop);
{
- if (mode == IndexAdvanceMode::kPre) {
- var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
+ if (advance_mode == IndexAdvanceMode::kPre) {
+ Increment(var, increment, parameter_mode);
}
- body(this, var.value());
- if (mode == IndexAdvanceMode::kPost) {
- var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
+ body(var.value());
+ if (advance_mode == IndexAdvanceMode::kPost) {
+ Increment(var, increment, parameter_mode);
}
Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
}
Bind(&after_loop);
+ return var.value();
}
void CodeStubAssembler::BuildFastFixedArrayForEach(
- compiler::Node* fixed_array, ElementsKind kind,
- compiler::Node* first_element_inclusive,
- compiler::Node* last_element_exclusive,
- std::function<void(CodeStubAssembler* assembler,
- compiler::Node* fixed_array, compiler::Node* offset)>
- body,
+ const CodeStubAssembler::VariableList& vars, Node* fixed_array,
+ ElementsKind kind, Node* first_element_inclusive,
+ Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
ParameterMode mode, ForEachDirection direction) {
STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
int32_t first_val;
@@ -7406,7 +6329,7 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
Node* offset =
ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
FixedArray::kHeaderSize - kHeapObjectTag);
- body(this, fixed_array, offset);
+ body(fixed_array, offset);
}
} else {
for (int i = last_val - 1; i >= first_val; --i) {
@@ -7414,7 +6337,7 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
Node* offset =
ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
FixedArray::kHeaderSize - kHeapObjectTag);
- body(this, fixed_array, offset);
+ body(fixed_array, offset);
}
}
return;
@@ -7431,20 +6354,42 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
int increment = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
BuildFastLoop(
- MachineType::PointerRepresentation(), start, limit,
- [fixed_array, body](CodeStubAssembler* assembler, Node* offset) {
- body(assembler, fixed_array, offset);
- },
+ vars, start, limit,
+ [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
direction == ForEachDirection::kReverse ? -increment : increment,
+ INTPTR_PARAMETERS,
direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
: IndexAdvanceMode::kPost);
}
-void CodeStubAssembler::BranchIfNumericRelationalComparison(
- RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs,
- Label* if_true, Label* if_false) {
- typedef compiler::Node Node;
+void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
+ Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
+ int max_newspace_parameters =
+ (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
+ GotoIf(IntPtrOrSmiGreaterThan(
+ element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode),
+ mode),
+ doesnt_fit);
+}
+
+void CodeStubAssembler::InitializeFieldsWithRoot(
+ Node* object, Node* start_offset, Node* end_offset,
+ Heap::RootListIndex root_index) {
+ start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
+ end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
+ Node* root_value = LoadRoot(root_index);
+ BuildFastLoop(end_offset, start_offset,
+ [this, object, root_value](Node* current) {
+ StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
+ current, root_value);
+ },
+ -kPointerSize, INTPTR_PARAMETERS,
+ CodeStubAssembler::IndexAdvanceMode::kPre);
+}
+void CodeStubAssembler::BranchIfNumericRelationalComparison(
+ RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
+ Label* if_false) {
Label end(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -7484,7 +6429,7 @@ void CodeStubAssembler::BranchIfNumericRelationalComparison(
Bind(&if_rhsisnotsmi);
{
- CSA_ASSERT(this, WordEqual(LoadMap(rhs), HeapNumberMapConstant()));
+ CSA_ASSERT(this, IsHeapNumberMap(LoadMap(rhs)));
// Convert the {lhs} and {rhs} to floating point values, and
// perform a floating point comparison.
var_fcmp_lhs.Bind(SmiToFloat64(lhs));
@@ -7495,7 +6440,7 @@ void CodeStubAssembler::BranchIfNumericRelationalComparison(
Bind(&if_lhsisnotsmi);
{
- CSA_ASSERT(this, WordEqual(LoadMap(lhs), HeapNumberMapConstant()));
+ CSA_ASSERT(this, IsHeapNumberMap(LoadMap(lhs)));
// Check if {rhs} is a Smi or a HeapObject.
Label if_rhsissmi(this), if_rhsisnotsmi(this);
@@ -7512,7 +6457,7 @@ void CodeStubAssembler::BranchIfNumericRelationalComparison(
Bind(&if_rhsisnotsmi);
{
- CSA_ASSERT(this, WordEqual(LoadMap(rhs), HeapNumberMapConstant()));
+ CSA_ASSERT(this, IsHeapNumberMap(LoadMap(rhs)));
// Convert the {lhs} and {rhs} to floating point values, and
// perform a floating point comparison.
@@ -7546,19 +6491,16 @@ void CodeStubAssembler::BranchIfNumericRelationalComparison(
}
}
-void CodeStubAssembler::GotoUnlessNumberLessThan(compiler::Node* lhs,
- compiler::Node* rhs,
+void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs,
Label* if_false) {
Label if_true(this);
BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false);
Bind(&if_true);
}
-compiler::Node* CodeStubAssembler::RelationalComparison(
- RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs,
- compiler::Node* context) {
- typedef compiler::Node Node;
-
+Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode,
+ Node* lhs, Node* rhs,
+ Node* context) {
Label return_true(this), return_false(this), end(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -7569,12 +6511,10 @@ compiler::Node* CodeStubAssembler::RelationalComparison(
// We might need to loop several times due to ToPrimitive and/or ToNumber
// conversions.
- Variable var_lhs(this, MachineRepresentation::kTagged),
- var_rhs(this, MachineRepresentation::kTagged);
+ Variable var_lhs(this, MachineRepresentation::kTagged, lhs),
+ var_rhs(this, MachineRepresentation::kTagged, rhs);
Variable* loop_vars[2] = {&var_lhs, &var_rhs};
Label loop(this, 2, loop_vars);
- var_lhs.Bind(lhs);
- var_rhs.Bind(rhs);
Goto(&loop);
Bind(&loop);
{
@@ -7644,9 +6584,6 @@ compiler::Node* CodeStubAssembler::RelationalComparison(
Bind(&if_lhsisnotsmi);
{
- // Load the HeapNumber map for later comparisons.
- Node* number_map = HeapNumberMapConstant();
-
// Load the map of {lhs}.
Node* lhs_map = LoadMap(lhs);
@@ -7658,8 +6595,7 @@ compiler::Node* CodeStubAssembler::RelationalComparison(
{
// Check if the {lhs} is a HeapNumber.
Label if_lhsisnumber(this), if_lhsisnotnumber(this, Label::kDeferred);
- Branch(WordEqual(lhs_map, number_map), &if_lhsisnumber,
- &if_lhsisnotnumber);
+ Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
Bind(&if_lhsisnumber);
{
@@ -7689,8 +6625,7 @@ compiler::Node* CodeStubAssembler::RelationalComparison(
// Check if {lhs} is a HeapNumber.
Label if_lhsisnumber(this), if_lhsisnotnumber(this);
- Branch(WordEqual(lhs_map, number_map), &if_lhsisnumber,
- &if_lhsisnotnumber);
+ Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
Bind(&if_lhsisnumber);
{
@@ -7879,17 +6814,14 @@ compiler::Node* CodeStubAssembler::RelationalComparison(
namespace {
-void GenerateEqual_Same(CodeStubAssembler* assembler, compiler::Node* value,
+void GenerateEqual_Same(CodeStubAssembler* assembler, Node* value,
CodeStubAssembler::Label* if_equal,
CodeStubAssembler::Label* if_notequal) {
// In case of abstract or strict equality checks, we need additional checks
// for NaN values because they are not considered equal, even if both the
// left and the right hand side reference exactly the same value.
- // TODO(bmeurer): This seems to violate the SIMD.js specification, but it
- // seems to be what is tested in the current SIMD.js testsuite.
typedef CodeStubAssembler::Label Label;
- typedef compiler::Node Node;
// Check if {value} is a Smi or a HeapObject.
Label if_valueissmi(assembler), if_valueisnotsmi(assembler);
@@ -7922,26 +6854,15 @@ void GenerateEqual_Same(CodeStubAssembler* assembler, compiler::Node* value,
assembler->Bind(&if_valueissmi);
assembler->Goto(if_equal);
}
-
-void GenerateEqual_Simd128Value_HeapObject(
- CodeStubAssembler* assembler, compiler::Node* lhs, compiler::Node* lhs_map,
- compiler::Node* rhs, compiler::Node* rhs_map,
- CodeStubAssembler::Label* if_equal, CodeStubAssembler::Label* if_notequal) {
- assembler->BranchIfSimd128Equal(lhs, lhs_map, rhs, rhs_map, if_equal,
- if_notequal);
-}
-
} // namespace
// ES6 section 7.2.12 Abstract Equality Comparison
-compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
- compiler::Node* rhs,
- compiler::Node* context) {
+Node* CodeStubAssembler::Equal(ResultMode mode, Node* lhs, Node* rhs,
+ Node* context) {
// This is a slightly optimized version of Object::Equals represented as
// scheduled TurboFan graph utilizing the CodeStubAssembler. Whenever you
// change something functionality wise in here, remember to update the
// Object::Equals method as well.
- typedef compiler::Node Node;
Label if_equal(this), if_notequal(this),
do_rhsstringtonumber(this, Label::kDeferred), end(this);
@@ -7954,12 +6875,10 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
// We might need to loop several times due to ToPrimitive and/or ToNumber
// conversions.
- Variable var_lhs(this, MachineRepresentation::kTagged),
- var_rhs(this, MachineRepresentation::kTagged);
+ Variable var_lhs(this, MachineRepresentation::kTagged, lhs),
+ var_rhs(this, MachineRepresentation::kTagged, rhs);
Variable* loop_vars[2] = {&var_lhs, &var_rhs};
Label loop(this, 2, loop_vars);
- var_lhs.Bind(lhs);
- var_rhs.Bind(rhs);
Goto(&loop);
Bind(&loop);
{
@@ -8001,10 +6920,8 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
Node* rhs_map = LoadMap(rhs);
// Check if {rhs} is a HeapNumber.
- Node* number_map = HeapNumberMapConstant();
Label if_rhsisnumber(this), if_rhsisnotnumber(this);
- Branch(WordEqual(rhs_map, number_map), &if_rhsisnumber,
- &if_rhsisnotnumber);
+ Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
Bind(&if_rhsisnumber);
{
@@ -8094,8 +7011,8 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
Bind(&if_rhsisnotsmi);
{
Label if_lhsisstring(this), if_lhsisnumber(this),
- if_lhsissymbol(this), if_lhsissimd128value(this),
- if_lhsisoddball(this), if_lhsisreceiver(this);
+ if_lhsissymbol(this), if_lhsisoddball(this),
+ if_lhsisreceiver(this);
// Both {lhs} and {rhs} are HeapObjects, load their maps
// and their instance types.
@@ -8107,7 +7024,7 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
// Dispatch based on the instance type of {lhs}.
- size_t const kNumCases = FIRST_NONSTRING_TYPE + 4;
+ size_t const kNumCases = FIRST_NONSTRING_TYPE + 3;
Label* case_labels[kNumCases];
int32_t case_values[kNumCases];
for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
@@ -8118,10 +7035,8 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
case_values[FIRST_NONSTRING_TYPE + 0] = HEAP_NUMBER_TYPE;
case_labels[FIRST_NONSTRING_TYPE + 1] = &if_lhsissymbol;
case_values[FIRST_NONSTRING_TYPE + 1] = SYMBOL_TYPE;
- case_labels[FIRST_NONSTRING_TYPE + 2] = &if_lhsissimd128value;
- case_values[FIRST_NONSTRING_TYPE + 2] = SIMD128_VALUE_TYPE;
- case_labels[FIRST_NONSTRING_TYPE + 3] = &if_lhsisoddball;
- case_values[FIRST_NONSTRING_TYPE + 3] = ODDBALL_TYPE;
+ case_labels[FIRST_NONSTRING_TYPE + 2] = &if_lhsisoddball;
+ case_values[FIRST_NONSTRING_TYPE + 2] = ODDBALL_TYPE;
Switch(lhs_instance_type, &if_lhsisreceiver, case_values, case_labels,
arraysize(case_values));
for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
@@ -8305,47 +7220,6 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
}
}
- Bind(&if_lhsissimd128value);
- {
- // Check if the {rhs} is also a Simd128Value.
- Label if_rhsissimd128value(this), if_rhsisnotsimd128value(this);
- Branch(Word32Equal(lhs_instance_type, rhs_instance_type),
- &if_rhsissimd128value, &if_rhsisnotsimd128value);
-
- Bind(&if_rhsissimd128value);
- {
- // Both {lhs} and {rhs} is a Simd128Value.
- GenerateEqual_Simd128Value_HeapObject(
- this, lhs, lhs_map, rhs, rhs_map, &if_equal, &if_notequal);
- }
-
- Bind(&if_rhsisnotsimd128value);
- {
- // Check if the {rhs} is a JSReceiver.
- Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
- STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- Branch(IsJSReceiverInstanceType(rhs_instance_type),
- &if_rhsisreceiver, &if_rhsisnotreceiver);
-
- Bind(&if_rhsisreceiver);
- {
- // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
- // Swapping {lhs} and {rhs} is not observable and doesn't
- // matter for the result, so we can just swap them and use
- // the JSReceiver handling below (for {lhs} being a JSReceiver).
- var_lhs.Bind(rhs);
- var_rhs.Bind(lhs);
- Goto(&loop);
- }
-
- Bind(&if_rhsisnotreceiver);
- {
- // The {rhs} is some other Primitive.
- Goto(&if_notequal);
- }
- }
- }
-
Bind(&if_lhsisreceiver);
{
// Check if the {rhs} is also a JSReceiver.
@@ -8435,10 +7309,8 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
return result.value();
}
-compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
- compiler::Node* lhs,
- compiler::Node* rhs,
- compiler::Node* context) {
+Node* CodeStubAssembler::StrictEqual(ResultMode mode, Node* lhs, Node* rhs,
+ Node* context) {
// Here's pseudo-code for the algorithm below in case of kDontNegateResult
// mode; for kNegateResult mode we properly negate the result.
//
@@ -8466,10 +7338,6 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
// } else {
// return false;
// }
- // } else if (lhs->IsSimd128()) {
- // if (rhs->IsSimd128()) {
- // return %StrictEqual(lhs, rhs);
- // }
// } else {
// return false;
// }
@@ -8487,8 +7355,6 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
// }
// }
- typedef compiler::Node Node;
-
Label if_equal(this), if_notequal(this), end(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -8505,9 +7371,8 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
Bind(&if_notsame);
{
- // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
- // String and Simd128Value they can still be considered equal.
- Node* number_map = HeapNumberMapConstant();
+ // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber
+ // and String they can still be considered equal.
// Check if {lhs} is a Smi or a HeapObject.
Label if_lhsissmi(this), if_lhsisnotsmi(this);
@@ -8520,8 +7385,7 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
// Check if {lhs} is a HeapNumber.
Label if_lhsisnumber(this), if_lhsisnotnumber(this);
- Branch(WordEqual(lhs_map, number_map), &if_lhsisnumber,
- &if_lhsisnotnumber);
+ Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
Bind(&if_lhsisnumber);
{
@@ -8546,8 +7410,7 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
// Check if {rhs} is also a HeapNumber.
Label if_rhsisnumber(this), if_rhsisnotnumber(this);
- Branch(WordEqual(rhs_map, number_map), &if_rhsisnumber,
- &if_rhsisnotnumber);
+ Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
Bind(&if_rhsisnumber);
{
@@ -8608,26 +7471,7 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
}
Bind(&if_lhsisnotstring);
- {
- // Check if {lhs} is a Simd128Value.
- Label if_lhsissimd128value(this), if_lhsisnotsimd128value(this);
- Branch(Word32Equal(lhs_instance_type,
- Int32Constant(SIMD128_VALUE_TYPE)),
- &if_lhsissimd128value, &if_lhsisnotsimd128value);
-
- Bind(&if_lhsissimd128value);
- {
- // Load the map of {rhs}.
- Node* rhs_map = LoadMap(rhs);
-
- // Check if {rhs} is also a Simd128Value that is equal to {lhs}.
- GenerateEqual_Simd128Value_HeapObject(
- this, lhs, lhs_map, rhs, rhs_map, &if_equal, &if_notequal);
- }
-
- Bind(&if_lhsisnotsimd128value);
- Goto(&if_notequal);
- }
+ Goto(&if_notequal);
}
}
}
@@ -8652,8 +7496,7 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
// The {rhs} could be a HeapNumber with the same value as {lhs}.
Label if_rhsisnumber(this), if_rhsisnotnumber(this);
- Branch(WordEqual(rhs_map, number_map), &if_rhsisnumber,
- &if_rhsisnotnumber);
+ Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
Bind(&if_rhsisnumber);
{
@@ -8690,14 +7533,12 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
// ECMA#sec-samevalue
// This algorithm differs from the Strict Equality Comparison Algorithm in its
// treatment of signed zeroes and NaNs.
-compiler::Node* CodeStubAssembler::SameValue(compiler::Node* lhs,
- compiler::Node* rhs,
- compiler::Node* context) {
- Variable var_result(this, MachineType::PointerRepresentation());
+Node* CodeStubAssembler::SameValue(Node* lhs, Node* rhs, Node* context) {
+ Variable var_result(this, MachineRepresentation::kWord32);
Label strict_equal(this), out(this);
- Node* const int_false = IntPtrConstant(0);
- Node* const int_true = IntPtrConstant(1);
+ Node* const int_false = Int32Constant(0);
+ Node* const int_true = Int32Constant(1);
Label if_equal(this), if_notequal(this);
Branch(WordEqual(lhs, rhs), &if_equal, &if_notequal);
@@ -8727,8 +7568,8 @@ compiler::Node* CodeStubAssembler::SameValue(compiler::Node* lhs,
// Return true iff {rhs} is NaN.
Node* const result =
- Select(Float64Equal(rhs_float, rhs_float), int_false, int_true,
- MachineType::PointerRepresentation());
+ SelectConstant(Float64Equal(rhs_float, rhs_float), int_false,
+ int_true, MachineRepresentation::kWord32);
var_result.Bind(result);
Goto(&out);
}
@@ -8776,9 +7617,7 @@ compiler::Node* CodeStubAssembler::SameValue(compiler::Node* lhs,
return var_result.value();
}
-compiler::Node* CodeStubAssembler::ForInFilter(compiler::Node* key,
- compiler::Node* object,
- compiler::Node* context) {
+Node* CodeStubAssembler::ForInFilter(Node* key, Node* object, Node* context) {
Label return_undefined(this, Label::kDeferred), return_to_name(this),
end(this);
@@ -8806,13 +7645,9 @@ compiler::Node* CodeStubAssembler::ForInFilter(compiler::Node* key,
return var_result.value();
}
-compiler::Node* CodeStubAssembler::HasProperty(
- compiler::Node* object, compiler::Node* key, compiler::Node* context,
+Node* CodeStubAssembler::HasProperty(
+ Node* object, Node* key, Node* context,
Runtime::FunctionId fallback_runtime_function_id) {
- typedef compiler::Node Node;
- typedef CodeStubAssembler::Label Label;
- typedef CodeStubAssembler::Variable Variable;
-
Label call_runtime(this, Label::kDeferred), return_true(this),
return_false(this), end(this);
@@ -8860,8 +7695,58 @@ compiler::Node* CodeStubAssembler::HasProperty(
return result.value();
}
-compiler::Node* CodeStubAssembler::Typeof(compiler::Node* value,
- compiler::Node* context) {
+Node* CodeStubAssembler::ClassOf(Node* value) {
+ Variable var_result(this, MachineRepresentation::kTaggedPointer);
+ Label if_function(this, Label::kDeferred), if_object(this, Label::kDeferred),
+ if_primitive(this, Label::kDeferred), return_result(this);
+
+ // Check if {value} is a Smi.
+ GotoIf(TaggedIsSmi(value), &if_primitive);
+
+ Node* value_map = LoadMap(value);
+ Node* value_instance_type = LoadMapInstanceType(value_map);
+
+ // Check if {value} is a JSFunction or JSBoundFunction.
+ STATIC_ASSERT(LAST_TYPE == LAST_FUNCTION_TYPE);
+ GotoIf(Uint32LessThanOrEqual(Int32Constant(FIRST_FUNCTION_TYPE),
+ value_instance_type),
+ &if_function);
+
+ // Check if {value} is a primitive HeapObject.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ GotoIf(Uint32LessThan(value_instance_type,
+ Int32Constant(FIRST_JS_RECEIVER_TYPE)),
+ &if_primitive);
+
+ // Load the {value}s constructor, and check that it's a JSFunction.
+ Node* constructor = LoadMapConstructor(value_map);
+ GotoIfNot(IsJSFunction(constructor), &if_object);
+
+ // Return the instance class name for the {constructor}.
+ Node* shared_info =
+ LoadObjectField(constructor, JSFunction::kSharedFunctionInfoOffset);
+ Node* instance_class_name = LoadObjectField(
+ shared_info, SharedFunctionInfo::kInstanceClassNameOffset);
+ var_result.Bind(instance_class_name);
+ Goto(&return_result);
+
+ Bind(&if_function);
+ var_result.Bind(LoadRoot(Heap::kFunction_stringRootIndex));
+ Goto(&return_result);
+
+ Bind(&if_object);
+ var_result.Bind(LoadRoot(Heap::kObject_stringRootIndex));
+ Goto(&return_result);
+
+ Bind(&if_primitive);
+ var_result.Bind(NullConstant());
+ Goto(&return_result);
+
+ Bind(&return_result);
+ return var_result.value();
+}
+
+Node* CodeStubAssembler::Typeof(Node* value, Node* context) {
Variable result_var(this, MachineRepresentation::kTagged);
Label return_number(this, Label::kDeferred), if_oddball(this),
@@ -8886,20 +7771,13 @@ compiler::Node* CodeStubAssembler::Typeof(compiler::Node* value,
Int32Constant(1 << Map::kIsCallable)),
&return_function);
- GotoUnless(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
- &return_undefined);
+ GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
+ &return_undefined);
GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
GotoIf(IsStringInstanceType(instance_type), &return_string);
-#define SIMD128_BRANCH(TYPE, Type, type, lane_count, lane_type) \
- Label return_##type(this); \
- Node* type##_map = HeapConstant(factory()->type##_map()); \
- GotoIf(WordEqual(map, type##_map), &return_##type);
- SIMD128_TYPES(SIMD128_BRANCH)
-#undef SIMD128_BRANCH
-
CSA_ASSERT(this, Word32Equal(instance_type, Int32Constant(SYMBOL_TYPE)));
result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
Goto(&return_result);
@@ -8941,52 +7819,121 @@ compiler::Node* CodeStubAssembler::Typeof(compiler::Node* value,
Goto(&return_result);
}
-#define SIMD128_BIND_RETURN(TYPE, Type, type, lane_count, lane_type) \
- Bind(&return_##type); \
- { \
- result_var.Bind(HeapConstant(isolate()->factory()->type##_string())); \
- Goto(&return_result); \
- }
- SIMD128_TYPES(SIMD128_BIND_RETURN)
-#undef SIMD128_BIND_RETURN
-
Bind(&return_result);
return result_var.value();
}
-compiler::Node* CodeStubAssembler::InstanceOf(compiler::Node* object,
- compiler::Node* callable,
- compiler::Node* context) {
- Label return_runtime(this, Label::kDeferred), end(this);
- Variable result(this, MachineRepresentation::kTagged);
+Node* CodeStubAssembler::GetSuperConstructor(Node* active_function,
+ Node* context) {
+ CSA_ASSERT(this, IsJSFunction(active_function));
- // Check if no one installed @@hasInstance somewhere.
- GotoUnless(
- WordEqual(LoadObjectField(LoadRoot(Heap::kHasInstanceProtectorRootIndex),
- PropertyCell::kValueOffset),
- SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
- &return_runtime);
+ Label is_not_constructor(this, Label::kDeferred), out(this);
+ Variable result(this, MachineRepresentation::kTagged);
- // Check if {callable} is a valid receiver.
- GotoIf(TaggedIsSmi(callable), &return_runtime);
- GotoUnless(IsCallableMap(LoadMap(callable)), &return_runtime);
+ Node* map = LoadMap(active_function);
+ Node* prototype = LoadMapPrototype(map);
+ Node* prototype_map = LoadMap(prototype);
+ GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
- // Use the inline OrdinaryHasInstance directly.
- result.Bind(OrdinaryHasInstance(context, callable, object));
- Goto(&end);
+ result.Bind(prototype);
+ Goto(&out);
- // TODO(bmeurer): Use GetPropertyStub here once available.
- Bind(&return_runtime);
+ Bind(&is_not_constructor);
{
- result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable));
- Goto(&end);
+ CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
+ active_function);
+ Unreachable();
}
- Bind(&end);
+ Bind(&out);
return result.value();
}
-compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) {
+Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
+ Node* context) {
+ Variable var_result(this, MachineRepresentation::kTagged);
+ Label if_notcallable(this, Label::kDeferred),
+ if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
+ if_nohandler(this, Label::kDeferred), return_true(this),
+ return_false(this), return_result(this, &var_result);
+
+ // Ensure that the {callable} is actually a JSReceiver.
+ GotoIf(TaggedIsSmi(callable), &if_notreceiver);
+ GotoIfNot(IsJSReceiver(callable), &if_notreceiver);
+
+ // Load the @@hasInstance property from {callable}.
+ Node* inst_of_handler = CallStub(CodeFactory::GetProperty(isolate()), context,
+ callable, HasInstanceSymbolConstant());
+
+ // Optimize for the likely case where {inst_of_handler} is the builtin
+ // Function.prototype[@@hasInstance] method, and emit a direct call in
+ // that case without any additional checking.
+ Node* native_context = LoadNativeContext(context);
+ Node* function_has_instance =
+ LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
+ GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
+ &if_otherhandler);
+ {
+ // Call to Function.prototype[@@hasInstance] directly.
+ Callable builtin(isolate()->builtins()->FunctionPrototypeHasInstance(),
+ CallTrampolineDescriptor(isolate()));
+ Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
+ var_result.Bind(result);
+ Goto(&return_result);
+ }
+
+ Bind(&if_otherhandler);
+ {
+ // Check if there's actually an {inst_of_handler}.
+ GotoIf(IsNull(inst_of_handler), &if_nohandler);
+ GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
+
+ // Call the {inst_of_handler} for {callable} and {object}.
+ Node* result = CallJS(
+ CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
+ context, inst_of_handler, callable, object);
+
+ // Convert the {result} to a Boolean.
+ BranchIfToBooleanIsTrue(result, &return_true, &return_false);
+ }
+
+ Bind(&if_nohandler);
+ {
+ // Ensure that the {callable} is actually Callable.
+ GotoIfNot(IsCallable(callable), &if_notcallable);
+
+ // Use the OrdinaryHasInstance algorithm.
+ Node* result = CallStub(CodeFactory::OrdinaryHasInstance(isolate()),
+ context, callable, object);
+ var_result.Bind(result);
+ Goto(&return_result);
+ }
+
+ Bind(&if_notcallable);
+ {
+ CallRuntime(Runtime::kThrowNonCallableInInstanceOfCheck, context);
+ Unreachable();
+ }
+
+ Bind(&if_notreceiver);
+ {
+ CallRuntime(Runtime::kThrowNonObjectInInstanceOfCheck, context);
+ Unreachable();
+ }
+
+ Bind(&return_true);
+ var_result.Bind(TrueConstant());
+ Goto(&return_result);
+
+ Bind(&return_false);
+ var_result.Bind(FalseConstant());
+ Goto(&return_result);
+
+ Bind(&return_result);
+ return var_result.value();
+}
+
+Node* CodeStubAssembler::NumberInc(Node* value) {
Variable var_result(this, MachineRepresentation::kTagged),
var_finc_value(this, MachineRepresentation::kFloat64);
Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
@@ -9005,7 +7952,7 @@ compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) {
Branch(overflow, &if_overflow, &if_notoverflow);
Bind(&if_notoverflow);
- var_result.Bind(Projection(0, pair));
+ var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
Goto(&end);
Bind(&if_overflow);
@@ -9038,9 +7985,23 @@ compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) {
return var_result.value();
}
-compiler::Node* CodeStubAssembler::CreateArrayIterator(
- compiler::Node* array, compiler::Node* array_map,
- compiler::Node* array_type, compiler::Node* context, IterationKind mode) {
+void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
+ Label is_number(this);
+ GotoIf(TaggedIsSmi(input), &is_number);
+ Node* input_map = LoadMap(input);
+ Branch(IsHeapNumberMap(input_map), &is_number, is_not_number);
+ Bind(&is_number);
+}
+
+void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
+ GotoIf(TaggedIsSmi(input), is_number);
+ Node* input_map = LoadMap(input);
+ GotoIf(IsHeapNumberMap(input_map), is_number);
+}
+
+Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
+ Node* array_type, Node* context,
+ IterationKind mode) {
int kBaseMapIndex = 0;
switch (mode) {
case IterationKind::kKeys:
@@ -9094,7 +8055,8 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
Bind(&if_isgeneric);
{
Label if_isfast(this), if_isslow(this);
- BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);
+ BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
+ &if_isfast, &if_isslow);
Bind(&if_isfast);
{
@@ -9128,7 +8090,8 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
Bind(&if_isgeneric);
{
Label if_isfast(this), if_isslow(this);
- BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);
+ BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
+ &if_isfast, &if_isslow);
Bind(&if_isfast);
{
@@ -9146,7 +8109,7 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
// here, and take the slow path if any fail.
Node* protector_cell = LoadRoot(Heap::kArrayProtectorRootIndex);
DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
- GotoUnless(
+ GotoIfNot(
WordEqual(
LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
@@ -9157,13 +8120,13 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
Node* prototype = LoadMapPrototype(array_map);
Node* array_prototype = LoadContextElement(
native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
- GotoUnless(WordEqual(prototype, array_prototype), &if_isslow);
+ GotoIfNot(WordEqual(prototype, array_prototype), &if_isslow);
Node* map = LoadMap(prototype);
prototype = LoadMapPrototype(map);
Node* object_prototype = LoadContextElement(
native_context, Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
- GotoUnless(WordEqual(prototype, object_prototype), &if_isslow);
+ GotoIfNot(WordEqual(prototype, object_prototype), &if_isslow);
map = LoadMap(prototype);
prototype = LoadMapPrototype(map);
@@ -9173,7 +8136,7 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
{
Node* map_index =
IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
- LoadMapElementsKind(array_map));
+ ChangeUint32ToWord(LoadMapElementsKind(array_map)));
CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
map_index, IntPtrConstant(kBaseMapIndex +
kFastIteratorOffset)));
@@ -9201,7 +8164,7 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
{
Node* map_index =
IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS),
- LoadMapElementsKind(array_map));
+ ChangeUint32ToWord(LoadMapElementsKind(array_map)));
CSA_ASSERT(
this, IntPtrLessThan(map_index, IntPtrConstant(kBaseMapIndex +
kFastIteratorOffset)));
@@ -9215,9 +8178,8 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
Bind(&allocate_iterator);
{
- Node* map =
- LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(),
- 0, CodeStubAssembler::INTPTR_PARAMETERS);
+ Node* map = LoadFixedArrayElement(LoadNativeContext(context),
+ var_map_index.value());
var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map));
Goto(&return_result);
}
@@ -9226,8 +8188,8 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
return var_result.value();
}
-compiler::Node* CodeStubAssembler::AllocateJSArrayIterator(
- compiler::Node* array, compiler::Node* array_map, compiler::Node* map) {
+Node* CodeStubAssembler::AllocateJSArrayIterator(Node* array, Node* array_map,
+ Node* map) {
Node* iterator = Allocate(JSArrayIterator::kSize);
StoreMapNoWriteBarrier(iterator, map);
StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset,
@@ -9243,96 +8205,94 @@ compiler::Node* CodeStubAssembler::AllocateJSArrayIterator(
return iterator;
}
-compiler::Node* CodeStubAssembler::IsDetachedBuffer(compiler::Node* buffer) {
+Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
Node* buffer_bit_field = LoadObjectField(
buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32());
- Node* was_neutered_mask = Int32Constant(JSArrayBuffer::WasNeutered::kMask);
-
- return Word32NotEqual(Word32And(buffer_bit_field, was_neutered_mask),
- Int32Constant(0));
+ return IsSetWord32<JSArrayBuffer::WasNeutered>(buffer_bit_field);
}
-CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler,
- compiler::Node* argc,
+CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler, Node* argc,
+ Node* fp,
CodeStubAssembler::ParameterMode mode)
: assembler_(assembler),
+ argc_mode_(mode),
argc_(argc),
arguments_(nullptr),
- fp_(assembler->LoadFramePointer()) {
- compiler::Node* offset = assembler->ElementOffsetFromIndex(
+ fp_(fp != nullptr ? fp : assembler->LoadFramePointer()) {
+ Node* offset = assembler->ElementOffsetFromIndex(
argc_, FAST_ELEMENTS, mode,
(StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
- arguments_ = assembler_->IntPtrAddFoldConstants(fp_, offset);
- if (mode == CodeStubAssembler::INTEGER_PARAMETERS) {
- argc_ = assembler->ChangeInt32ToIntPtr(argc_);
- } else if (mode == CodeStubAssembler::SMI_PARAMETERS) {
- argc_ = assembler->SmiUntag(argc_);
- }
+ arguments_ = assembler_->IntPtrAdd(fp_, offset);
}
-compiler::Node* CodeStubArguments::GetReceiver() {
+Node* CodeStubArguments::GetReceiver() const {
return assembler_->Load(MachineType::AnyTagged(), arguments_,
assembler_->IntPtrConstant(kPointerSize));
}
-compiler::Node* CodeStubArguments::AtIndex(
- compiler::Node* index, CodeStubAssembler::ParameterMode mode) {
+Node* CodeStubArguments::AtIndexPtr(
+ Node* index, CodeStubAssembler::ParameterMode mode) const {
typedef compiler::Node Node;
- Node* negated_index = assembler_->IntPtrSubFoldConstants(
- assembler_->IntPtrOrSmiConstant(0, mode), index);
+ Node* negated_index = assembler_->IntPtrOrSmiSub(
+ assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
Node* offset =
assembler_->ElementOffsetFromIndex(negated_index, FAST_ELEMENTS, mode, 0);
- return assembler_->Load(MachineType::AnyTagged(), arguments_, offset);
+ return assembler_->IntPtrAdd(arguments_, offset);
+}
+
+Node* CodeStubArguments::AtIndex(Node* index,
+ CodeStubAssembler::ParameterMode mode) const {
+ DCHECK_EQ(argc_mode_, mode);
+ CSA_ASSERT(assembler_,
+ assembler_->UintPtrOrSmiLessThan(index, GetLength(), mode));
+ return assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode));
}
-compiler::Node* CodeStubArguments::AtIndex(int index) {
+Node* CodeStubArguments::AtIndex(int index) const {
return AtIndex(assembler_->IntPtrConstant(index));
}
-void CodeStubArguments::ForEach(const CodeStubAssembler::VariableList& vars,
- CodeStubArguments::ForEachBodyFunction body,
- compiler::Node* first, compiler::Node* last,
- CodeStubAssembler::ParameterMode mode) {
+void CodeStubArguments::ForEach(
+ const CodeStubAssembler::VariableList& vars,
+ const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
+ CodeStubAssembler::ParameterMode mode) {
assembler_->Comment("CodeStubArguments::ForEach");
- DCHECK_IMPLIES(first == nullptr || last == nullptr,
- mode == CodeStubAssembler::INTPTR_PARAMETERS);
if (first == nullptr) {
first = assembler_->IntPtrOrSmiConstant(0, mode);
}
if (last == nullptr) {
+ DCHECK_EQ(mode, argc_mode_);
last = argc_;
}
- compiler::Node* start = assembler_->IntPtrSubFoldConstants(
+ Node* start = assembler_->IntPtrSub(
arguments_,
assembler_->ElementOffsetFromIndex(first, FAST_ELEMENTS, mode));
- compiler::Node* end = assembler_->IntPtrSubFoldConstants(
+ Node* end = assembler_->IntPtrSub(
arguments_,
assembler_->ElementOffsetFromIndex(last, FAST_ELEMENTS, mode));
- assembler_->BuildFastLoop(
- vars, MachineType::PointerRepresentation(), start, end,
- [body](CodeStubAssembler* assembler, compiler::Node* current) {
- Node* arg = assembler->Load(MachineType::AnyTagged(), current);
- body(assembler, arg);
- },
- -kPointerSize, CodeStubAssembler::IndexAdvanceMode::kPost);
+ assembler_->BuildFastLoop(vars, start, end,
+ [this, &body](Node* current) {
+ Node* arg = assembler_->Load(
+ MachineType::AnyTagged(), current);
+ body(arg);
+ },
+ -kPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
+ CodeStubAssembler::IndexAdvanceMode::kPost);
}
-void CodeStubArguments::PopAndReturn(compiler::Node* value) {
+void CodeStubArguments::PopAndReturn(Node* value) {
assembler_->PopAndReturn(
- assembler_->IntPtrAddFoldConstants(argc_, assembler_->IntPtrConstant(1)),
- value);
+ assembler_->IntPtrAdd(argc_, assembler_->IntPtrConstant(1)), value);
}
-compiler::Node* CodeStubAssembler::IsFastElementsKind(
- compiler::Node* elements_kind) {
+Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
return Uint32LessThanOrEqual(elements_kind,
Int32Constant(LAST_FAST_ELEMENTS_KIND));
}
-compiler::Node* CodeStubAssembler::IsHoleyFastElementsKind(
-    compiler::Node* elements_kind) {
+// Returns a non-zero Word32 iff |elements_kind| is a holey fast kind.
+// Relies on the enum encoding asserted below: the low bit distinguishes
+// holey (1) from packed (0) within each fast-kind pair.
+Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
   CSA_ASSERT(this, IsFastElementsKind(elements_kind));
   STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
 @@ -9344,5 +8304,110 @@ compiler::Node* CodeStubAssembler::IsHoleyFastElementsKind(
+  // NOTE(review): |holey_elements| is computed in lines elided by the hunk
+  // break above — presumably the masked low bit of |elements_kind|.
   return Word32Equal(holey_elements, Int32Constant(1));
 }
+// Returns a non-zero Word32 iff the debugger is active, by loading the byte
+// at the isolate's debug_is_active external address.
+Node* CodeStubAssembler::IsDebugActive() {
+  Node* is_debug_active = Load(
+      MachineType::Uint8(),
+      ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
+  return Word32NotEqual(is_debug_active, Int32Constant(0));
+}
+
+// Returns a non-zero Word32 iff either a PromiseHook is installed or the
+// debugger is active. Both conditions are folded by the runtime into a single
+// byte at promise_hook_or_debug_is_active_address, so one load suffices.
+Node* CodeStubAssembler::IsPromiseHookEnabledOrDebugIsActive() {
+  Node* const promise_hook_or_debug_is_active =
+      Load(MachineType::Uint8(),
+           ExternalConstant(
+               ExternalReference::promise_hook_or_debug_is_active_address(
+                   isolate())));
+  return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
+}
+
+// Allocates and fully initializes a JSFunction with the given |map|,
+// |shared_info| and |context|. Every field of JSFunction::kSize is written
+// below, so no slack or uninitialized slots remain.
+Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
+                                                           Node* shared_info,
+                                                           Node* context) {
+  // Derive the raw code entry pointer from the SharedFunctionInfo's Code
+  // object: past the Code header, untagged.
+  Node* const code = BitcastTaggedToWord(
+      LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset));
+  Node* const code_entry =
+      IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
+
+  // Freshly allocated object: all stores may skip the write barrier.
+  Node* const fun = Allocate(JSFunction::kSize);
+  StoreMapNoWriteBarrier(fun, map);
+  StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
+  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
+  StoreObjectFieldRoot(fun, JSFunction::kFeedbackVectorOffset,
+                       Heap::kUndefinedCellRootIndex);
+  StoreObjectFieldRoot(fun, JSFunction::kPrototypeOrInitialMapOffset,
+                       Heap::kTheHoleValueRootIndex);
+  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
+                                 shared_info);
+  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
+  // Code entry is a raw pointer, hence the pointer representation.
+  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeEntryOffset, code_entry,
+                                 MachineType::PointerRepresentation());
+  StoreObjectFieldRoot(fun, JSFunction::kNextFunctionLinkOffset,
+                       Heap::kUndefinedValueRootIndex);
+
+  return fun;
+}
+
+// Allocates a PromiseReactionJobInfo and initializes all of its fields from
+// the given values. The object is freshly allocated, so every store may skip
+// the write barrier.
+Node* CodeStubAssembler::AllocatePromiseReactionJobInfo(
+    Node* value, Node* tasks, Node* deferred_promise, Node* deferred_on_resolve,
+    Node* deferred_on_reject, Node* context) {
+  Node* const result = Allocate(PromiseReactionJobInfo::kSize);
+  StoreMapNoWriteBarrier(result, Heap::kPromiseReactionJobInfoMapRootIndex);
+  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kValueOffset,
+                                 value);
+  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kTasksOffset,
+                                 tasks);
+  StoreObjectFieldNoWriteBarrier(
+      result, PromiseReactionJobInfo::kDeferredPromiseOffset, deferred_promise);
+  StoreObjectFieldNoWriteBarrier(
+      result, PromiseReactionJobInfo::kDeferredOnResolveOffset,
+      deferred_on_resolve);
+  StoreObjectFieldNoWriteBarrier(
+      result, PromiseReactionJobInfo::kDeferredOnRejectOffset,
+      deferred_on_reject);
+  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kContextOffset,
+                                 context);
+  return result;
+}
+
+// Returns a true-ish word iff |marker_or_function| is the stack-frame marker
+// word for |frame_type|.
+Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
+                                           StackFrame::Type frame_type) {
+  // Fix: compare against the marker for the requested |frame_type|. The
+  // previous code hard-coded StackFrame::ARGUMENTS_ADAPTOR, silently
+  // ignoring the |frame_type| parameter for every other frame type.
+  return WordEqual(marker_or_function,
+                   IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
+}
+
+// Returns a true-ish word iff |marker_or_function| is NOT the stack-frame
+// marker word for |frame_type|.
+Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
+                                              StackFrame::Type frame_type) {
+  // Fix: compare against the marker for the requested |frame_type|. The
+  // previous code hard-coded StackFrame::ARGUMENTS_ADAPTOR, silently
+  // ignoring the |frame_type| parameter for every other frame type.
+  return WordNotEqual(marker_or_function,
+                      IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
+}
+
+// DEBUG-only: emits code that prints |s| followed by a newline via
+// Runtime::kGlobalPrint. Compiles to nothing in non-DEBUG builds.
+void CodeStubAssembler::Print(const char* s) {
+#ifdef DEBUG
+  std::string formatted(s);
+  formatted += "\n";
+  // The message is materialized once at stub-compile time as a tenured
+  // heap string and embedded as a constant.
+  Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
+      formatted.c_str(), TENURED);
+  CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), HeapConstant(string));
+#endif
+}
+
+// DEBUG-only: emits code that prints an optional "prefix: " label followed by
+// a debug dump of |tagged_value| via Runtime::kDebugPrint. Compiles to
+// nothing in non-DEBUG builds.
+void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
+#ifdef DEBUG
+  if (prefix != nullptr) {
+    std::string formatted(prefix);
+    formatted += ": ";
+    Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
+        formatted.c_str(), TENURED);
+    CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
+                HeapConstant(string));
+  }
+  CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
+#endif
+}
+
} // namespace internal
} // namespace v8