deps: update v8 to 3.20.17.7

commit a1cf3ada62
parent 59a075e108
deps/v8/src/arm/code-stubs-arm.cc (vendored, 2 changes)
@@ -4430,6 +4430,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
 
+    __ SmiTag(r0);
     __ push(r0);
     __ push(r1);
     __ push(r2);
@@ -4440,6 +4441,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
     __ pop(r2);
     __ pop(r1);
     __ pop(r0);
+    __ SmiUntag(r0);
   }
   __ b(&done);
 
deps/v8/src/arm/lithium-arm.cc (vendored, 7 changes)
@@ -1866,13 +1866,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
 }
 
 
-LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
-  // The control instruction marking the end of a block that completed
-  // abruptly (e.g., threw an exception). There is nothing specific to do.
-  return NULL;
-}
-
-
 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
   LOperand* value = UseFixed(instr->value(), r0);
   return MarkAsCall(new(zone()) LThrow(value), instr);
deps/v8/src/arm/lithium-codegen-arm.cc (vendored, 1 change)
@@ -1398,6 +1398,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
     __ rsb(dividend, dividend, Operand(0), LeaveCC, lt);
     __ mov(dividend, Operand(dividend, ASR, power));
     if (divisor > 0) __ rsb(dividend, dividend, Operand(0), LeaveCC, lt);
+    if (divisor < 0) __ rsb(dividend, dividend, Operand(0), LeaveCC, gt);
     return;  // Don't fall through to "__ rsb" below.
   } else {
     // Deoptimize if remainder is not 0.
deps/v8/src/arm/macro-assembler-arm.cc (vendored, 12 changes)
@@ -1732,12 +1732,16 @@ void MacroAssembler::Allocate(int object_size,
 
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand(ip));
+      b(hs, gc_required);
+    }
     mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
     bind(&aligned);
@@ -1830,12 +1834,16 @@ void MacroAssembler::Allocate(Register object_size,
 
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand(ip));
+      b(hs, gc_required);
+    }
     mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
     bind(&aligned);
deps/v8/src/ast.cc (vendored, 4 changes)
@@ -708,7 +708,9 @@ void AstVisitor::VisitDeclarations(ZoneList<Declaration*>* declarations) {
 
 void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) {
   for (int i = 0; i < statements->length(); i++) {
-    Visit(statements->at(i));
+    Statement* stmt = statements->at(i);
+    Visit(stmt);
+    if (stmt->IsJump()) break;
   }
 }
 
deps/v8/src/debug.cc (vendored, 2 changes)
@@ -1627,7 +1627,7 @@ bool Debug::StepNextContinue(BreakLocationIterator* break_location_iterator,
 // object.
 bool Debug::IsDebugBreak(Address addr) {
   Code* code = Code::GetCodeFromTargetAddress(addr);
-  return code->is_debug_break();
+  return code->is_debug_stub() && code->extra_ic_state() == DEBUG_BREAK;
 }
 
 
deps/v8/src/deoptimizer.h (vendored, 4 changes)
@@ -166,7 +166,9 @@ class Deoptimizer : public Malloced {
 
   int output_count() const { return output_count_; }
 
-  Code::Kind compiled_code_kind() const { return compiled_code_->kind(); }
+  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
+  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
+  BailoutType bailout_type() const { return bailout_type_; }
 
   // Number of created JS frames. Not all created frames are necessarily JS.
   int jsframe_count() const { return jsframe_count_; }
deps/v8/src/heap-inl.h (vendored, 37 changes)
@@ -439,6 +439,43 @@ AllocationSpace Heap::TargetSpaceId(InstanceType type) {
 }
 
 
+bool Heap::AllowedToBeMigrated(HeapObject* object, AllocationSpace dst) {
+  // Object migration is governed by the following rules:
+  //
+  // 1) Objects in new-space can be migrated to one of the old spaces
+  //    that matches their target space or they stay in new-space.
+  // 2) Objects in old-space stay in the same space when migrating.
+  // 3) Fillers (two or more words) can migrate due to left-trimming of
+  //    fixed arrays in new-space, old-data-space and old-pointer-space.
+  // 4) Fillers (one word) can never migrate, they are skipped by
+  //    incremental marking explicitly to prevent invalid pattern.
+  //
+  // Since this function is used for debugging only, we do not place
+  // asserts here, but check everything explicitly.
+  if (object->map() == one_pointer_filler_map()) return false;
+  InstanceType type = object->map()->instance_type();
+  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+  AllocationSpace src = chunk->owner()->identity();
+  switch (src) {
+    case NEW_SPACE:
+      return dst == src || dst == TargetSpaceId(type);
+    case OLD_POINTER_SPACE:
+      return dst == src && (dst == TargetSpaceId(type) || object->IsFiller());
+    case OLD_DATA_SPACE:
+      return dst == src && dst == TargetSpaceId(type);
+    case CODE_SPACE:
+      return dst == src && type == CODE_TYPE;
+    case MAP_SPACE:
+    case CELL_SPACE:
+    case PROPERTY_CELL_SPACE:
+    case LO_SPACE:
+      return false;
+  }
+  UNREACHABLE();
+  return false;
+}
+
+
 void Heap::CopyBlock(Address dst, Address src, int byte_size) {
   CopyWords(reinterpret_cast<Object**>(dst),
             reinterpret_cast<Object**>(src),
deps/v8/src/heap.cc (vendored, 15 changes)
@@ -2088,10 +2088,13 @@ class ScavengingVisitor : public StaticVisitorBase {
       MaybeObject* maybe_result;
 
       if (object_contents == DATA_OBJECT) {
+        // TODO(mstarzinger): Turn this check into a regular assert soon!
+        CHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
         maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
       } else {
-        maybe_result =
-            heap->old_pointer_space()->AllocateRaw(allocation_size);
+        // TODO(mstarzinger): Turn this check into a regular assert soon!
+        CHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
+        maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
       }
 
       Object* result = NULL;  // Initialization to please compiler.
@@ -2121,6 +2124,8 @@ class ScavengingVisitor : public StaticVisitorBase {
        return;
       }
     }
+    // TODO(mstarzinger): Turn this check into a regular assert soon!
+    CHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
     MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
     heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
     Object* result = allocation->ToObjectUnchecked();
@@ -4013,10 +4018,10 @@ MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
     return AllocateByteArray(length);
   }
   int size = ByteArray::SizeFor(length);
+  AllocationSpace space =
+      (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_DATA_SPACE;
   Object* result;
-  { MaybeObject* maybe_result = (size <= Page::kMaxNonCodeHeapObjectSize)
-        ? old_data_space_->AllocateRaw(size)
-        : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
+  { MaybeObject* maybe_result = AllocateRaw(size, space, space);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
deps/v8/src/heap.h (vendored, 8 changes)
@@ -1391,6 +1391,10 @@ class Heap {
   inline OldSpace* TargetSpace(HeapObject* object);
   static inline AllocationSpace TargetSpaceId(InstanceType type);
 
+  // Checks whether the given object is allowed to be migrated from it's
+  // current space into the given destination space. Used for debugging.
+  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
+
   // Sets the stub_cache_ (only used when expanding the dictionary).
   void public_set_code_stubs(UnseededNumberDictionary* value) {
     roots_[kCodeStubsRootIndex] = value;
@@ -1490,6 +1494,10 @@ class Heap {
   inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
 
 #ifdef DEBUG
+  void set_allocation_timeout(int timeout) {
+    allocation_timeout_ = timeout;
+  }
+
   bool disallow_allocation_failure() {
     return disallow_allocation_failure_;
   }
deps/v8/src/hydrogen-environment-liveness.cc (vendored, 4 changes)
@@ -163,12 +163,8 @@ void HEnvironmentLivenessAnalysisPhase::UpdateLivenessAtInstruction(
       live->Clear();
       for (int i = 0; i < enter->return_targets()->length(); ++i) {
         int return_id = enter->return_targets()->at(i)->block_id();
-        // When an AbnormalExit is involved, it can happen that the return
-        // target block doesn't actually exist.
-        if (return_id < live_at_block_start_.length()) {
-          live->Union(*live_at_block_start_[return_id]);
-        }
+        live->Union(*live_at_block_start_[return_id]);
       }
       last_simulate_ = NULL;
       break;
     }
deps/v8/src/hydrogen-instructions.h (vendored, 12 changes)
@@ -63,7 +63,6 @@ class LChunkBuilder;
 
 
 #define HYDROGEN_CONCRETE_INSTRUCTION_LIST(V) \
-  V(AbnormalExit) \
   V(AccessArgumentsAt) \
   V(Add) \
   V(Allocate) \
@@ -1426,16 +1425,6 @@ class HReturn: public HTemplateControlInstruction<0, 3> {
 };
 
 
-class HAbnormalExit: public HTemplateControlInstruction<0, 0> {
- public:
-  virtual Representation RequiredInputRepresentation(int index) {
-    return Representation::None();
-  }
-
-  DECLARE_CONCRETE_INSTRUCTION(AbnormalExit)
-};
-
-
 class HUnaryOperation: public HTemplateInstruction<1> {
  public:
   HUnaryOperation(HValue* value, HType type = HType::Tagged())
@@ -4081,6 +4070,7 @@ class HStringCompareAndBranch: public HTemplateControlInstruction<2, 3> {
     SetOperandAt(1, left);
     SetOperandAt(2, right);
     set_representation(Representation::Tagged());
+    SetGVNFlag(kChangesNewSpacePromotion);
   }
 
   HValue* context() { return OperandAt(0); }
deps/v8/src/hydrogen.cc (vendored, 28 changes)
@@ -2729,16 +2729,6 @@ void TestContext::BuildBranch(HValue* value) {
   if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
     builder->Bailout(kArgumentsObjectValueInATestContext);
   }
-  if (value->IsConstant()) {
-    HConstant* constant_value = HConstant::cast(value);
-    if (constant_value->BooleanValue()) {
-      builder->current_block()->Goto(if_true(), builder->function_state());
-    } else {
-      builder->current_block()->Goto(if_false(), builder->function_state());
-    }
-    builder->set_current_block(NULL);
-    return;
-  }
   HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
   HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
   ToBooleanStub::Types expected(condition()->to_boolean_types());
@@ -3083,7 +3073,9 @@ void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
 
 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
   for (int i = 0; i < statements->length(); i++) {
-    CHECK_ALIVE(Visit(statements->at(i)));
+    Statement* stmt = statements->at(i);
+    CHECK_ALIVE(Visit(stmt));
+    if (stmt->IsJump()) break;
   }
 }
 
@@ -5365,8 +5357,6 @@ void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
   HThrow* instr = Add<HThrow>(value);
   instr->set_position(expr->position());
   Add<HSimulate>(expr->id());
-  current_block()->FinishExit(new(zone()) HAbnormalExit);
-  set_current_block(NULL);
 }
 
 
@@ -5536,6 +5526,7 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
   bool has_smi_or_object_maps = false;
   bool has_js_array_access = false;
   bool has_non_js_array_access = false;
+  bool has_seen_holey_elements = false;
   Handle<Map> most_general_consolidated_map;
   for (int i = 0; i < maps->length(); ++i) {
     Handle<Map> map = maps->at(i);
@@ -5558,6 +5549,10 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
     } else {
       return NULL;
     }
+    // Remember if we've ever seen holey elements.
+    if (IsHoleyElementsKind(map->elements_kind())) {
+      has_seen_holey_elements = true;
+    }
     // Remember the most general elements kind, the code for its load will
     // properly handle all of the more specific cases.
     if ((i == 0) || IsMoreGeneralElementsKindTransition(
@@ -5569,10 +5564,15 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
   if (!has_double_maps && !has_smi_or_object_maps) return NULL;
 
   HCheckMaps* check_maps = Add<HCheckMaps>(object, maps);
+  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
+  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
+  ElementsKind consolidated_elements_kind = has_seen_holey_elements
+      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
+      : most_general_consolidated_map->elements_kind();
   HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
       object, key, val, check_maps,
       most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
-      most_general_consolidated_map->elements_kind(),
+      consolidated_elements_kind,
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
   return instr;
 }
deps/v8/src/ia32/code-stubs-ia32.cc (vendored, 2 changes)
@@ -4451,6 +4451,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
 
+    __ SmiTag(eax);
     __ push(eax);
     __ push(edi);
     __ push(ebx);
@@ -4461,6 +4462,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
     __ pop(ebx);
     __ pop(edi);
     __ pop(eax);
+    __ SmiUntag(eax);
   }
   __ jmp(&done);
 
deps/v8/src/ia32/lithium-codegen-ia32.cc (vendored, 1 change)
@@ -1441,6 +1441,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
     __ cmp(dividend, 0);
     __ j(less, &negative, Label::kNear);
     __ sar(dividend, power);
+    if (divisor < 0) __ neg(dividend);
     __ jmp(&done, Label::kNear);
 
     __ bind(&negative);
deps/v8/src/ia32/lithium-ia32.cc (vendored, 7 changes)
@@ -1880,13 +1880,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
 }
 
 
-LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
-  // The control instruction marking the end of a block that completed
-  // abruptly (e.g., threw an exception). There is nothing specific to do.
-  return NULL;
-}
-
-
 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
   LOperand* context = UseFixed(instr->context(), esi);
   LOperand* value = UseFixed(instr->value(), eax);
deps/v8/src/ia32/macro-assembler-ia32.cc (vendored, 39 changes)
@@ -1307,26 +1307,29 @@ void MacroAssembler::Allocate(int object_size,
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
 
+  ExternalReference allocation_limit =
+      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
+
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand::StaticVariable(allocation_limit));
+      j(above_equal, gc_required);
+    }
     mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
     bind(&aligned);
   }
 
-  Register top_reg = result_end.is_valid() ? result_end : result;
-
   // Calculate new top and bail out if space is exhausted.
-  ExternalReference allocation_limit =
-      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
+  Register top_reg = result_end.is_valid() ? result_end : result;
 
   if (!top_reg.is(result)) {
     mov(top_reg, result);
   }
@@ -1381,14 +1384,21 @@ void MacroAssembler::Allocate(int header_size,
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
 
+  ExternalReference allocation_limit =
+      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
+
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand::StaticVariable(allocation_limit));
+      j(above_equal, gc_required);
+    }
     mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
@@ -1396,9 +1406,6 @@ void MacroAssembler::Allocate(int header_size,
   }
 
   // Calculate new top and bail out if space is exhausted.
-  ExternalReference allocation_limit =
-      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
-
   // We assume that element_count*element_size + header_size does not
   // overflow.
   if (element_count_type == REGISTER_VALUE_IS_SMI) {
@@ -1452,14 +1459,21 @@ void MacroAssembler::Allocate(Register object_size,
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
 
+  ExternalReference allocation_limit =
+      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
+
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      cmp(result, Operand::StaticVariable(allocation_limit));
+      j(above_equal, gc_required);
+    }
     mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
@@ -1467,9 +1481,6 @@ void MacroAssembler::Allocate(Register object_size,
   }
 
   // Calculate new top and bail out if space is exhausted.
-  ExternalReference allocation_limit =
-      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
-
   if (!object_size.is(result_end)) {
     mov(result_end, object_size);
   }
deps/v8/src/ic.cc (vendored, 2 changes)
@@ -379,7 +379,7 @@ void IC::Clear(Address address) {
   Code* target = GetTargetAtAddress(address);
 
   // Don't clear debug break inline cache as it will remove the break point.
-  if (target->is_debug_break()) return;
+  if (target->is_debug_stub()) return;
 
   switch (target->kind()) {
     case Code::LOAD_IC: return LoadIC::Clear(address, target);
deps/v8/src/liveedit.cc (vendored, 2 changes)
@@ -1691,7 +1691,7 @@ static const char* DropFrames(Vector<StackFrame*> frames,
   Code* pre_top_frame_code = pre_top_frame->LookupCode();
   bool frame_has_padding;
   if (pre_top_frame_code->is_inline_cache_stub() &&
-      pre_top_frame_code->is_debug_break()) {
+      pre_top_frame_code->is_debug_stub()) {
     // OK, we can drop inline cache calls.
     *mode = Debug::FRAME_DROPPED_IN_IC_CALL;
     frame_has_padding = Debug::FramePaddingLayout::kIsSupported;
deps/v8/src/mark-compact.cc (vendored, 13 changes)
@@ -2743,12 +2743,10 @@ void MarkCompactCollector::MigrateObject(Address dst,
                                          int size,
                                          AllocationSpace dest) {
   HEAP_PROFILE(heap(), ObjectMoveEvent(src, dst));
-  // TODO(hpayer): Replace that check with an assert.
+  // TODO(hpayer): Replace these checks with asserts.
+  CHECK(heap()->AllowedToBeMigrated(HeapObject::FromAddress(src), dest));
   CHECK(dest != LO_SPACE && size <= Page::kMaxNonCodeHeapObjectSize);
   if (dest == OLD_POINTER_SPACE) {
-    // TODO(hpayer): Replace this check with an assert.
-    HeapObject* heap_object = HeapObject::FromAddress(src);
-    CHECK(heap_->TargetSpace(heap_object) == heap_->old_pointer_space());
     Address src_slot = src;
     Address dst_slot = dst;
     ASSERT(IsAligned(size, kPointerSize));
@@ -2794,13 +2792,6 @@ void MarkCompactCollector::MigrateObject(Address dst,
     Code::cast(HeapObject::FromAddress(dst))->Relocate(dst - src);
   } else {
     ASSERT(dest == OLD_DATA_SPACE || dest == NEW_SPACE);
-    // Objects in old data space can just be moved by compaction to a different
-    // page in old data space.
-    // TODO(hpayer): Replace the following check with an assert.
-    CHECK(!heap_->old_data_space()->Contains(src) ||
-          (heap_->old_data_space()->Contains(dst) &&
-           heap_->TargetSpace(HeapObject::FromAddress(src)) ==
-           heap_->old_data_space()));
     heap()->MoveBlock(dst, src, size);
   }
   Memory::Address_at(src) = dst;
deps/v8/src/mips/code-stubs-mips.cc (vendored, 2 changes)
@@ -4808,12 +4808,14 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
         1 << 5 |  // a1
         1 << 6;   // a2
 
+    __ SmiTag(a0);
     __ MultiPush(kSavedRegs);
 
     CreateAllocationSiteStub create_stub;
     __ CallStub(&create_stub);
 
     __ MultiPop(kSavedRegs);
+    __ SmiUntag(a0);
   }
   __ Branch(&done);
 
deps/v8/src/mips/lithium-codegen-mips.cc (vendored, 125 changes)
@@ -762,7 +762,7 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
 }
 
 
-void LCodeGen::DeoptimizeIf(Condition cc,
+void LCodeGen::DeoptimizeIf(Condition condition,
                             LEnvironment* environment,
                             Deoptimizer::BailoutType bailout_type,
                             Register src1,
@@ -789,16 +789,16 @@ void LCodeGen::DeoptimizeIf(Condition cc,
 
   if (info()->ShouldTrapOnDeopt()) {
     Label skip;
-    if (cc != al) {
-      __ Branch(&skip, NegateCondition(cc), src1, src2);
+    if (condition != al) {
+      __ Branch(&skip, NegateCondition(condition), src1, src2);
     }
     __ stop("trap_on_deopt");
     __ bind(&skip);
   }
 
   ASSERT(info()->IsStub() || frame_is_built_);
-  if (cc == al && frame_is_built_) {
-    __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2);
+  if (condition == al && frame_is_built_) {
+    __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
   } else {
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -811,19 +811,19 @@ void LCodeGen::DeoptimizeIf(Condition cc,
                                             !frame_is_built_);
       deopt_jump_table_.Add(table_entry, zone());
     }
-    __ Branch(&deopt_jump_table_.last().label, cc, src1, src2);
+    __ Branch(&deopt_jump_table_.last().label, condition, src1, src2);
   }
 }
 
 
-void LCodeGen::DeoptimizeIf(Condition cc,
+void LCodeGen::DeoptimizeIf(Condition condition,
                             LEnvironment* environment,
                             Register src1,
                             const Operand& src2) {
   Deoptimizer::BailoutType bailout_type = info()->IsStub()
       ? Deoptimizer::LAZY
      : Deoptimizer::EAGER;
-  DeoptimizeIf(cc, environment, bailout_type, src1, src2);
+  DeoptimizeIf(condition, environment, bailout_type, src1, src2);
 }
 
 
@@ -1993,20 +1993,22 @@ int LCodeGen::GetNextEmittedBlock() const {
 
 template<class InstrType>
 void LCodeGen::EmitBranch(InstrType instr,
-                          Condition cc, Register src1, const Operand& src2) {
+                          Condition condition,
+                          Register src1,
+                          const Operand& src2) {
   int left_block = instr->TrueDestination(chunk_);
   int right_block = instr->FalseDestination(chunk_);
 
   int next_block = GetNextEmittedBlock();
-  if (right_block == left_block || cc == al) {
+  if (right_block == left_block || condition == al) {
     EmitGoto(left_block);
   } else if (left_block == next_block) {
     __ Branch(chunk_->GetAssemblyLabel(right_block),
-              NegateCondition(cc), src1, src2);
+              NegateCondition(condition), src1, src2);
   } else if (right_block == next_block) {
-    __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
+    __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
  } else {
-    __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
+    __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
     __ Branch(chunk_->GetAssemblyLabel(right_block));
   }
 }
@@ -2014,7 +2016,9 @@ void LCodeGen::EmitBranch(InstrType instr,
 
 template<class InstrType>
 void LCodeGen::EmitBranchF(InstrType instr,
-                           Condition cc, FPURegister src1, FPURegister src2) {
+                           Condition condition,
+                           FPURegister src1,
+                           FPURegister src2) {
   int right_block = instr->FalseDestination(chunk_);
   int left_block = instr->TrueDestination(chunk_);
 
@@ -2023,16 +2027,29 @@ void LCodeGen::EmitBranchF(InstrType instr,
     EmitGoto(left_block);
   } else if (left_block == next_block) {
     __ BranchF(chunk_->GetAssemblyLabel(right_block), NULL,
-               NegateCondition(cc), src1, src2);
+               NegateCondition(condition), src1, src2);
   } else if (right_block == next_block) {
-    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
+    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
+               condition, src1, src2);
   } else {
-    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
+    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
+               condition, src1, src2);
     __ Branch(chunk_->GetAssemblyLabel(right_block));
   }
 }
 
 
+template<class InstrType>
+void LCodeGen::EmitFalseBranchF(InstrType instr,
+                                Condition condition,
+                                FPURegister src1,
+                                FPURegister src2) {
+  int false_block = instr->FalseDestination(chunk_);
+  __ BranchF(chunk_->GetAssemblyLabel(false_block), NULL,
+             condition, src1, src2);
+}
+
+
 void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
   __ stop("LDebugBreak");
 }
@@ -2293,6 +2310,23 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
 }
 
 
+void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
+  if (instr->hydrogen()->representation().IsTagged()) {
+    Register input_reg = ToRegister(instr->object());
+    __ li(at, Operand(factory()->the_hole_value()));
+    EmitBranch(instr, eq, input_reg, Operand(at));
+    return;
+  }
+
+  DoubleRegister input_reg = ToDoubleRegister(instr->object());
+  EmitFalseBranchF(instr, eq, input_reg, input_reg);
+
+  Register scratch = scratch0();
+  __ FmoveHigh(scratch, input_reg);
+  EmitBranch(instr, eq, scratch, Operand(kHoleNanUpper32));
+}
+
+
 Condition LCodeGen::EmitIsObject(Register input,
                                  Register temp1,
                                  Register temp2,
@@ -4149,17 +4183,17 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
 }
 
 
-void LCodeGen::ApplyCheckIf(Condition cc,
+void LCodeGen::ApplyCheckIf(Condition condition,
                             LBoundsCheck* check,
                             Register src1,
                             const Operand& src2) {
   if (FLAG_debug_code && check->hydrogen()->skip_check()) {
     Label done;
-    __ Branch(&done, NegateCondition(cc), src1, src2);
+    __ Branch(&done, NegateCondition(condition), src1, src2);
     __ stop("eliminated bounds check failed");
     __ bind(&done);
   } else {
-    DeoptimizeIf(cc, check->environment(), src1, src2);
+    DeoptimizeIf(condition, check->environment(), src1, src2);
   }
 }
 
@@ -4702,29 +4736,6 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
   Register temp1 = ToRegister(instr->temp());
   Register temp2 = ToRegister(instr->temp2());
 
-  bool convert_hole = false;
-  HValue* change_input = instr->hydrogen()->value();
-  if (change_input->IsLoadKeyed()) {
-    HLoadKeyed* load = HLoadKeyed::cast(change_input);
-    convert_hole = load->UsesMustHandleHole();
-  }
-
-  Label no_special_nan_handling;
-  Label done;
-  if (convert_hole) {
-    DoubleRegister input_reg = ToDoubleRegister(instr->value());
-    __ BranchF(&no_special_nan_handling, NULL, eq, input_reg, input_reg);
-    __ Move(reg, scratch0(), input_reg);
-    Label canonicalize;
-    __ Branch(&canonicalize, ne, scratch0(), Operand(kHoleNanUpper32));
-    __ li(reg, factory()->undefined_value());
-    __ Branch(&done);
-    __ bind(&canonicalize);
-    __ Move(input_reg,
-            FixedDoubleArray::canonical_not_the_hole_nan_as_double());
-  }
-
-  __ bind(&no_special_nan_handling);
   DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
   if (FLAG_inline_new) {
     __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
@@ -4738,7 +4749,6 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
   __ sdc1(input_reg, MemOperand(reg, HeapNumber::kValueOffset));
   // Now that we have finished with the object's real address tag it
   __ Addu(reg, reg, kHeapObjectTag);
-  __ bind(&done);
 }
 
 
@@ -4780,7 +4790,7 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
 
 void LCodeGen::EmitNumberUntagD(Register input_reg,
                                 DoubleRegister result_reg,
-                                bool allow_undefined_as_nan,
+                                bool can_convert_undefined_to_nan,
                                 bool deoptimize_on_minus_zero,
                                 LEnvironment* env,
                                 NumberUntagDMode mode) {
@@ -4788,16 +4798,14 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
 
   Label load_smi, heap_number, done;
 
-  STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE >
-                NUMBER_CANDIDATE_IS_ANY_TAGGED);
-  if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) {
+  if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
     // Smi check.
     __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
 
     // Heap number map check.
     __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
-    if (!allow_undefined_as_nan) {
+    if (!can_convert_undefined_to_nan) {
       DeoptimizeIf(ne, env, scratch, Operand(at));
     } else {
      Label heap_number, convert;
@@ -4805,10 +4813,6 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
 
       // Convert undefined (and hole) to NaN.
       __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-      if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) {
-        __ Branch(&convert, eq, input_reg, Operand(at));
-        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
-      }
       DeoptimizeIf(ne, env, input_reg, Operand(at));
 
       __ bind(&convert);
@@ -4956,21 +4960,12 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
   Register input_reg = ToRegister(input);
   DoubleRegister result_reg = ToDoubleRegister(result);
 
-  NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED;
   HValue* value = instr->hydrogen()->value();
-  if (value->type().IsSmi()) {
-    mode = NUMBER_CANDIDATE_IS_SMI;
-  } else if (value->IsLoadKeyed()) {
-    HLoadKeyed* load = HLoadKeyed::cast(value);
-    if (load->UsesMustHandleHole()) {
-      if (load->hole_mode() == ALLOW_RETURN_HOLE) {
-        mode = NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE;
-      }
-    }
-  }
+  NumberUntagDMode mode = value->representation().IsSmi()
+      ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;
 
   EmitNumberUntagD(input_reg, result_reg,
-                   instr->hydrogen()->allow_undefined_as_nan(),
+                   instr->hydrogen()->can_convert_undefined_to_nan(),
                    instr->hydrogen()->deoptimize_on_minus_zero(),
                    instr->environment(),
                    mode);
deps/v8/src/mips/lithium-codegen-mips.h (vendored, 15 changes)
@@ -277,16 +277,16 @@ class LCodeGen BASE_EMBEDDED {
 
   void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                             Safepoint::DeoptMode mode);
-  void DeoptimizeIf(Condition cc,
+  void DeoptimizeIf(Condition condition,
                     LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type,
                     Register src1 = zero_reg,
                     const Operand& src2 = Operand(zero_reg));
-  void DeoptimizeIf(Condition cc,
+  void DeoptimizeIf(Condition condition,
                     LEnvironment* environment,
                     Register src1 = zero_reg,
                     const Operand& src2 = Operand(zero_reg));
-  void ApplyCheckIf(Condition cc,
+  void ApplyCheckIf(Condition condition,
                     LBoundsCheck* check,
                     Register src1 = zero_reg,
                     const Operand& src2 = Operand(zero_reg));
@@ -329,12 +329,17 @@ class LCodeGen BASE_EMBEDDED {
   void EmitGoto(int block);
   template<class InstrType>
   void EmitBranch(InstrType instr,
-                  Condition cc,
+                  Condition condition,
                   Register src1,
                   const Operand& src2);
   template<class InstrType>
   void EmitBranchF(InstrType instr,
-                   Condition cc,
+                   Condition condition,
+                   FPURegister src1,
+                   FPURegister src2);
+  template<class InstrType>
+  void EmitFalseBranchF(InstrType instr,
+                        Condition condition,
                         FPURegister src1,
                         FPURegister src2);
   void EmitCmpI(LOperand* left, LOperand* right);
deps/v8/src/mips/lithium-mips.cc (vendored, 14 changes)
@@ -1652,6 +1652,13 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
 }
 
 
+LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
+    HCompareHoleAndBranch* instr) {
+  LOperand* object = UseRegisterAtStart(instr->object());
+  return new(zone()) LCmpHoleAndBranch(object);
+}
+
+
 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
   ASSERT(instr->value()->representation().IsTagged());
   LOperand* temp = TempRegister();
@@ -1778,13 +1785,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
 }
 
 
-LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
-  // The control instruction marking the end of a block that completed
-  // abruptly (e.g., threw an exception). There is nothing specific to do.
-  return NULL;
-}
-
-
 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
   LOperand* value = UseFixed(instr->value(), a0);
   return MarkAsCall(new(zone()) LThrow(value), instr);
deps/v8/src/mips/lithium-mips.h (vendored, 17 changes)
@@ -74,6 +74,7 @@ class LCodeGen;
   V(ClassOfTestAndBranch) \
   V(CompareNumericAndBranch) \
   V(CmpObjectEqAndBranch) \
+  V(CmpHoleAndBranch) \
   V(CmpMapAndBranch) \
   V(CmpT) \
   V(ConstantD) \
@@ -887,12 +888,24 @@ class LCmpObjectEqAndBranch: public LControlInstruction<2, 0> {
   LOperand* left() { return inputs_[0]; }
   LOperand* right() { return inputs_[1]; }
 
-  DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
-                               "cmp-object-eq-and-branch")
+  DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch, "cmp-object-eq-and-branch")
   DECLARE_HYDROGEN_ACCESSOR(CompareObjectEqAndBranch)
 };
 
 
+class LCmpHoleAndBranch: public LControlInstruction<1, 0> {
+ public:
+  explicit LCmpHoleAndBranch(LOperand* object) {
+    inputs_[0] = object;
+  }
+
+  LOperand* object() { return inputs_[0]; }
+
+  DECLARE_CONCRETE_INSTRUCTION(CmpHoleAndBranch, "cmp-hole-and-branch")
+  DECLARE_HYDROGEN_ACCESSOR(CompareHoleAndBranch)
+};
+
+
 class LIsObjectAndBranch: public LControlInstruction<1, 1> {
  public:
   LIsObjectAndBranch(LOperand* value, LOperand* temp) {

deps/v8/src/mips/macro-assembler-mips.cc | 10 (vendored)

@@ -2944,12 +2944,15 @@ void MacroAssembler::Allocate(int object_size,
 
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      Branch(gc_required, Ugreater_equal, result, Operand(t9));
+    }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     sw(scratch2, MemOperand(result));
     Addu(result, result, Operand(kDoubleSize / 2));
@@ -3028,12 +3031,15 @@ void MacroAssembler::Allocate(Register object_size,
 
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
-    // always safe because the limit of the heap is always aligned.
+    // safe in new-space because the limit of the heap is aligned there.
     ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      Branch(gc_required, Ugreater_equal, result, Operand(t9));
+    }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     sw(scratch2, MemOperand(result));
     Addu(result, result, Operand(kDoubleSize / 2));

deps/v8/src/mips/macro-assembler-mips.h | 8 (vendored)

@@ -235,6 +235,14 @@ class MacroAssembler: public Assembler {
     mfc1(dst_high, FPURegister::from_code(src.code() + 1));
   }
 
+  inline void FmoveHigh(Register dst_high, FPURegister src) {
+    mfc1(dst_high, FPURegister::from_code(src.code() + 1));
+  }
+
+  inline void FmoveLow(Register dst_low, FPURegister src) {
+    mfc1(dst_low, src);
+  }
+
   inline void Move(FPURegister dst, Register src_low, Register src_high) {
     mtc1(src_low, dst);
     mtc1(src_high, FPURegister::from_code(dst.code() + 1));

deps/v8/src/objects-inl.h | 4 (vendored)

@@ -4071,8 +4071,8 @@ bool Code::is_inline_cache_stub() {
 }
 
 
-bool Code::is_debug_break() {
-  return ic_state() == DEBUG_STUB && extra_ic_state() == DEBUG_BREAK;
+bool Code::is_debug_stub() {
+  return ic_state() == DEBUG_STUB;
 }
 
 

deps/v8/src/objects.cc | 12 (vendored)

@@ -3909,9 +3909,9 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
       lookup->holder()->MigrateToMap(Map::cast(back));
       if (maybe_failure->IsFailure()) return maybe_failure;
     }
-    DescriptorArray* desc = transition_map->instance_descriptors();
-    int descriptor = transition_map->LastAdded();
-    representation = desc->GetDetails(descriptor).representation();
+    descriptors = transition_map->instance_descriptors();
+    representation =
+        descriptors->GetDetails(descriptor).representation();
   }
   int field_index = descriptors->GetFieldIndex(descriptor);
   result = lookup->holder()->AddFastPropertyUsingMap(
@@ -4117,9 +4117,9 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
       MaybeObject* maybe_failure = self->MigrateToMap(Map::cast(back));
       if (maybe_failure->IsFailure()) return maybe_failure;
     }
-    DescriptorArray* desc = transition_map->instance_descriptors();
-    int descriptor = transition_map->LastAdded();
-    representation = desc->GetDetails(descriptor).representation();
+    descriptors = transition_map->instance_descriptors();
+    representation =
+        descriptors->GetDetails(descriptor).representation();
   }
   int field_index = descriptors->GetFieldIndex(descriptor);
   result = self->AddFastPropertyUsingMap(

deps/v8/src/objects.h | 8 (vendored)

@@ -2719,7 +2719,11 @@ class JSObject: public JSReceiver {
   // don't want to be wasteful with long lived objects.
   static const int kMaxUncheckedOldFastElementsLength = 500;
 
-  static const int kInitialMaxFastElementArray = 100000;
+  // TODO(2790): HAllocate currently always allocates fast backing stores
+  // in new space, where on x64 we can only fit ~98K elements. Keep this
+  // limit lower than that until HAllocate is made smarter.
+  static const int kInitialMaxFastElementArray = 95000;
 
   static const int kFastPropertiesSoftLimit = 12;
   static const int kMaxFastProperties = 64;
   static const int kMaxInstanceSize = 255 * kPointerSize;
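The TODO above is the substance of issue 2790: `new Array(n)` for n up to this constant may be compiled to an inline HAllocate of the backing store in new space, so the constant has to stay below what new space can actually hold on x64. A hedged sketch of the boundary this creates (the exact threshold is the constant above; regress-2790.js later in this commit sweeps the whole range):

    var small = new Array(95000);   // may get an inline fast backing store
    var large = new Array(200000);  // must take the slower runtime path
    assertEquals(95000, small.length);
    assertEquals(200000, large.length);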
@@ -4909,7 +4913,7 @@ class Code: public HeapObject {
 
   // Testers for IC stub kinds.
   inline bool is_inline_cache_stub();
-  inline bool is_debug_break();
+  inline bool is_debug_stub();
   inline bool is_load_stub() { return kind() == LOAD_IC; }
   inline bool is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
   inline bool is_store_stub() { return kind() == STORE_IC; }

deps/v8/src/runtime.cc | 96 (vendored)

@@ -8294,26 +8294,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InstallRecompiledCode) {
 
 class ActivationsFinder : public ThreadVisitor {
  public:
-  explicit ActivationsFinder(JSFunction* function)
-      : function_(function), has_activations_(false) {}
+  Code* code_;
+  bool has_code_activations_;
+
+  explicit ActivationsFinder(Code* code)
+      : code_(code),
+        has_code_activations_(false) { }
 
   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
-    if (has_activations_) return;
-    for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
-      JavaScriptFrame* frame = it.frame();
-      if (frame->is_optimized() && frame->function() == function_) {
-        has_activations_ = true;
-        return;
-      }
-    }
+    JavaScriptFrameIterator it(isolate, top);
+    VisitFrames(&it);
   }
 
-  bool has_activations() { return has_activations_; }
-
- private:
-  JSFunction* function_;
-  bool has_activations_;
+  void VisitFrames(JavaScriptFrameIterator* it) {
+    for (; !it->done(); it->Advance()) {
+      JavaScriptFrame* frame = it->frame();
+      if (code_->contains(frame->pc())) has_code_activations_ = true;
+    }
+  }
 };
 
 
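The rewritten finder keys on a Code object (by checking whether it contains a frame's pc) instead of on a JSFunction, because distinct closures created from the same function literal can share one optimized code object; deoptimizing through one closure must not discard code that another closure's frame is still running. A minimal sketch of the sharing scenario, assuming the closures do end up sharing optimized code (regress-shared-deopt.js at the end of this commit is the real test):

    function makeF(i) {
      return function f(x) { return x + i; };
    }
    // f4 and f5 are different JSFunctions built from the same literal,
    // so Crankshaft can hand both of them the same optimized code object.
    var f4 = makeF(4);
    var f5 = makeF(5);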
@@ -8336,7 +8334,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
   Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
   ASSERT(AllowHeapAllocation::IsAllowed());
 
-  ASSERT(deoptimizer->compiled_code_kind() == Code::OPTIMIZED_FUNCTION);
+  Handle<JSFunction> function = deoptimizer->function();
+  Handle<Code> optimized_code = deoptimizer->compiled_code();
+
+  ASSERT(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
+  ASSERT(type == deoptimizer->bailout_type());
 
   // Make sure to materialize objects before causing any allocation.
   JavaScriptFrameIterator it(isolate);
@@ -8345,10 +8347,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
 
   JavaScriptFrame* frame = it.frame();
   RUNTIME_ASSERT(frame->function()->IsJSFunction());
-  Handle<JSFunction> function(frame->function(), isolate);
-  Handle<Code> optimized_code(function->code());
-  RUNTIME_ASSERT((type != Deoptimizer::EAGER &&
-                  type != Deoptimizer::SOFT) || function->IsOptimized());
 
   // Avoid doing too much work when running with --always-opt and keep
   // the optimized code around.
@@ -8356,33 +8354,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
     return isolate->heap()->undefined_value();
   }
 
-  // Find other optimized activations of the function or functions that
-  // share the same optimized code.
-  bool has_other_activations = false;
-  while (!it.done()) {
-    JavaScriptFrame* frame = it.frame();
-    JSFunction* other_function = frame->function();
-    if (frame->is_optimized() && other_function->code() == function->code()) {
-      has_other_activations = true;
-      break;
-    }
-    it.Advance();
-  }
-
-  if (!has_other_activations) {
-    ActivationsFinder activations_finder(*function);
+  // Search for other activations of the same function and code.
+  ActivationsFinder activations_finder(*optimized_code);
+  activations_finder.VisitFrames(&it);
   isolate->thread_manager()->IterateArchivedThreads(&activations_finder);
-    has_other_activations = activations_finder.has_activations();
-  }
 
-  if (!has_other_activations) {
+  if (!activations_finder.has_code_activations_) {
+    if (function->code() == *optimized_code) {
     if (FLAG_trace_deopt) {
       PrintF("[removing optimized code for: ");
       function->PrintName();
       PrintF("]\n");
     }
     function->ReplaceCode(function->shared()->code());
+    }
   } else {
+    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
+    // unconditionally if the code is not already marked for deoptimization.
+    // If there is an index by shared function info, all the better.
     Deoptimizer::DeoptimizeFunction(*function);
   }
   // Evict optimized code for this function from the cache so that it doesn't
@@ -8635,6 +8624,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAllocationTimeout) {
+  SealHandleScope shs(isolate);
+  ASSERT(args.length() == 2);
+#ifdef DEBUG
+  CONVERT_SMI_ARG_CHECKED(interval, 0);
+  CONVERT_SMI_ARG_CHECKED(timeout, 1);
+  isolate->heap()->set_allocation_timeout(timeout);
+  FLAG_gc_interval = interval;
+#endif
+  return isolate->heap()->undefined_value();
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_CheckIsBootstrapping) {
   SealHandleScope shs(isolate);
   RUNTIME_ASSERT(isolate->bootstrapper()->IsActive());
@@ -13639,7 +13641,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) {
   CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 1);
 
   bool has_pending_exception = false;
-  double millis = Execution::ToNumber(date, &has_pending_exception)->Number();
+  Handle<Object> value = Execution::ToNumber(date, &has_pending_exception);
   if (has_pending_exception) {
     ASSERT(isolate->has_pending_exception());
     return Failure::Exception();
@@ -13650,7 +13652,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) {
   if (!date_format) return isolate->ThrowIllegalOperation();
 
   icu::UnicodeString result;
-  date_format->format(millis, result);
+  date_format->format(value->Number(), result);
 
   return *isolate->factory()->NewStringFromTwoByte(
       Vector<const uint16_t>(
@@ -13743,7 +13745,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberFormat) {
   CONVERT_ARG_HANDLE_CHECKED(Object, number, 1);
 
   bool has_pending_exception = false;
-  double value = Execution::ToNumber(number, &has_pending_exception)->Number();
+  Handle<Object> value = Execution::ToNumber(number, &has_pending_exception);
   if (has_pending_exception) {
     ASSERT(isolate->has_pending_exception());
     return Failure::Exception();
@@ -13754,7 +13756,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberFormat) {
   if (!number_format) return isolate->ThrowIllegalOperation();
 
   icu::UnicodeString result;
-  number_format->format(value, result);
+  number_format->format(value->Number(), result);
 
   return *isolate->factory()->NewStringFromTwoByte(
       Vector<const uint16_t>(
@@ -13993,6 +13995,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FlattenString) {
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyContextDisposed) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 0);
+  isolate->heap()->NotifyContextDisposed();
+  return isolate->heap()->undefined_value();
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_MigrateInstance) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
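Runtime_NotifyContextDisposed above is a thin wrapper over Heap::NotifyContextDisposed(), exposed so a test can give the heap the same hint an embedder would when dropping a context. With --allow-natives-syntax it is called directly, as debug-prepare-step-in.js does later in this commit:

    %NotifyContextDisposed();  // hint that a context has gone away
    gc();                      // with --expose-gc, follow up with a collection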

deps/v8/src/runtime.h | 2 (vendored)

@@ -101,6 +101,7 @@ namespace internal {
   F(GetOptimizationStatus, -1, 1) \
   F(GetOptimizationCount, 1, 1) \
   F(CompileForOnStackReplacement, 1, 1) \
+  F(SetAllocationTimeout, 2, 1) \
   F(AllocateInNewSpace, 1, 1) \
   F(AllocateInOldPointerSpace, 1, 1) \
   F(AllocateInOldDataSpace, 1, 1) \
@@ -110,6 +111,7 @@ namespace internal {
   F(DebugPrepareStepInIfStepping, 1, 1) \
   F(FlattenString, 1, 1) \
   F(MigrateInstance, 1, 1) \
+  F(NotifyContextDisposed, 0, 1) \
   \
   /* Array join support */ \
   F(PushIfAbsent, 2, 1) \
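F(SetAllocationTimeout, 2, 1) registers the debug-only native defined in runtime.cc above: the first argument is stored into FLAG_gc_interval and the second becomes the heap's allocation timeout, roughly a countdown of allocations until a GC is forced; in release builds the body is compiled out and the call just returns undefined. Usage under --allow-natives-syntax, as in regress-crbug-274438.js below:

    // Debug build: force a GC after (roughly) the given number of allocations.
    %SetAllocationTimeout(100, 0);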

deps/v8/src/version.cc | 2 (vendored)

@@ -35,7 +35,7 @@
 #define MAJOR_VERSION 3
 #define MINOR_VERSION 20
 #define BUILD_NUMBER 17
-#define PATCH_LEVEL 0
+#define PATCH_LEVEL 7
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0

deps/v8/src/x64/code-stubs-x64.cc | 2 (vendored)

@@ -3568,6 +3568,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
 
+    __ Integer32ToSmi(rax, rax);
     __ push(rax);
     __ push(rdi);
     __ push(rbx);
@@ -3578,6 +3579,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
     __ pop(rbx);
     __ pop(rdi);
     __ pop(rax);
+    __ SmiToInteger32(rax, rax);
   }
   __ jmp(&done);
 

deps/v8/src/x64/lithium-codegen-x64.cc | 1 (vendored)

@@ -1216,6 +1216,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
     __ cmpl(dividend, Immediate(0));
     __ j(less, &negative, Label::kNear);
     __ sarl(dividend, Immediate(power));
+    if (divisor < 0) __ negl(dividend);
     __ jmp(&done, Label::kNear);
 
     __ bind(&negative);
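The added negl matters on the power-of-two fast path: a non-negative dividend never takes the &negative branch, so before this fix the optimized code for a negative constant divisor returned the unnegated shift result. Worked through in plain JavaScript (shift-for-integer-div.js at the end of this commit pins down the same cases):

    // Optimized x / -4 is computed as a right shift plus a negate:
    // 8 >> 2 == 2, and the new negl turns that into the correct -2.
    assertEquals(-2, 8 / -4);
    assertEquals(-0, 0 / -4);  // the sign of zero must survive as well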

deps/v8/src/x64/lithium-x64.cc | 7 (vendored)

@@ -1778,13 +1778,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
 }
 
 
-LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
-  // The control instruction marking the end of a block that completed
-  // abruptly (e.g., threw an exception). There is nothing specific to do.
-  return NULL;
-}
-
-
 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
   LOperand* value = UseFixed(instr->value(), rax);
   return MarkAsCall(new(zone()) LThrow(value), instr);

deps/v8/src/x64/macro-assembler-x64.cc | 4 (vendored)

@@ -3859,7 +3859,7 @@ void MacroAssembler::Allocate(int object_size,
   LoadAllocationTopHelper(result, scratch, flags);
 
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
     testq(result, Immediate(kDoubleAlignmentMask));
     Check(zero, kAllocationIsNotDoubleAligned);
@@ -3938,7 +3938,7 @@ void MacroAssembler::Allocate(Register object_size,
   LoadAllocationTopHelper(result, scratch, flags);
 
   // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
+  // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
     testq(result, Immediate(kDoubleAlignmentMask));
     Check(zero, kAllocationIsNotDoubleAligned);

deps/v8/test/mjsunit/compiler/regress-shared-deopt.js | 65 (vendored, new file)

@@ -0,0 +1,65 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var soft = false;
+
+// disable optimization of this global
+soft = true;
+soft = false;
+soft = true;
+soft = false;
+
+function test() {
+  var f4 = makeF(4);
+  var f5 = makeF(5);
+
+  function makeF(i) {
+    return function f(x) {
+      if (x == 0) return i;
+      if (i == 4) if (soft) print("wahoo" + i);
+      return f4(x - 1);
+    }
+  }
+
+  f4(9);
+  f4(11);
+  %OptimizeFunctionOnNextCall(f4);
+  f4(12);
+
+  f5(9);
+  f5(11);
+  %OptimizeFunctionOnNextCall(f5);
+  f5(12);
+
+  soft = true;
+  f4(1);
+  f5(9);
+}
+
+test();

deps/v8/test/mjsunit/compiler/type-feedback-after-throw.js | 38 (vendored, new file)

@@ -0,0 +1,38 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+// Flags: --allow-natives-syntax
+
+function foo() {
+  throw "Error";
+  return 1 > 5;
+};
+
+try { foo() } catch(e) {}
+try { foo() } catch(e) {}
+%OptimizeFunctionOnNextCall(foo);
+try { foo() } catch(e) {}

deps/v8/test/mjsunit/regress/consolidated-holey-load.js | 40 (vendored, new file)

@@ -0,0 +1,40 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+// Flags: --allow-natives-syntax
+
+function foo(array) {
+  return array[0];
+}
+
+var a = [1, 2, , 4];  // Holey Smi elements.
+var b = ["abcd", 0];  // Fast elements.
+foo(b);  // Observe fast elements first, or the IC will transition without
+foo(a);  // going polymorphic.
+%OptimizeFunctionOnNextCall(foo);
+var c = [, 0];
+assertEquals(undefined, foo(c));  // Elided hole check will leak the hole.

deps/v8/test/mjsunit/regress/debug-prepare-step-in.js | 54 (vendored, new file)

@@ -0,0 +1,54 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+// Flags: --expose-debug-as debug --allow-natives-syntax --expose-gc
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+function breakListener(event, exec_state, event_data, data) {
+  exec_state.prepareStep(Debug.StepAction.StepIn, 1);
+}
+
+Debug.setListener(breakListener);
+
+var o = {x:function() { return 10; }};
+
+function f(o) {
+  var m = "x";
+  o[m]();
+}
+
+Debug.setBreakPoint(f, 2, 0);
+
+f(o);
+
+%NotifyContextDisposed();
+function g() {
+  gc();
+}
+
+g();

deps/v8/test/mjsunit/regress/regress-2790.js | 31 (vendored, new file)

@@ -0,0 +1,31 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+// Test that we can create arrays of any size.
+for (var i = 1000; i < 1000000; i += 97) {
+  new Array(i);
+}

deps/v8/test/mjsunit/regress/regress-2843.js | 45 (vendored, new file)

@@ -0,0 +1,45 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+// Flags: --allow-natives-syntax
+
+function bailout() { throw "bailout"; }
+var global;
+
+function foo(x, fun) {
+  var a = x + 1;
+  var b = x + 2;  // Need another Simulate to fold the first one into.
+  global = true;  // Need a side effect to deopt to.
+  fun();
+  return a;
+}
+
+assertThrows("foo(1, bailout)");
+assertThrows("foo(1, bailout)");
+%OptimizeFunctionOnNextCall(foo);
+assertThrows("foo(1, bailout)");
+assertEquals(2, foo(1, function() {}));

deps/v8/test/mjsunit/regress/regress-crbug-274438.js | 43 (vendored, new file)

@@ -0,0 +1,43 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+// Flags: --allow-natives-syntax
+
+function f(a, b) {
+  var x = { a:a };
+  switch(b) { case "string": }
+  var y = { b:b };
+  return y;
+}
+
+f("a", "b");
+f("a", "b");
+%OptimizeFunctionOnNextCall(f);
+f("a", "b");
+%SetAllocationTimeout(100, 0);
+var killer = f("bang", "bo" + "om");
+assertEquals("boom", killer.b);

deps/v8/test/mjsunit/regress/regress-crbug-280333.js | 47 (vendored, new file)

@@ -0,0 +1,47 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+// Flags: --allow-natives-syntax
+
+function funky() { return false; }
+var global;
+
+function foo(x, fun) {
+  var a = x + 1;
+  var b = x + 2;  // Need another Simulate to fold the first one into.
+  global = true;  // Need a side effect to deopt to.
+  if (fun()) {
+    return a;
+  }
+  return 0;
+}
+
+assertEquals(0, foo(1, funky));
+assertEquals(0, foo(1, funky));
+%OptimizeFunctionOnNextCall(foo);
+assertEquals(0, foo(1, funky));
+assertEquals(2, foo(1, function() { return true; }));

deps/v8/test/mjsunit/regress/regress-crbug-282736.js | 59 (vendored, new file)

@@ -0,0 +1,59 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// [24 lines of the standard V8 BSD license header, identical to the file above]
+
+function funcify(obj) {
+  var type = typeof obj;
+  if (type === "object") {
+    var funcified = {}, foo = {};
+    for (var prop in obj) {
+      funcified[prop] = funcify(obj[prop]);
+      foo[prop] = true;
+    }
+    return funcified;
+  } else if (type === "function") {
+    return obj;
+  } else {
+    return function () { return obj; };
+  }
+}
+
+var obj = {};
+
+obj.A = 1;
+obj.B = function () { return 2; };
+obj.C = 3;
+obj.D = 4;
+
+var funcified = funcify(obj);
+
+assertEquals("function", typeof funcified.A);
+assertEquals(1, funcified.A());
+assertEquals("function", typeof funcified.B);
+assertEquals(2, funcified.B());
+assertEquals("function", typeof funcified.C);
+assertEquals("function", typeof funcified.D);
+assertEquals(4, funcified.D());

deps/v8/test/mjsunit/shift-for-integer-div.js | 52 (vendored)

@@ -25,35 +25,63 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+// Flags: --allow-natives-syntax
+
 function divp4(x) {
   return x / 4;
 }
 
-for (var i = 0; i < 10000; i+=4) {
-  assertEquals(i >> 2, divp4(i));
-}
+divp4(8);
+divp4(8);
+%OptimizeFunctionOnNextCall(divp4);
+assertEquals(2, divp4(8));
 assertEquals(0.5, divp4(2));
 
 
 function divn4(x) {
   return x / (-4);
 }
 
-for (var i = 0; i < 10000; i+=4) {
-  assertEquals(-(i >> 2), divn4(i));
-}
+divn4(8);
+divn4(8);
+%OptimizeFunctionOnNextCall(divn4);
+assertEquals(-2, divn4(8));
+// Check for (0 / -x)
 assertEquals(-0, divn4(0));
 
 
+// Check for (kMinInt / -1)
 function divn1(x) {
   return x / (-1);
 }
 
-for (var i = 0; i < 10000; i++) {
-  assertEquals(-i, divn1(i));
+var two_31 = 1 << 31;
+divn1(2);
+divn1(2);
+%OptimizeFunctionOnNextCall(divn1);
+assertEquals(-2, divn1(2));
+assertEquals(two_31, divn1(-two_31));
+
+
+//Check for truncating to int32 case
+function divp4t(x) {
+  return (x / 4) | 0;
 }
 
-var min_int = -(0x7FFFFFFF)-1;
-assertEquals(-min_int, divn1(min_int));
+divp4t(8);
+divp4t(8);
+%OptimizeFunctionOnNextCall(divp4t);
+assertEquals(-1, divp4t(-5));
+assertEquals(1, divp4t(5));
+assertOptimized(divp4t);
+
+function divn4t(x) {
+  return (x / -4) | 0;
+}
+
+divn4t(8);
+divn4t(8);
+%OptimizeFunctionOnNextCall(divn4t);
+assertEquals(1, divn4t(-5));
+assertEquals(-1, divn4t(5));
+assertOptimized(divn4t);
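One subtlety the updated divn1 test pins down: `1 << 31` already wraps to -2147483648 (kMinInt) in int32 arithmetic, so `divn1(-two_31)` computes 2147483648 / -1 and must produce exactly -2147483648. That case cannot stay on the int32 fast path, since 2147483648 is outside int32 range and the mirror case kMinInt / -1 overflows (x86 idiv even traps on it), which is presumably why the test asserts it explicitly. A compact restatement:

    var two_31 = 1 << 31;                  // -2147483648 (kMinInt)
    assertEquals(two_31, divn1(-two_31));  // 2147483648 / -1 === -2147483648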