deps: update v8 to 3.20.17.7

Author: Fedor Indutny
Date:   2013-09-18 20:33:28 +04:00
parent 59a075e108
commit a1cf3ada62
47 changed files with 776 additions and 243 deletions


@ -4430,6 +4430,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r0);
__ push(r0);
__ push(r1);
__ push(r2);
@ -4440,6 +4441,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ pop(r2);
__ pop(r1);
__ pop(r0);
__ SmiUntag(r0);
}
__ b(&done);
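Across all four ports in this commit (r0 here, eax on ia32, rax on x64, a0 on MIPS) the fix is the same: the register holds the raw argument count, and any word pushed inside the FrameScope while CreateAllocationSiteStub runs must look like a tagged value, or the GC could misread it as a heap pointer. A minimal sketch of the 32-bit Smi encoding this relies on (illustrative helpers, not V8's API):

```cpp
#include <cstdint>

// V8 tags a small integer on 32-bit targets by shifting it left one bit,
// leaving the low bit 0; heap pointers always carry a low bit of 1.
int32_t SmiTag(int32_t value) { return value << 1; }
int32_t SmiUntag(int32_t smi) { return smi >> 1; }

// The collector's quick classification of a stack slot:
bool LooksLikeSmi(int32_t word) { return (word & 1) == 0; }
```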


@ -1866,13 +1866,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
}
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
// The control instruction marking the end of a block that completed
// abruptly (e.g., threw an exception). There is nothing specific to do.
return NULL;
}
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), r0);
return MarkAsCall(new(zone()) LThrow(value), instr);


@ -1398,6 +1398,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
__ rsb(dividend, dividend, Operand(0), LeaveCC, lt);
__ mov(dividend, Operand(dividend, ASR, power));
if (divisor > 0) __ rsb(dividend, dividend, Operand(0), LeaveCC, lt);
if (divisor < 0) __ rsb(dividend, dividend, Operand(0), LeaveCC, gt);
return; // Don't fall through to "__ rsb" below.
} else {
// Deoptimize if remainder is not 0.
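The added gt line fixes truncating division by a negative power of two: after the magnitude shift, the quotient must be negated whenever dividend and divisor differ in sign, not only when the dividend was negative (the ia32 and x64 hunks below add the matching neg/negl). A sketch of the intended arithmetic, assuming |divisor| == 1 << power and ignoring the INT_MIN edge case handled elsewhere:

```cpp
#include <cstdint>

int32_t DividePow2(int32_t dividend, int32_t divisor, int power) {
  int32_t magnitude = (dividend < 0 ? -dividend : dividend) >> power;
  bool negate = (dividend < 0) != (divisor < 0);
  return negate ? -magnitude : magnitude;
}
// DividePow2(-9, 2, 1) == -4, and DividePow2(9, -2, 1) == -4; before the
// fix the second case incorrectly produced +4 on this fast path.
```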


@ -1732,12 +1732,16 @@ void MacroAssembler::Allocate(int object_size,
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
Label aligned;
b(eq, &aligned);
if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
cmp(result, Operand(ip));
b(hs, gc_required);
}
mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
bind(&aligned);
@ -1830,12 +1834,16 @@ void MacroAssembler::Allocate(Register object_size,
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
Label aligned;
b(eq, &aligned);
if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
cmp(result, Operand(ip));
b(hs, gc_required);
}
mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
bind(&aligned);
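Both Allocate variants gain the same guard: in new space the allocation limit is itself double-aligned, so storing a one-word filler can never overrun it, but for pretenured old-data-space allocations that guarantee does not hold and the limit must be checked before the filler store. A rough sketch of the fast path, with illustrative names and constants:

```cpp
#include <cstdint>

constexpr uintptr_t kDoubleAlignment = 8;
constexpr uintptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;

// Returns the aligned allocation top, or 0 if a GC is required first.
uintptr_t AlignTop(uintptr_t top, uintptr_t limit, bool pretenure_old_data) {
  if ((top & kDoubleAlignmentMask) == 0) return top;  // already aligned
  if (pretenure_old_data && top >= limit) return 0;   // filler would cross limit
  // The real code stores one_pointer_filler_map() at 'top' here so the
  // skipped word still parses as a valid (filler) heap object.
  return top + kDoubleAlignment / 2;                  // skip one pointer-sized word
}
```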

deps/v8/src/ast.cc

@ -708,7 +708,9 @@ void AstVisitor::VisitDeclarations(ZoneList<Declaration*>* declarations) {
void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) {
for (int i = 0; i < statements->length(); i++) {
Visit(statements->at(i));
Statement* stmt = statements->at(i);
Visit(stmt);
if (stmt->IsJump()) break;
}
}


@ -1627,7 +1627,7 @@ bool Debug::StepNextContinue(BreakLocationIterator* break_location_iterator,
// object.
bool Debug::IsDebugBreak(Address addr) {
Code* code = Code::GetCodeFromTargetAddress(addr);
return code->is_debug_break();
return code->is_debug_stub() && code->extra_ic_state() == DEBUG_BREAK;
}


@ -166,7 +166,9 @@ class Deoptimizer : public Malloced {
int output_count() const { return output_count_; }
Code::Kind compiled_code_kind() const { return compiled_code_->kind(); }
Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
BailoutType bailout_type() const { return bailout_type_; }
// Number of created JS frames. Not all created frames are necessarily JS.
int jsframe_count() const { return jsframe_count_; }


@ -439,6 +439,43 @@ AllocationSpace Heap::TargetSpaceId(InstanceType type) {
}
bool Heap::AllowedToBeMigrated(HeapObject* object, AllocationSpace dst) {
// Object migration is governed by the following rules:
//
// 1) Objects in new-space can be migrated to one of the old spaces
// that matches their target space or they stay in new-space.
// 2) Objects in old-space stay in the same space when migrating.
// 3) Fillers (two or more words) can migrate due to left-trimming of
// fixed arrays in new-space, old-data-space and old-pointer-space.
// 4) Fillers (one word) can never migrate, they are skipped by
// incremental marking explicitly to prevent invalid pattern.
//
// Since this function is used for debugging only, we do not place
// asserts here, but check everything explicitly.
if (object->map() == one_pointer_filler_map()) return false;
InstanceType type = object->map()->instance_type();
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
AllocationSpace src = chunk->owner()->identity();
switch (src) {
case NEW_SPACE:
return dst == src || dst == TargetSpaceId(type);
case OLD_POINTER_SPACE:
return dst == src && (dst == TargetSpaceId(type) || object->IsFiller());
case OLD_DATA_SPACE:
return dst == src && dst == TargetSpaceId(type);
case CODE_SPACE:
return dst == src && type == CODE_TYPE;
case MAP_SPACE:
case CELL_SPACE:
case PROPERTY_CELL_SPACE:
case LO_SPACE:
return false;
}
UNREACHABLE();
return false;
}
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
CopyWords(reinterpret_cast<Object**>(dst),
reinterpret_cast<Object**>(src),

deps/v8/src/heap.cc

@ -2088,10 +2088,13 @@ class ScavengingVisitor : public StaticVisitorBase {
MaybeObject* maybe_result;
if (object_contents == DATA_OBJECT) {
// TODO(mstarzinger): Turn this check into a regular assert soon!
CHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
} else {
maybe_result =
heap->old_pointer_space()->AllocateRaw(allocation_size);
// TODO(mstarzinger): Turn this check into a regular assert soon!
CHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
}
Object* result = NULL; // Initialization to please compiler.
@ -2121,6 +2124,8 @@ class ScavengingVisitor : public StaticVisitorBase {
return;
}
}
// TODO(mstarzinger): Turn this check into a regular assert soon!
CHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
Object* result = allocation->ToObjectUnchecked();
@ -4013,10 +4018,10 @@ MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
return AllocateByteArray(length);
}
int size = ByteArray::SizeFor(length);
AllocationSpace space =
(size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_DATA_SPACE;
Object* result;
{ MaybeObject* maybe_result = (size <= Page::kMaxNonCodeHeapObjectSize)
? old_data_space_->AllocateRaw(size)
: lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
{ MaybeObject* maybe_result = AllocateRaw(size, space, space);
if (!maybe_result->ToObject(&result)) return maybe_result;
}

deps/v8/src/heap.h

@ -1391,6 +1391,10 @@ class Heap {
inline OldSpace* TargetSpace(HeapObject* object);
static inline AllocationSpace TargetSpaceId(InstanceType type);
// Checks whether the given object is allowed to be migrated from its
// current space into the given destination space. Used for debugging.
inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
// Sets the stub_cache_ (only used when expanding the dictionary).
void public_set_code_stubs(UnseededNumberDictionary* value) {
roots_[kCodeStubsRootIndex] = value;
@ -1490,6 +1494,10 @@ class Heap {
inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
#ifdef DEBUG
void set_allocation_timeout(int timeout) {
allocation_timeout_ = timeout;
}
bool disallow_allocation_failure() {
return disallow_allocation_failure_;
}


@ -163,11 +163,7 @@ void HEnvironmentLivenessAnalysisPhase::UpdateLivenessAtInstruction(
live->Clear();
for (int i = 0; i < enter->return_targets()->length(); ++i) {
int return_id = enter->return_targets()->at(i)->block_id();
// When an AbnormalExit is involved, it can happen that the return
// target block doesn't actually exist.
if (return_id < live_at_block_start_.length()) {
live->Union(*live_at_block_start_[return_id]);
}
live->Union(*live_at_block_start_[return_id]);
}
last_simulate_ = NULL;
break;


@ -63,7 +63,6 @@ class LChunkBuilder;
#define HYDROGEN_CONCRETE_INSTRUCTION_LIST(V) \
V(AbnormalExit) \
V(AccessArgumentsAt) \
V(Add) \
V(Allocate) \
@ -1426,16 +1425,6 @@ class HReturn: public HTemplateControlInstruction<0, 3> {
};
class HAbnormalExit: public HTemplateControlInstruction<0, 0> {
public:
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
DECLARE_CONCRETE_INSTRUCTION(AbnormalExit)
};
class HUnaryOperation: public HTemplateInstruction<1> {
public:
HUnaryOperation(HValue* value, HType type = HType::Tagged())
@ -4081,6 +4070,7 @@ class HStringCompareAndBranch: public HTemplateControlInstruction<2, 3> {
SetOperandAt(1, left);
SetOperandAt(2, right);
set_representation(Representation::Tagged());
SetGVNFlag(kChangesNewSpacePromotion);
}
HValue* context() { return OperandAt(0); }


@ -2729,16 +2729,6 @@ void TestContext::BuildBranch(HValue* value) {
if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
builder->Bailout(kArgumentsObjectValueInATestContext);
}
if (value->IsConstant()) {
HConstant* constant_value = HConstant::cast(value);
if (constant_value->BooleanValue()) {
builder->current_block()->Goto(if_true(), builder->function_state());
} else {
builder->current_block()->Goto(if_false(), builder->function_state());
}
builder->set_current_block(NULL);
return;
}
HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
ToBooleanStub::Types expected(condition()->to_boolean_types());
@ -3083,7 +3073,9 @@ void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
for (int i = 0; i < statements->length(); i++) {
CHECK_ALIVE(Visit(statements->at(i)));
Statement* stmt = statements->at(i);
CHECK_ALIVE(Visit(stmt));
if (stmt->IsJump()) break;
}
}
@ -5365,8 +5357,6 @@ void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
HThrow* instr = Add<HThrow>(value);
instr->set_position(expr->position());
Add<HSimulate>(expr->id());
current_block()->FinishExit(new(zone()) HAbnormalExit);
set_current_block(NULL);
}
@ -5536,6 +5526,7 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
bool has_smi_or_object_maps = false;
bool has_js_array_access = false;
bool has_non_js_array_access = false;
bool has_seen_holey_elements = false;
Handle<Map> most_general_consolidated_map;
for (int i = 0; i < maps->length(); ++i) {
Handle<Map> map = maps->at(i);
@ -5558,6 +5549,10 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
} else {
return NULL;
}
// Remember if we've ever seen holey elements.
if (IsHoleyElementsKind(map->elements_kind())) {
has_seen_holey_elements = true;
}
// Remember the most general elements kind, the code for its load will
// properly handle all of the more specific cases.
if ((i == 0) || IsMoreGeneralElementsKindTransition(
@ -5569,10 +5564,15 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
if (!has_double_maps && !has_smi_or_object_maps) return NULL;
HCheckMaps* check_maps = Add<HCheckMaps>(object, maps);
// FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
// If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
ElementsKind consolidated_elements_kind = has_seen_holey_elements
? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
: most_general_consolidated_map->elements_kind();
HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
object, key, val, check_maps,
most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
most_general_consolidated_map->elements_kind(),
consolidated_elements_kind,
false, NEVER_RETURN_HOLE, STANDARD_STORE);
return instr;
}
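The has_seen_holey_elements tracking closes a hole-check bypass: FAST_ELEMENTS is more general than FAST_HOLEY_SMI_ELEMENTS in the transition lattice yet performs no hole checks, so consolidating a holey map into a packed kind could leak the hole value to user code (the regression test near the end of this commit demonstrates it). A sketch of the widening, mirroring GetHoleyElementsKind with assumed kind names:

```cpp
enum ElementsKind {
  FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS,
  FAST_ELEMENTS, FAST_HOLEY_ELEMENTS,
  FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS
};

// Widen the most general kind to its holey variant if any input map was holey.
ElementsKind ConsolidatedKind(ElementsKind most_general, bool seen_holey) {
  if (!seen_holey) return most_general;
  switch (most_general) {
    case FAST_SMI_ELEMENTS:    return FAST_HOLEY_SMI_ELEMENTS;
    case FAST_ELEMENTS:        return FAST_HOLEY_ELEMENTS;
    case FAST_DOUBLE_ELEMENTS: return FAST_HOLEY_DOUBLE_ELEMENTS;
    default:                   return most_general;  // already holey
  }
}
```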


@ -4451,6 +4451,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(eax);
__ push(eax);
__ push(edi);
__ push(ebx);
@ -4461,6 +4462,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ pop(ebx);
__ pop(edi);
__ pop(eax);
__ SmiUntag(eax);
}
__ jmp(&done);


@ -1441,6 +1441,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
__ cmp(dividend, 0);
__ j(less, &negative, Label::kNear);
__ sar(dividend, power);
if (divisor < 0) __ neg(dividend);
__ jmp(&done, Label::kNear);
__ bind(&negative);


@ -1880,13 +1880,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
}
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
// The control instruction marking the end of a block that completed
// abruptly (e.g., threw an exception). There is nothing specific to do.
return NULL;
}
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* context = UseFixed(instr->context(), esi);
LOperand* value = UseFixed(instr->value(), eax);


@ -1307,26 +1307,29 @@ void MacroAssembler::Allocate(int object_size,
// Load address of new object into result.
LoadAllocationTopHelper(result, scratch, flags);
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
mov(Operand(result, 0),
Immediate(isolate()->factory()->one_pointer_filler_map()));
add(result, Immediate(kDoubleSize / 2));
bind(&aligned);
}
Register top_reg = result_end.is_valid() ? result_end : result;
// Calculate new top and bail out if space is exhausted.
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
Register top_reg = result_end.is_valid() ? result_end : result;
if (!top_reg.is(result)) {
mov(top_reg, result);
}
@ -1381,14 +1384,21 @@ void MacroAssembler::Allocate(int header_size,
// Load address of new object into result.
LoadAllocationTopHelper(result, scratch, flags);
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
mov(Operand(result, 0),
Immediate(isolate()->factory()->one_pointer_filler_map()));
add(result, Immediate(kDoubleSize / 2));
@ -1396,9 +1406,6 @@ void MacroAssembler::Allocate(int header_size,
}
// Calculate new top and bail out if space is exhausted.
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
// We assume that element_count*element_size + header_size does not
// overflow.
if (element_count_type == REGISTER_VALUE_IS_SMI) {
@ -1452,14 +1459,21 @@ void MacroAssembler::Allocate(Register object_size,
// Load address of new object into result.
LoadAllocationTopHelper(result, scratch, flags);
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
mov(Operand(result, 0),
Immediate(isolate()->factory()->one_pointer_filler_map()));
add(result, Immediate(kDoubleSize / 2));
@ -1467,9 +1481,6 @@ void MacroAssembler::Allocate(Register object_size,
}
// Calculate new top and bail out if space is exhausted.
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
if (!object_size.is(result_end)) {
mov(result_end, object_size);
}

deps/v8/src/ic.cc

@ -379,7 +379,7 @@ void IC::Clear(Address address) {
Code* target = GetTargetAtAddress(address);
// Don't clear debug break inline cache as it will remove the break point.
if (target->is_debug_break()) return;
if (target->is_debug_stub()) return;
switch (target->kind()) {
case Code::LOAD_IC: return LoadIC::Clear(address, target);


@ -1691,7 +1691,7 @@ static const char* DropFrames(Vector<StackFrame*> frames,
Code* pre_top_frame_code = pre_top_frame->LookupCode();
bool frame_has_padding;
if (pre_top_frame_code->is_inline_cache_stub() &&
pre_top_frame_code->is_debug_break()) {
pre_top_frame_code->is_debug_stub()) {
// OK, we can drop inline cache calls.
*mode = Debug::FRAME_DROPPED_IN_IC_CALL;
frame_has_padding = Debug::FramePaddingLayout::kIsSupported;


@ -2743,12 +2743,10 @@ void MarkCompactCollector::MigrateObject(Address dst,
int size,
AllocationSpace dest) {
HEAP_PROFILE(heap(), ObjectMoveEvent(src, dst));
// TODO(hpayer): Replace that check with an assert.
// TODO(hpayer): Replace these checks with asserts.
CHECK(heap()->AllowedToBeMigrated(HeapObject::FromAddress(src), dest));
CHECK(dest != LO_SPACE && size <= Page::kMaxNonCodeHeapObjectSize);
if (dest == OLD_POINTER_SPACE) {
// TODO(hpayer): Replace this check with an assert.
HeapObject* heap_object = HeapObject::FromAddress(src);
CHECK(heap_->TargetSpace(heap_object) == heap_->old_pointer_space());
Address src_slot = src;
Address dst_slot = dst;
ASSERT(IsAligned(size, kPointerSize));
@ -2794,13 +2792,6 @@ void MarkCompactCollector::MigrateObject(Address dst,
Code::cast(HeapObject::FromAddress(dst))->Relocate(dst - src);
} else {
ASSERT(dest == OLD_DATA_SPACE || dest == NEW_SPACE);
// Objects in old data space can just be moved by compaction to a different
// page in old data space.
// TODO(hpayer): Replace the following check with an assert.
CHECK(!heap_->old_data_space()->Contains(src) ||
(heap_->old_data_space()->Contains(dst) &&
heap_->TargetSpace(HeapObject::FromAddress(src)) ==
heap_->old_data_space()));
heap()->MoveBlock(dst, src, size);
}
Memory::Address_at(src) = dst;


@ -4808,12 +4808,14 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
1 << 5 | // a1
1 << 6; // a2
__ SmiTag(a0);
__ MultiPush(kSavedRegs);
CreateAllocationSiteStub create_stub;
__ CallStub(&create_stub);
__ MultiPop(kSavedRegs);
__ SmiUntag(a0);
}
__ Branch(&done);


@ -762,7 +762,7 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
}
void LCodeGen::DeoptimizeIf(Condition cc,
void LCodeGen::DeoptimizeIf(Condition condition,
LEnvironment* environment,
Deoptimizer::BailoutType bailout_type,
Register src1,
@ -789,16 +789,16 @@ void LCodeGen::DeoptimizeIf(Condition cc,
if (info()->ShouldTrapOnDeopt()) {
Label skip;
if (cc != al) {
__ Branch(&skip, NegateCondition(cc), src1, src2);
if (condition != al) {
__ Branch(&skip, NegateCondition(condition), src1, src2);
}
__ stop("trap_on_deopt");
__ bind(&skip);
}
ASSERT(info()->IsStub() || frame_is_built_);
if (cc == al && frame_is_built_) {
__ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2);
if (condition == al && frame_is_built_) {
__ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
} else {
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
@ -811,19 +811,19 @@ void LCodeGen::DeoptimizeIf(Condition cc,
!frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
__ Branch(&deopt_jump_table_.last().label, cc, src1, src2);
__ Branch(&deopt_jump_table_.last().label, condition, src1, src2);
}
}
void LCodeGen::DeoptimizeIf(Condition cc,
void LCodeGen::DeoptimizeIf(Condition condition,
LEnvironment* environment,
Register src1,
const Operand& src2) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
DeoptimizeIf(cc, environment, bailout_type, src1, src2);
DeoptimizeIf(condition, environment, bailout_type, src1, src2);
}
@ -1993,20 +1993,22 @@ int LCodeGen::GetNextEmittedBlock() const {
template<class InstrType>
void LCodeGen::EmitBranch(InstrType instr,
Condition cc, Register src1, const Operand& src2) {
Condition condition,
Register src1,
const Operand& src2) {
int left_block = instr->TrueDestination(chunk_);
int right_block = instr->FalseDestination(chunk_);
int next_block = GetNextEmittedBlock();
if (right_block == left_block || cc == al) {
if (right_block == left_block || condition == al) {
EmitGoto(left_block);
} else if (left_block == next_block) {
__ Branch(chunk_->GetAssemblyLabel(right_block),
NegateCondition(cc), src1, src2);
NegateCondition(condition), src1, src2);
} else if (right_block == next_block) {
__ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
__ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
} else {
__ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
__ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
__ Branch(chunk_->GetAssemblyLabel(right_block));
}
}
@ -2014,7 +2016,9 @@ void LCodeGen::EmitBranch(InstrType instr,
template<class InstrType>
void LCodeGen::EmitBranchF(InstrType instr,
Condition cc, FPURegister src1, FPURegister src2) {
Condition condition,
FPURegister src1,
FPURegister src2) {
int right_block = instr->FalseDestination(chunk_);
int left_block = instr->TrueDestination(chunk_);
@ -2023,16 +2027,29 @@ void LCodeGen::EmitBranchF(InstrType instr,
EmitGoto(left_block);
} else if (left_block == next_block) {
__ BranchF(chunk_->GetAssemblyLabel(right_block), NULL,
NegateCondition(cc), src1, src2);
NegateCondition(condition), src1, src2);
} else if (right_block == next_block) {
__ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
__ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
condition, src1, src2);
} else {
__ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
__ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
condition, src1, src2);
__ Branch(chunk_->GetAssemblyLabel(right_block));
}
}
template<class InstrType>
void LCodeGen::EmitFalseBranchF(InstrType instr,
Condition condition,
FPURegister src1,
FPURegister src2) {
int false_block = instr->FalseDestination(chunk_);
__ BranchF(chunk_->GetAssemblyLabel(false_block), NULL,
condition, src1, src2);
}
void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
__ stop("LDebugBreak");
}
@ -2293,6 +2310,23 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
}
void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
if (instr->hydrogen()->representation().IsTagged()) {
Register input_reg = ToRegister(instr->object());
__ li(at, Operand(factory()->the_hole_value()));
EmitBranch(instr, eq, input_reg, Operand(at));
return;
}
DoubleRegister input_reg = ToDoubleRegister(instr->object());
EmitFalseBranchF(instr, eq, input_reg, input_reg);
Register scratch = scratch0();
__ FmoveHigh(scratch, input_reg);
EmitBranch(instr, eq, scratch, Operand(kHoleNanUpper32));
}
Condition LCodeGen::EmitIsObject(Register input,
Register temp1,
Register temp2,
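The new DoCmpHoleAndBranch above handles both representations: a tagged value is compared against the_hole_value directly, while an unboxed double encodes the hole as a distinguished NaN, detected by first excluding ordinary numbers (a NaN never compares equal to itself, hence the EmitFalseBranchF on eq) and then matching the upper word against kHoleNanUpper32. A portable sketch of that test; the bit pattern shown is an assumption for illustration, not quoted from V8:

```cpp
#include <cstdint>
#include <cstring>

constexpr uint32_t kHoleNanUpper32 = 0xFFF7FFFF;  // assumed encoding

bool IsTheHoleNan(double value) {
  if (value == value) return false;  // an ordinary number cannot be the hole
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);
  return static_cast<uint32_t>(bits >> 32) == kHoleNanUpper32;
}
```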
@ -4149,17 +4183,17 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
}
void LCodeGen::ApplyCheckIf(Condition cc,
void LCodeGen::ApplyCheckIf(Condition condition,
LBoundsCheck* check,
Register src1,
const Operand& src2) {
if (FLAG_debug_code && check->hydrogen()->skip_check()) {
Label done;
__ Branch(&done, NegateCondition(cc), src1, src2);
__ Branch(&done, NegateCondition(condition), src1, src2);
__ stop("eliminated bounds check failed");
__ bind(&done);
} else {
DeoptimizeIf(cc, check->environment(), src1, src2);
DeoptimizeIf(condition, check->environment(), src1, src2);
}
}
@ -4702,29 +4736,6 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
Register temp1 = ToRegister(instr->temp());
Register temp2 = ToRegister(instr->temp2());
bool convert_hole = false;
HValue* change_input = instr->hydrogen()->value();
if (change_input->IsLoadKeyed()) {
HLoadKeyed* load = HLoadKeyed::cast(change_input);
convert_hole = load->UsesMustHandleHole();
}
Label no_special_nan_handling;
Label done;
if (convert_hole) {
DoubleRegister input_reg = ToDoubleRegister(instr->value());
__ BranchF(&no_special_nan_handling, NULL, eq, input_reg, input_reg);
__ Move(reg, scratch0(), input_reg);
Label canonicalize;
__ Branch(&canonicalize, ne, scratch0(), Operand(kHoleNanUpper32));
__ li(reg, factory()->undefined_value());
__ Branch(&done);
__ bind(&canonicalize);
__ Move(input_reg,
FixedDoubleArray::canonical_not_the_hole_nan_as_double());
}
__ bind(&no_special_nan_handling);
DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
@ -4738,7 +4749,6 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
__ sdc1(input_reg, MemOperand(reg, HeapNumber::kValueOffset));
// Now that we have finished with the object's real address, tag it.
__ Addu(reg, reg, kHeapObjectTag);
__ bind(&done);
}
@ -4780,7 +4790,7 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
void LCodeGen::EmitNumberUntagD(Register input_reg,
DoubleRegister result_reg,
bool allow_undefined_as_nan,
bool can_convert_undefined_to_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
@ -4788,16 +4798,14 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
Label load_smi, heap_number, done;
STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE >
NUMBER_CANDIDATE_IS_ANY_TAGGED);
if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) {
if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
// Smi check.
__ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
// Heap number map check.
__ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
if (!allow_undefined_as_nan) {
if (!can_convert_undefined_to_nan) {
DeoptimizeIf(ne, env, scratch, Operand(at));
} else {
Label heap_number, convert;
@ -4805,10 +4813,6 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
// Convert undefined (and hole) to NaN.
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) {
__ Branch(&convert, eq, input_reg, Operand(at));
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
}
DeoptimizeIf(ne, env, input_reg, Operand(at));
__ bind(&convert);
@ -4956,21 +4960,12 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Register input_reg = ToRegister(input);
DoubleRegister result_reg = ToDoubleRegister(result);
NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED;
HValue* value = instr->hydrogen()->value();
if (value->type().IsSmi()) {
mode = NUMBER_CANDIDATE_IS_SMI;
} else if (value->IsLoadKeyed()) {
HLoadKeyed* load = HLoadKeyed::cast(value);
if (load->UsesMustHandleHole()) {
if (load->hole_mode() == ALLOW_RETURN_HOLE) {
mode = NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE;
}
}
}
NumberUntagDMode mode = value->representation().IsSmi()
? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;
EmitNumberUntagD(input_reg, result_reg,
instr->hydrogen()->allow_undefined_as_nan(),
instr->hydrogen()->can_convert_undefined_to_nan(),
instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment(),
mode);


@ -277,16 +277,16 @@ class LCodeGen BASE_EMBEDDED {
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition cc,
void DeoptimizeIf(Condition condition,
LEnvironment* environment,
Deoptimizer::BailoutType bailout_type,
Register src1 = zero_reg,
const Operand& src2 = Operand(zero_reg));
void DeoptimizeIf(Condition cc,
void DeoptimizeIf(Condition condition,
LEnvironment* environment,
Register src1 = zero_reg,
const Operand& src2 = Operand(zero_reg));
void ApplyCheckIf(Condition cc,
void ApplyCheckIf(Condition condition,
LBoundsCheck* check,
Register src1 = zero_reg,
const Operand& src2 = Operand(zero_reg));
@ -329,14 +329,19 @@ class LCodeGen BASE_EMBEDDED {
void EmitGoto(int block);
template<class InstrType>
void EmitBranch(InstrType instr,
Condition cc,
Condition condition,
Register src1,
const Operand& src2);
template<class InstrType>
void EmitBranchF(InstrType instr,
Condition cc,
Condition condition,
FPURegister src1,
FPURegister src2);
template<class InstrType>
void EmitFalseBranchF(InstrType instr,
Condition condition,
FPURegister src1,
FPURegister src2);
void EmitCmpI(LOperand* left, LOperand* right);
void EmitNumberUntagD(Register input,
DoubleRegister result,


@ -1652,6 +1652,13 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
}
LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
HCompareHoleAndBranch* instr) {
LOperand* object = UseRegisterAtStart(instr->object());
return new(zone()) LCmpHoleAndBranch(object);
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = TempRegister();
@ -1778,13 +1785,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
}
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
// The control instruction marking the end of a block that completed
// abruptly (e.g., threw an exception). There is nothing specific to do.
return NULL;
}
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), a0);
return MarkAsCall(new(zone()) LThrow(value), instr);


@ -74,6 +74,7 @@ class LCodeGen;
V(ClassOfTestAndBranch) \
V(CompareNumericAndBranch) \
V(CmpObjectEqAndBranch) \
V(CmpHoleAndBranch) \
V(CmpMapAndBranch) \
V(CmpT) \
V(ConstantD) \
@ -887,12 +888,24 @@ class LCmpObjectEqAndBranch: public LControlInstruction<2, 0> {
LOperand* left() { return inputs_[0]; }
LOperand* right() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
"cmp-object-eq-and-branch")
DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch, "cmp-object-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareObjectEqAndBranch)
};
class LCmpHoleAndBranch: public LControlInstruction<1, 0> {
public:
explicit LCmpHoleAndBranch(LOperand* object) {
inputs_[0] = object;
}
LOperand* object() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(CmpHoleAndBranch, "cmp-hole-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareHoleAndBranch)
};
class LIsObjectAndBranch: public LControlInstruction<1, 1> {
public:
LIsObjectAndBranch(LOperand* value, LOperand* temp) {


@ -2944,12 +2944,15 @@ void MacroAssembler::Allocate(int object_size,
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
And(scratch2, result, Operand(kDoubleAlignmentMask));
Label aligned;
Branch(&aligned, eq, scratch2, Operand(zero_reg));
if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
Branch(gc_required, Ugreater_equal, result, Operand(t9));
}
li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
sw(scratch2, MemOperand(result));
Addu(result, result, Operand(kDoubleSize / 2));
@ -3028,12 +3031,15 @@ void MacroAssembler::Allocate(Register object_size,
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
And(scratch2, result, Operand(kDoubleAlignmentMask));
Label aligned;
Branch(&aligned, eq, scratch2, Operand(zero_reg));
if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
Branch(gc_required, Ugreater_equal, result, Operand(t9));
}
li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
sw(scratch2, MemOperand(result));
Addu(result, result, Operand(kDoubleSize / 2));


@ -235,6 +235,14 @@ class MacroAssembler: public Assembler {
mfc1(dst_high, FPURegister::from_code(src.code() + 1));
}
inline void FmoveHigh(Register dst_high, FPURegister src) {
mfc1(dst_high, FPURegister::from_code(src.code() + 1));
}
inline void FmoveLow(Register dst_low, FPURegister src) {
mfc1(dst_low, src);
}
inline void Move(FPURegister dst, Register src_low, Register src_high) {
mtc1(src_low, dst);
mtc1(src_high, FPURegister::from_code(dst.code() + 1));


@ -4071,8 +4071,8 @@ bool Code::is_inline_cache_stub() {
}
bool Code::is_debug_break() {
return ic_state() == DEBUG_STUB && extra_ic_state() == DEBUG_BREAK;
bool Code::is_debug_stub() {
return ic_state() == DEBUG_STUB;
}
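The rename makes the predicate match its meaning: is_debug_stub() now covers any code object in the DEBUG_STUB state, while the narrower break-specific test moves into Debug::IsDebugBreak (the debug.cc hunk earlier). A compact, runnable restatement of the relationship, with stand-in types:

```cpp
enum IcState    { DEBUG_STUB, OTHER_STATE };
enum ExtraState { DEBUG_BREAK, OTHER_EXTRA };

struct CodeObject {  // illustrative stand-in for v8::internal::Code
  IcState ic_state;
  ExtraState extra_ic_state;
};

bool IsDebugStub(const CodeObject& c)  { return c.ic_state == DEBUG_STUB; }
bool IsDebugBreak(const CodeObject& c) {
  return IsDebugStub(c) && c.extra_ic_state == DEBUG_BREAK;
}
// IC::Clear and the frame dropper now skip *all* debug stubs, not just the
// break variant they previously matched.
```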


@ -3909,9 +3909,9 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
lookup->holder()->MigrateToMap(Map::cast(back));
if (maybe_failure->IsFailure()) return maybe_failure;
}
DescriptorArray* desc = transition_map->instance_descriptors();
int descriptor = transition_map->LastAdded();
representation = desc->GetDetails(descriptor).representation();
descriptors = transition_map->instance_descriptors();
representation =
descriptors->GetDetails(descriptor).representation();
}
int field_index = descriptors->GetFieldIndex(descriptor);
result = lookup->holder()->AddFastPropertyUsingMap(
@ -4117,9 +4117,9 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
MaybeObject* maybe_failure = self->MigrateToMap(Map::cast(back));
if (maybe_failure->IsFailure()) return maybe_failure;
}
DescriptorArray* desc = transition_map->instance_descriptors();
int descriptor = transition_map->LastAdded();
representation = desc->GetDetails(descriptor).representation();
descriptors = transition_map->instance_descriptors();
representation =
descriptors->GetDetails(descriptor).representation();
}
int field_index = descriptors->GetFieldIndex(descriptor);
result = self->AddFastPropertyUsingMap(


@ -2719,7 +2719,11 @@ class JSObject: public JSReceiver {
// don't want to be wasteful with long lived objects.
static const int kMaxUncheckedOldFastElementsLength = 500;
static const int kInitialMaxFastElementArray = 100000;
// TODO(2790): HAllocate currently always allocates fast backing stores
// in new space, where on x64 we can only fit ~98K elements. Keep this
// limit lower than that until HAllocate is made smarter.
static const int kInitialMaxFastElementArray = 95000;
static const int kFastPropertiesSoftLimit = 12;
static const int kMaxFastProperties = 64;
static const int kMaxInstanceSize = 255 * kPointerSize;
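The ~98K figure in the new comment can be sanity-checked with back-of-the-envelope arithmetic; every constant below is an assumption for illustration, not a quoted V8 value:

```cpp
#include <cstddef>
#include <cstdio>

int main() {
  const size_t kPointerSize = 8;                          // x64
  const size_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length (assumed)
  const size_t kMaxNewSpaceObjectSize = 786432;           // ~768 KB cap (assumed)

  size_t max_elements =
      (kMaxNewSpaceObjectSize - kFixedArrayHeaderSize) / kPointerSize;
  std::printf("%zu elements\n", max_elements);  // ~98K, hence the 95000 limit
  return 0;
}
```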
@ -4909,7 +4913,7 @@ class Code: public HeapObject {
// Testers for IC stub kinds.
inline bool is_inline_cache_stub();
inline bool is_debug_break();
inline bool is_debug_stub();
inline bool is_load_stub() { return kind() == LOAD_IC; }
inline bool is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
inline bool is_store_stub() { return kind() == STORE_IC; }

deps/v8/src/runtime.cc

@ -8294,26 +8294,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InstallRecompiledCode) {
class ActivationsFinder : public ThreadVisitor {
public:
explicit ActivationsFinder(JSFunction* function)
: function_(function), has_activations_(false) {}
Code* code_;
bool has_code_activations_;
explicit ActivationsFinder(Code* code)
: code_(code),
has_code_activations_(false) { }
void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
if (has_activations_) return;
for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
JavaScriptFrame* frame = it.frame();
if (frame->is_optimized() && frame->function() == function_) {
has_activations_ = true;
return;
}
}
JavaScriptFrameIterator it(isolate, top);
VisitFrames(&it);
}
bool has_activations() { return has_activations_; }
private:
JSFunction* function_;
bool has_activations_;
void VisitFrames(JavaScriptFrameIterator* it) {
for (; !it->done(); it->Advance()) {
JavaScriptFrame* frame = it->frame();
if (code_->contains(frame->pc())) has_code_activations_ = true;
}
}
};
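The rewritten finder matches frames by code object rather than by function identity because several closures can share one optimized Code (the first new test in this commit, with makeF, builds exactly that situation); a function-identity scan would miss activations running the shared code under a different JSFunction. The containment test it relies on, sketched with a stand-in for Code::contains:

```cpp
#include <cstddef>
#include <cstdint>

// A frame is an activation of this optimized code iff its pc lies inside
// the code object's instruction range (illustrative, simplified).
bool CodeContains(uintptr_t instr_start, size_t instr_size, uintptr_t pc) {
  return pc >= instr_start && pc < instr_start + instr_size;
}
```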
@ -8336,7 +8334,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
ASSERT(AllowHeapAllocation::IsAllowed());
ASSERT(deoptimizer->compiled_code_kind() == Code::OPTIMIZED_FUNCTION);
Handle<JSFunction> function = deoptimizer->function();
Handle<Code> optimized_code = deoptimizer->compiled_code();
ASSERT(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
ASSERT(type == deoptimizer->bailout_type());
// Make sure to materialize objects before causing any allocation.
JavaScriptFrameIterator it(isolate);
@ -8345,10 +8347,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
JavaScriptFrame* frame = it.frame();
RUNTIME_ASSERT(frame->function()->IsJSFunction());
Handle<JSFunction> function(frame->function(), isolate);
Handle<Code> optimized_code(function->code());
RUNTIME_ASSERT((type != Deoptimizer::EAGER &&
type != Deoptimizer::SOFT) || function->IsOptimized());
// Avoid doing too much work when running with --always-opt and keep
// the optimized code around.
@ -8356,33 +8354,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
return isolate->heap()->undefined_value();
}
// Find other optimized activations of the function or functions that
// share the same optimized code.
bool has_other_activations = false;
while (!it.done()) {
JavaScriptFrame* frame = it.frame();
JSFunction* other_function = frame->function();
if (frame->is_optimized() && other_function->code() == function->code()) {
has_other_activations = true;
break;
}
it.Advance();
}
// Search for other activations of the same function and code.
ActivationsFinder activations_finder(*optimized_code);
activations_finder.VisitFrames(&it);
isolate->thread_manager()->IterateArchivedThreads(&activations_finder);
if (!has_other_activations) {
ActivationsFinder activations_finder(*function);
isolate->thread_manager()->IterateArchivedThreads(&activations_finder);
has_other_activations = activations_finder.has_activations();
}
if (!has_other_activations) {
if (FLAG_trace_deopt) {
PrintF("[removing optimized code for: ");
function->PrintName();
PrintF("]\n");
if (!activations_finder.has_code_activations_) {
if (function->code() == *optimized_code) {
if (FLAG_trace_deopt) {
PrintF("[removing optimized code for: ");
function->PrintName();
PrintF("]\n");
}
function->ReplaceCode(function->shared()->code());
}
function->ReplaceCode(function->shared()->code());
} else {
// TODO(titzer): we should probably do DeoptimizeCodeList(code)
// unconditionally if the code is not already marked for deoptimization.
// If there is an index by shared function info, all the better.
Deoptimizer::DeoptimizeFunction(*function);
}
// Evict optimized code for this function from the cache so that it doesn't
@ -8635,6 +8624,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAllocationTimeout) {
SealHandleScope shs(isolate);
ASSERT(args.length() == 2);
#ifdef DEBUG
CONVERT_SMI_ARG_CHECKED(interval, 0);
CONVERT_SMI_ARG_CHECKED(timeout, 1);
isolate->heap()->set_allocation_timeout(timeout);
FLAG_gc_interval = interval;
#endif
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_CheckIsBootstrapping) {
SealHandleScope shs(isolate);
RUNTIME_ASSERT(isolate->bootstrapper()->IsActive());
@ -13639,7 +13641,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) {
CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 1);
bool has_pending_exception = false;
double millis = Execution::ToNumber(date, &has_pending_exception)->Number();
Handle<Object> value = Execution::ToNumber(date, &has_pending_exception);
if (has_pending_exception) {
ASSERT(isolate->has_pending_exception());
return Failure::Exception();
@ -13650,7 +13652,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) {
if (!date_format) return isolate->ThrowIllegalOperation();
icu::UnicodeString result;
date_format->format(millis, result);
date_format->format(value->Number(), result);
return *isolate->factory()->NewStringFromTwoByte(
Vector<const uint16_t>(
@ -13743,7 +13745,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberFormat) {
CONVERT_ARG_HANDLE_CHECKED(Object, number, 1);
bool has_pending_exception = false;
double value = Execution::ToNumber(number, &has_pending_exception)->Number();
Handle<Object> value = Execution::ToNumber(number, &has_pending_exception);
if (has_pending_exception) {
ASSERT(isolate->has_pending_exception());
return Failure::Exception();
@ -13754,7 +13756,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalNumberFormat) {
if (!number_format) return isolate->ThrowIllegalOperation();
icu::UnicodeString result;
number_format->format(value, result);
number_format->format(value->Number(), result);
return *isolate->factory()->NewStringFromTwoByte(
Vector<const uint16_t>(
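Both i18n fixes apply the same pattern: keep the Handle<Object> returned by Execution::ToNumber and call Number() on it only after the pending-exception check, since on failure the returned handle must not be dereferenced. A generic, runnable restatement of the shape of the fix, with std::optional standing in for the handle-plus-exception-flag pair:

```cpp
#include <optional>

std::optional<double> ToNumber(bool fail) {  // stand-in for Execution::ToNumber
  if (fail) return std::nullopt;
  return 42.0;
}

double FormatMillis(bool fail, bool* error) {
  std::optional<double> value = ToNumber(fail);
  if (!value) { *error = true; return 0.0; }  // check before dereferencing
  *error = false;
  return *value;  // safe: only reached on success
}
```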
@ -13993,6 +13995,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FlattenString) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyContextDisposed) {
HandleScope scope(isolate);
ASSERT(args.length() == 0);
isolate->heap()->NotifyContextDisposed();
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_MigrateInstance) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);


@ -101,6 +101,7 @@ namespace internal {
F(GetOptimizationStatus, -1, 1) \
F(GetOptimizationCount, 1, 1) \
F(CompileForOnStackReplacement, 1, 1) \
F(SetAllocationTimeout, 2, 1) \
F(AllocateInNewSpace, 1, 1) \
F(AllocateInOldPointerSpace, 1, 1) \
F(AllocateInOldDataSpace, 1, 1) \
@ -110,6 +111,7 @@ namespace internal {
F(DebugPrepareStepInIfStepping, 1, 1) \
F(FlattenString, 1, 1) \
F(MigrateInstance, 1, 1) \
F(NotifyContextDisposed, 0, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \


@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 20
#define BUILD_NUMBER 17
#define PATCH_LEVEL 0
#define PATCH_LEVEL 7
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0


@ -3568,6 +3568,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Integer32ToSmi(rax, rax);
__ push(rax);
__ push(rdi);
__ push(rbx);
@ -3578,6 +3579,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ pop(rbx);
__ pop(rdi);
__ pop(rax);
__ SmiToInteger32(rax, rax);
}
__ jmp(&done);


@ -1216,6 +1216,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
__ cmpl(dividend, Immediate(0));
__ j(less, &negative, Label::kNear);
__ sarl(dividend, Immediate(power));
if (divisor < 0) __ negl(dividend);
__ jmp(&done, Label::kNear);
__ bind(&negative);


@ -1778,13 +1778,6 @@ LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
}
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
// The control instruction marking the end of a block that completed
// abruptly (e.g., threw an exception). There is nothing specific to do.
return NULL;
}
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), rax);
return MarkAsCall(new(zone()) LThrow(value), instr);


@ -3859,7 +3859,7 @@ void MacroAssembler::Allocate(int object_size,
LoadAllocationTopHelper(result, scratch, flags);
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
Check(zero, kAllocationIsNotDoubleAligned);
@ -3938,7 +3938,7 @@ void MacroAssembler::Allocate(Register object_size,
LoadAllocationTopHelper(result, scratch, flags);
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
// safe in new-space because the limit of the heap is aligned there.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
Check(zero, kAllocationIsNotDoubleAligned);


@ -0,0 +1,65 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
var soft = false;
// disable optimization of this global
soft = true;
soft = false;
soft = true;
soft = false;
function test() {
var f4 = makeF(4);
var f5 = makeF(5);
function makeF(i) {
return function f(x) {
if (x == 0) return i;
if (i == 4) if (soft) print("wahoo" + i);
return f4(x - 1);
}
}
f4(9);
f4(11);
%OptimizeFunctionOnNextCall(f4);
f4(12);
f5(9);
f5(11);
%OptimizeFunctionOnNextCall(f5);
f5(12);
soft = true;
f4(1);
f5(9);
}
test();


@ -0,0 +1,38 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// (Standard V8 BSD license header, identical to the first new test file above.)
// Flags: --allow-natives-syntax
function foo() {
throw "Error";
return 1 > 5;
};
try { foo() } catch(e) {}
try { foo() } catch(e) {}
%OptimizeFunctionOnNextCall(foo);
try { foo() } catch(e) {}


@ -0,0 +1,40 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// (Standard V8 BSD license header, identical to the first new test file above.)
// Flags: --allow-natives-syntax
function foo(array) {
return array[0];
}
var a = [1, 2, , 4]; // Holey Smi elements.
var b = ["abcd", 0]; // Fast elements.
foo(b); // Observe fast elements first, or the IC will transition without
foo(a); // going polymorphic.
%OptimizeFunctionOnNextCall(foo);
var c = [, 0];
assertEquals(undefined, foo(c)); // Elided hole check will leak the hole.


@ -0,0 +1,54 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// (Standard V8 BSD license header, identical to the first new test file above.)
// Flags: --expose-debug-as debug --allow-natives-syntax --expose-gc
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
function breakListener(event, exec_state, event_data, data) {
exec_state.prepareStep(Debug.StepAction.StepIn, 1);
}
Debug.setListener(breakListener);
var o = {x:function() { return 10; }};
function f(o) {
var m = "x";
o[m]();
}
Debug.setBreakPoint(f, 2, 0);
f(o);
%NotifyContextDisposed();
function g() {
gc();
}
g();


@ -0,0 +1,31 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// (Standard V8 BSD license header, identical to the first new test file above.)
// Test that we can create arrays of any size.
for (var i = 1000; i < 1000000; i += 97) {
new Array(i);
}


@ -0,0 +1,45 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// (Standard V8 BSD license header, identical to the first new test file above.)
// Flags: --allow-natives-syntax
function bailout() { throw "bailout"; }
var global;
function foo(x, fun) {
var a = x + 1;
var b = x + 2; // Need another Simulate to fold the first one into.
global = true; // Need a side effect to deopt to.
fun();
return a;
}
assertThrows("foo(1, bailout)");
assertThrows("foo(1, bailout)");
%OptimizeFunctionOnNextCall(foo);
assertThrows("foo(1, bailout)");
assertEquals(2, foo(1, function() {}));
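The two inline comments above are the load-bearing part: `b` exists only to give the optimizer a second environment simulation to fold the first one into, and the store to `global` is the side effect the deoptimizer rewinds to. Stripped of natives syntax, the invariant being pinned down is just this (a minimal plain-JS sketch; `foo2` is a hypothetical stand-in, not part of the test):

// Whether fun() throws (forcing an unwind through optimized code) or
// returns normally, the local captured before the call must survive.
function foo2(x, fun) {
  var a = x + 1;
  fun();
  return a;
}
var threw = false;
try { foo2(1, function () { throw "bailout"; }); } catch (e) { threw = true; }
// threw === true, and a clean call still yields x + 1:
// foo2(1, function () {}) === 2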

View File

@ -0,0 +1,43 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
function f(a, b) {
var x = { a:a };
switch(b) { case "string": }
var y = { b:b };
return y;
}
f("a", "b");
f("a", "b");
%OptimizeFunctionOnNextCall(f);
f("a", "b");
%SetAllocationTimeout(100, 0);
var killer = f("bang", "bo" + "om");
assertEquals("boom", killer.b);

View File

@ -0,0 +1,47 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
function funky() { return false; }
var global;
function foo(x, fun) {
var a = x + 1;
var b = x + 2; // Need another Simulate to fold the first one into.
global = true; // Need a side effect to deopt to.
if (fun()) {
return a;
}
return 0;
}
assertEquals(0, foo(1, funky));
assertEquals(0, foo(1, funky));
%OptimizeFunctionOnNextCall(foo);
assertEquals(0, foo(1, funky));
assertEquals(2, foo(1, function() { return true; }));
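Same skeleton as the throwing variant earlier in this commit, except the deopt point is a branch on the call's result instead of an unwind. A plain-JS sketch of the contract (hypothetical `foo3`, named to avoid redefining `foo`):

// When fun() first returns true, the optimized caller must deopt and
// still observe a === x + 1 on the newly taken branch.
function foo3(x, fun) {
  var a = x + 1;
  if (fun()) return a;
  return 0;
}
// foo3(1, function () { return false; }) === 0
// foo3(1, function () { return true; })  === 2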

View File

@ -0,0 +1,59 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
function funcify(obj) {
var type = typeof obj;
if (type === "object") {
var funcified = {}, foo = {};
for (var prop in obj) {
funcified[prop] = funcify(obj[prop]);
foo[prop] = true;
}
return funcified;
} else if (type === "function") {
return obj;
} else {
return function () { return obj; };
}
}
var obj = {};
obj.A = 1;
obj.B = function () { return 2; };
obj.C = 3;
obj.D = 4;
var funcified = funcify(obj);
assertEquals("function", typeof funcified.A);
assertEquals(1, funcified.A());
assertEquals("function", typeof funcified.B);
assertEquals(2, funcified.B());
assertEquals("function", typeof funcified.C);
assertEquals("function", typeof funcified.D);
assertEquals(4, funcified.D());
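funcify dispatches on typeof: objects recurse, functions pass through untouched, and every other value is wrapped in a thunk. The recursion means nested structures come out uniformly callable, e.g. (a sketch reusing the funcify defined above):

// Inner objects are funcified recursively; leaves become thunks,
// existing functions are preserved as-is.
var nested = funcify({ inner: { n: 7 }, id: function (v) { return v; } });
assertEquals(7, nested.inner.n());
assertEquals(42, nested.id(42));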

View File

@ -25,35 +25,63 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
function divp4(x) {
  return x / 4;
}
-for (var i = 0; i < 10000; i+=4) {
-  assertEquals(i >> 2, divp4(i));
-}
+divp4(8);
+divp4(8);
+%OptimizeFunctionOnNextCall(divp4);
+assertEquals(2, divp4(8));
+assertEquals(0.5, divp4(2));
function divn4(x) {
  return x / (-4);
}
-for (var i = 0; i < 10000; i+=4) {
-  assertEquals(-(i >> 2), divn4(i));
-}
+divn4(8);
+divn4(8);
+%OptimizeFunctionOnNextCall(divn4);
+assertEquals(-2, divn4(8));
// Check for (0 / -x)
assertEquals(-0, divn4(0));
// Check for (kMinInt / -1)
function divn1(x) {
  return x / (-1);
}
-for (var i = 0; i < 10000; i++) {
-  assertEquals(-i, divn1(i));
-}
-var min_int = -(0x7FFFFFFF)-1;
-assertEquals(-min_int, divn1(min_int));
+var two_31 = 1 << 31;
+divn1(2);
+divn1(2);
+%OptimizeFunctionOnNextCall(divn1);
+assertEquals(-2, divn1(2));
+assertEquals(two_31, divn1(-two_31));
+// Check for truncating to int32 case
+function divp4t(x) {
+  return (x / 4) | 0;
+}
+divp4t(8);
+divp4t(8);
+%OptimizeFunctionOnNextCall(divp4t);
+assertEquals(-1, divp4t(-5));
+assertEquals(1, divp4t(5));
+assertOptimized(divp4t);
+function divn4t(x) {
+  return (x / -4) | 0;
+}
+divn4t(8);
+divn4t(8);
+%OptimizeFunctionOnNextCall(divn4t);
+assertEquals(1, divn4t(-5));
+assertEquals(-1, divn4t(5));
+assertOptimized(divn4t);
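The comments in the hunk name the three int32 hazards these tests pin down; as a plain-JS summary of the expected values (assertEquals used in the same spirit as the tests above):

var kMinInt = -2147483648;
assertEquals(-1, (-5 / 4) | 0);          // |0 truncates toward zero, not floor
assertEquals(-1, (5 / -4) | 0);
assertEquals(-Infinity, 1 / (0 / -4));   // 0 / -x is negative zero
assertEquals(2147483648, kMinInt / -1);  // kMinInt / -1 overflows int32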