Revert "deps: update v8 to 3.20.17.11"

After the upgrade from 3.20.17.7 to 3.20.17.11, we've begun hitting
random assertion failures in V8 in memory-constrained / GC-heavy
situations.

The assertion failures all seem to be related to heap allocation and
garbage collection but, apart from that, they're all over the place.

This reverts commit 970bdccc38fee27e10ff1a3b68f3fea3417788a1.
Author: Ben Noordhuis
Date:   2013-09-24 16:13:05 +02:00
Parent: 42acbf809b
Commit: 14687ebfc0

19 changed files with 52 additions and 500 deletions


@@ -2890,7 +2890,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
TailCallBuiltin(masm(), MissBuiltin(kind()));
// Return the generated code.
return GetCode(kind(), Code::INTERCEPTOR, name);
return GetICCode(kind(), Code::INTERCEPTOR, name);
}

deps/v8/src/heap.cc

@@ -4970,7 +4970,7 @@ MaybeObject* Heap::CopyJSObjectWithAllocationSite(
int object_size = map->instance_size();
Object* clone;
ASSERT(AllocationSite::CanTrack(map->instance_type()));
ASSERT(map->CanTrackAllocationSite());
ASSERT(map->instance_type() == JS_ARRAY_TYPE);
WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
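Note: the restored assertions encode that this copy path only tracks
allocation sites for JS arrays. The point of the tracking is visible from
script: an array literal's allocation site remembers ElementsKind
transitions so later allocations can start out in the right kind. A rough
illustration (the element-kind names are V8-internal, not observable from
JavaScript):

    // The allocation site behind this literal records kind transitions.
    function makeArray() { return [1, 2, 3]; }  // starts with smi elements
    var a = makeArray();
    a[0] = 1.5;  // transitions `a` to double elements; the site remembers,
                 // so later makeArray() results can be allocated with
                 // double elements directly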


@@ -2366,24 +2366,6 @@ HConstant::HConstant(Handle<Object> handle,
}
HConstant::HConstant(Handle<Map> handle,
UniqueValueId unique_id)
: HTemplateInstruction<0>(HType::Tagged()),
handle_(handle),
unique_id_(unique_id),
has_smi_value_(false),
has_int32_value_(false),
has_double_value_(false),
has_external_reference_value_(false),
is_internalized_string_(false),
is_not_in_new_space_(true),
is_cell_(false),
boolean_value_(false) {
ASSERT(!handle.is_null());
Initialize(Representation::Tagged());
}
HConstant::HConstant(int32_t integer_value,
Representation r,
bool is_not_in_new_space,
@@ -3201,7 +3183,6 @@ Representation HUnaryMathOperation::RepresentationFromInputs() {
void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
HValue* dominator) {
ASSERT(side_effect == kChangesNewSpacePromotion);
Zone* zone = block()->zone();
if (!FLAG_use_allocation_folding) return;
// Try to fold allocations together with their dominating allocations.
@@ -3213,44 +3194,31 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
return;
}
HAllocate* dominator_allocate = HAllocate::cast(dominator);
HValue* dominator_size = dominator_allocate->size();
HAllocate* dominator_allocate_instr = HAllocate::cast(dominator);
HValue* dominator_size = dominator_allocate_instr->size();
HValue* current_size = size();
// We can just fold allocations that are guaranteed in new space.
// TODO(hpayer): Add support for non-constant allocation in dominator.
if (!current_size->IsInteger32Constant() ||
if (!IsNewSpaceAllocation() || !current_size->IsInteger32Constant() ||
!dominator_allocate_instr->IsNewSpaceAllocation() ||
!dominator_size->IsInteger32Constant()) {
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n",
PrintF("#%d (%s) cannot fold into #%d (%s)\n",
id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
}
return;
}
dominator_allocate = GetFoldableDominator(dominator_allocate);
if (dominator_allocate == NULL) {
return;
}
ASSERT((IsNewSpaceAllocation() &&
dominator_allocate->IsNewSpaceAllocation()) ||
(IsOldDataSpaceAllocation() &&
dominator_allocate->IsOldDataSpaceAllocation()) ||
(IsOldPointerSpaceAllocation() &&
dominator_allocate->IsOldPointerSpaceAllocation()));
// First update the size of the dominator allocate instruction.
dominator_size = dominator_allocate->size();
int32_t original_object_size =
int32_t dominator_size_constant =
HConstant::cast(dominator_size)->GetInteger32Constant();
int32_t dominator_size_constant = original_object_size;
int32_t current_size_constant =
HConstant::cast(current_size)->GetInteger32Constant();
int32_t new_dominator_size = dominator_size_constant + current_size_constant;
if (MustAllocateDoubleAligned()) {
if (!dominator_allocate->MustAllocateDoubleAligned()) {
dominator_allocate->MakeDoubleAligned();
if (!dominator_allocate_instr->MustAllocateDoubleAligned()) {
dominator_allocate_instr->MakeDoubleAligned();
}
if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
dominator_size_constant += kDoubleSize / 2;
@@ -3261,167 +3229,36 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
if (new_dominator_size > Page::kMaxNonCodeHeapObjectSize) {
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
id(), Mnemonic(), dominator_allocate->id(),
dominator_allocate->Mnemonic(), new_dominator_size);
id(), Mnemonic(), dominator->id(), dominator->Mnemonic(),
new_dominator_size);
}
return;
}
HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
zone, context(), new_dominator_size, dominator_allocate);
dominator_allocate->UpdateSize(new_dominator_size_constant);
HBasicBlock* block = dominator->block();
Zone* zone = block->zone();
HInstruction* new_dominator_size_constant =
HConstant::New(zone, context(), new_dominator_size);
new_dominator_size_constant->InsertBefore(dominator_allocate_instr);
dominator_allocate_instr->UpdateSize(new_dominator_size_constant);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) {
dominator_allocate->MakePrefillWithFiller();
} else {
// TODO(hpayer): This is a short-term hack to make allocation mementos
// work again in new space.
ClearNextMapWord(original_object_size);
if (FLAG_verify_heap) {
dominator_allocate_instr->MakePrefillWithFiller();
}
#else
// TODO(hpayer): This is a short-term hack to make allocation mementos
// work again in new space.
ClearNextMapWord(original_object_size);
#endif
dominator_allocate->clear_next_map_word_ = clear_next_map_word_;
// After that replace the dominated allocate instruction.
HInstruction* dominated_allocate_instr =
HInnerAllocatedObject::New(zone,
context(),
dominator_allocate,
dominator_allocate_instr,
dominator_size_constant,
type());
dominated_allocate_instr->InsertBefore(this);
DeleteAndReplaceWith(dominated_allocate_instr);
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) folded into #%d (%s)\n",
id(), Mnemonic(), dominator_allocate->id(),
dominator_allocate->Mnemonic());
}
}
HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
if (!IsFoldable(dominator)) {
// We cannot hoist old space allocations over new space allocations.
if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
}
return NULL;
}
HAllocate* dominator_dominator = dominator->dominating_allocate_;
// We can hoist old data space allocations over an old pointer space
// allocation and vice versa. For that we have to check the dominator
// of the dominator allocate instruction.
if (dominator_dominator == NULL) {
dominating_allocate_ = dominator;
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
}
return NULL;
}
// We can just fold old space allocations that are in the same basic block,
// since it is not guaranteed that we fill up the whole allocated old
// space memory.
// TODO(hpayer): Remove this limitation and add filler maps for each
// allocation as soon as we have store elimination.
if (block()->block_id() != dominator_dominator->block()->block_id()) {
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
id(), Mnemonic(), dominator_dominator->id(),
dominator_dominator->Mnemonic());
}
return NULL;
}
ASSERT((IsOldDataSpaceAllocation() &&
dominator_dominator->IsOldDataSpaceAllocation()) ||
(IsOldPointerSpaceAllocation() &&
dominator_dominator->IsOldPointerSpaceAllocation()));
int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
HStoreNamedField* dominator_free_space_size =
dominator->filler_free_space_size_;
if (dominator_free_space_size != NULL) {
// We already hoisted one old space allocation, i.e., we already installed
// a filler map. Hence, we just have to update the free space size.
dominator->UpdateFreeSpaceFiller(current_size);
} else {
// This is the first old space allocation that gets hoisted. We have to
// install a filler map since the following allocation may cause a GC.
dominator->CreateFreeSpaceFiller(current_size);
}
// We can hoist the old space allocation over the actual dominator.
return dominator_dominator;
}
return dominator;
}
void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
ASSERT(filler_free_space_size_ != NULL);
Zone* zone = block()->zone();
HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
zone,
context(),
filler_free_space_size_->value()->GetInteger32Constant() +
free_space_size,
filler_free_space_size_);
filler_free_space_size_->UpdateValue(new_free_space_size);
}
void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
ASSERT(filler_free_space_size_ == NULL);
Zone* zone = block()->zone();
int32_t dominator_size =
HConstant::cast(dominating_allocate_->size())->GetInteger32Constant();
HInstruction* free_space_instr =
HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
dominator_size, type());
free_space_instr->InsertBefore(this);
HConstant* filler_map = HConstant::New(
zone,
context(),
isolate()->factory()->free_space_map(),
UniqueValueId(isolate()->heap()->free_space_map()));
filler_map->InsertAfter(free_space_instr);
HInstruction* store_map = HStoreNamedField::New(zone, context(),
free_space_instr, HObjectAccess::ForMap(), filler_map);
store_map->SetFlag(HValue::kHasNoObservableSideEffects);
store_map->InsertAfter(filler_map);
HConstant* filler_size = HConstant::CreateAndInsertAfter(
zone, context(), free_space_size, store_map);
HObjectAccess access =
HObjectAccess::ForJSObjectOffset(FreeSpace::kSizeOffset);
HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
free_space_instr, access, filler_size);
store_size->SetFlag(HValue::kHasNoObservableSideEffects);
store_size->InsertAfter(filler_size);
filler_free_space_size_ = store_size;
}
void HAllocate::ClearNextMapWord(int offset) {
if (clear_next_map_word_) {
Zone* zone = block()->zone();
HObjectAccess access = HObjectAccess::ForJSObjectOffset(offset);
HStoreNamedField* clear_next_map =
HStoreNamedField::New(zone, context(), this, access,
block()->graph()->GetConstantNull());
clear_next_map->ClearAllSideEffects();
clear_next_map->InsertAfter(this);
id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
}
}
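Note: the core of the reverted optimization is above. Two allocations with
constant sizes are folded by padding the dominator's size for double
alignment and then summing the sizes, after which the dominated allocation
becomes an inner pointer into the dominator's reservation. A worked sketch
of just that arithmetic, with hypothetical sizes and assuming kDoubleSize
is 8 bytes (so kDoubleAlignmentMask is 7), consistent with the checks
above:

    // Hypothetical: the dominator reserves 12 bytes, the dominated
    // allocation needs 16 bytes and must be double aligned.
    var kDoubleSize = 8;
    var kDoubleAlignmentMask = kDoubleSize - 1;  // assumed value
    var dominatorSize = 12;
    var currentSize = 16;
    if ((dominatorSize & kDoubleAlignmentMask) !== 0) {
      dominatorSize += kDoubleSize / 2;          // 12 -> 16
    }
    var newDominatorSize = dominatorSize + currentSize;  // 32
    // The dominator's size constant is updated to 32; the dominated
    // HAllocate is replaced by an HInnerAllocatedObject at offset 16,
    // unless the sum exceeds Page::kMaxNonCodeHeapObjectSize.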


@@ -49,11 +49,11 @@ class HEnvironment;
class HInferRepresentationPhase;
class HInstruction;
class HLoopInformation;
class HStoreNamedField;
class HValue;
class LInstruction;
class LChunkBuilder;
#define HYDROGEN_ABSTRACT_INSTRUCTION_LIST(V) \
V(ArithmeticBinaryOperation) \
V(BinaryOperation) \
@@ -3204,27 +3204,8 @@ class HConstant: public HTemplateInstruction<0> {
DECLARE_INSTRUCTION_FACTORY_P2(HConstant, int32_t, Representation);
DECLARE_INSTRUCTION_FACTORY_P1(HConstant, double);
DECLARE_INSTRUCTION_FACTORY_P1(HConstant, Handle<Object>);
DECLARE_INSTRUCTION_FACTORY_P2(HConstant, Handle<Map>, UniqueValueId);
DECLARE_INSTRUCTION_FACTORY_P1(HConstant, ExternalReference);
static HConstant* CreateAndInsertAfter(Zone* zone,
HValue* context,
int32_t value,
HInstruction* instruction) {
HConstant* new_constant = HConstant::New(zone, context, value);
new_constant->InsertAfter(instruction);
return new_constant;
}
static HConstant* CreateAndInsertBefore(Zone* zone,
HValue* context,
int32_t value,
HInstruction* instruction) {
HConstant* new_constant = HConstant::New(zone, context, value);
new_constant->InsertBefore(instruction);
return new_constant;
}
Handle<Object> handle() {
if (handle_.is_null()) {
Factory* factory = Isolate::Current()->factory();
@@ -3427,8 +3408,6 @@ class HConstant: public HTemplateInstruction<0> {
bool is_not_in_new_space,
bool is_cell,
bool boolean_value);
HConstant(Handle<Map> handle,
UniqueValueId unique_id);
explicit HConstant(ExternalReference reference);
void Initialize(Representation r);
@@ -5119,10 +5098,7 @@ class HAllocate: public HTemplateInstruction<2> {
HType type,
PretenureFlag pretenure_flag,
InstanceType instance_type)
: HTemplateInstruction<2>(type),
dominating_allocate_(NULL),
filler_free_space_size_(NULL),
clear_next_map_word_(false) {
: HTemplateInstruction<2>(type) {
SetOperandAt(0, context);
SetOperandAt(1, size);
set_representation(Representation::Tagged());
@@ -5134,40 +5110,13 @@ class HAllocate: public HTemplateInstruction<2> {
? ALLOCATE_IN_OLD_POINTER_SPACE : ALLOCATE_IN_OLD_DATA_SPACE)
: ALLOCATE_IN_NEW_SPACE;
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
flags_ = static_cast<HAllocate::Flags>(flags_ | ALLOCATE_DOUBLE_ALIGNED);
flags_ = static_cast<HAllocate::Flags>(flags_ |
ALLOCATE_DOUBLE_ALIGNED);
}
// We have to fill the allocated object with one word fillers if we do
// not use allocation folding since some allocations may depend on each
// other, i.e., have a pointer to each other. A GC in between these
// allocations may leave such objects behind in a not completely initialized
// state.
if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
flags_ = static_cast<HAllocate::Flags>(flags_ | PREFILL_WITH_FILLER);
}
clear_next_map_word_ = pretenure_flag == NOT_TENURED &&
AllocationSite::CanTrack(instance_type);
}
HAllocate* GetFoldableDominator(HAllocate* dominator);
void UpdateFreeSpaceFiller(int32_t filler_size);
void CreateFreeSpaceFiller(int32_t filler_size);
bool IsFoldable(HAllocate* allocate) {
return (IsNewSpaceAllocation() && allocate->IsNewSpaceAllocation()) ||
(IsOldDataSpaceAllocation() && allocate->IsOldDataSpaceAllocation()) ||
(IsOldPointerSpaceAllocation() &&
allocate->IsOldPointerSpaceAllocation());
}
void ClearNextMapWord(int offset);
Flags flags_;
Handle<Map> known_initial_map_;
HAllocate* dominating_allocate_;
HStoreNamedField* filler_free_space_size_;
bool clear_next_map_word_;
};
@@ -6024,10 +5973,6 @@ class HStoreNamedField: public HTemplateInstruction<3> {
return access_.representation();
}
void UpdateValue(HValue* value) {
SetOperandAt(1, value);
}
private:
HStoreNamedField(HValue* obj,
HObjectAccess access,


@@ -8241,10 +8241,7 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
int pointer_size,
AllocationSiteMode mode) {
NoObservableSideEffectsScope no_effects(this);
InstanceType instance_type = boilerplate_object->map()->instance_type();
ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
HType type = instance_type == JS_ARRAY_TYPE
? HType::JSArray() : HType::JSObject();
HInstruction* target = NULL;
HInstruction* data_target = NULL;
@@ -8261,11 +8258,14 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
}
if (pointer_size != 0) {
HValue* size_in_bytes = Add<HConstant>(pointer_size);
target = Add<HAllocate>(size_in_bytes, type, TENURED, instance_type);
target = Add<HAllocate>(size_in_bytes, HType::JSObject(), TENURED,
JS_OBJECT_TYPE);
}
} else {
InstanceType instance_type = boilerplate_object->map()->instance_type();
HValue* size_in_bytes = Add<HConstant>(data_size + pointer_size);
target = Add<HAllocate>(size_in_bytes, type, NOT_TENURED, instance_type);
target = Add<HAllocate>(size_in_bytes, HType::JSObject(), NOT_TENURED,
instance_type);
}
int offset = 0;
@@ -8287,7 +8287,7 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
int* data_offset,
AllocationSiteMode mode) {
bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
AllocationSite::CanTrack(boilerplate_object->map()->instance_type());
boilerplate_object->map()->CanTrackAllocationSite();
// If using allocation sites, then the payload on the site should already
// be filled in as a valid (boilerplate) array.
@@ -8343,7 +8343,7 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
// Create allocation site info.
if (mode == TRACK_ALLOCATION_SITE &&
AllocationSite::CanTrack(boilerplate_object->map()->instance_type())) {
boilerplate_object->map()->CanTrackAllocationSite()) {
elements_offset += AllocationMemento::kSize;
*offset += AllocationMemento::kSize;
BuildCreateAllocationMemento(target, JSArray::kSize, allocation_site);


@@ -832,14 +832,14 @@ Vector<const uc16> BasicJsonStringifier::GetCharVector(Handle<String> string) {
void BasicJsonStringifier::SerializeString(Handle<String> object) {
object = FlattenGetString(object);
if (is_ascii_) {
if (object->IsOneByteRepresentationUnderneath()) {
if (object->IsOneByteRepresentation()) {
SerializeString_<true, uint8_t>(object);
} else {
ChangeEncoding();
SerializeString(object);
}
} else {
if (object->IsOneByteRepresentationUnderneath()) {
if (object->IsOneByteRepresentation()) {
SerializeString_<false, uint8_t>(object);
} else {
SerializeString_<false, uc16>(object);
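Note: the representation predicates being swapped here differ for sliced
and cons strings: IsOneByteRepresentation() describes the string object
itself, while IsOneByteRepresentationUnderneath() follows through to the
actual backing store. The JSONStringifySliceMadeExternal test removed
further down exercises the case where the two can disagree; in outline:

    var underlying = 'abcdefghijklmnopqrstuvwxyz';
    var slice = underlying.slice(1);  // sliced string over `underlying`
    // If the embedder now externalizes `underlying` as two-byte data,
    // the slice itself still looks one-byte even though the characters
    // it reads live in a two-byte store underneath.
    JSON.stringify(slice);  // expected: '"bcdefghijklmnopqrstuvwxyz"'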


@@ -2934,7 +2934,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
TailCallBuiltin(masm(), MissBuiltin(kind()));
// Return the generated code.
return GetCode(kind(), Code::INTERCEPTOR, name);
return GetICCode(kind(), Code::INTERCEPTOR, name);
}


@@ -1312,7 +1312,7 @@ void JSObject::ValidateElements() {
bool JSObject::ShouldTrackAllocationInfo() {
if (AllocationSite::CanTrack(map()->instance_type())) {
if (map()->CanTrackAllocationSite()) {
if (!IsJSArray()) {
return true;
}
@@ -1349,11 +1349,6 @@ AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
}
inline bool AllocationSite::CanTrack(InstanceType type) {
return type == JS_ARRAY_TYPE;
}
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
ValidateElements();
ElementsKind elements_kind = map()->elements_kind();
@@ -3583,6 +3578,11 @@ Code::Flags Code::flags() {
}
inline bool Map::CanTrackAllocationSite() {
return instance_type() == JS_ARRAY_TYPE;
}
void Map::set_owns_descriptors(bool is_shared) {
set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
}


@@ -5760,6 +5760,7 @@ class Map: public HeapObject {
set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
inline bool CanTrackAllocationSite();
inline bool owns_descriptors();
inline void set_owns_descriptors(bool is_shared);
inline bool is_observed();
@@ -7828,7 +7829,6 @@ class AllocationSite: public Struct {
static inline AllocationSiteMode GetMode(
ElementsKind boilerplate_elements_kind);
static inline AllocationSiteMode GetMode(ElementsKind from, ElementsKind to);
static inline bool CanTrack(InstanceType type);
static const int kTransitionInfoOffset = HeapObject::kHeaderSize;
static const int kWeakNextOffset = kTransitionInfoOffset + kPointerSize;


@@ -225,14 +225,14 @@ class LookupResult BASE_EMBEDDED {
void HandlerResult(JSProxy* proxy) {
lookup_type_ = HANDLER_TYPE;
holder_ = proxy;
details_ = PropertyDetails(NONE, HANDLER, Representation::Tagged());
details_ = PropertyDetails(NONE, HANDLER, Representation::None());
cacheable_ = false;
}
void InterceptorResult(JSObject* holder) {
lookup_type_ = INTERCEPTOR_TYPE;
holder_ = holder;
details_ = PropertyDetails(NONE, INTERCEPTOR, Representation::Tagged());
details_ = PropertyDetails(NONE, INTERCEPTOR, Representation::None());
}
void NotFound() {


@@ -2433,7 +2433,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpConstructResult) {
}
Object* new_object;
{ MaybeObject* maybe_new_object =
isolate->heap()->AllocateFixedArray(elements_count);
isolate->heap()->AllocateFixedArrayWithHoles(elements_count);
if (!maybe_new_object->ToObject(&new_object)) return maybe_new_object;
}
FixedArray* elements = FixedArray::cast(new_object);
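Note: this switches the match-result array back to the hole-initialized
allocation from 3.20.17.7. What hangs off this choice is shown by the
regress test deleted at the end of this commit: every capture slot of a
match result has to be observable, even for groups that did not
participate in the match. A minimal version of that check:

    // Three alternative capture groups; only the first can match "a".
    var m = /(a)|(b)|(c)/.exec("a");
    // m should be ["a", "a", undefined, undefined], length 4.
    var count = 0;
    m.forEach(function () { count++; });
    // The deleted test asserts count === m.length, i.e. the unmatched
    // capture slots hold real undefined values rather than array holes
    // (forEach skips holes).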


@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 20
#define BUILD_NUMBER 17
#define PATCH_LEVEL 11
#define PATCH_LEVEL 7
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0


@@ -1407,7 +1407,7 @@ void LCodeGen::DoBitI(LBitI* instr) {
break;
case Token::BIT_XOR:
if (right_operand == int32_t(~0)) {
__ notl(ToRegister(left));
__ not_(ToRegister(left));
} else {
__ xorl(ToRegister(left), Immediate(right_operand));
}
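Note: apart from the assembler-method rename (notl back to not_), the
strength reduction in this hunk is unchanged: XOR with an all-ones
immediate is emitted as a single NOT instruction. The JavaScript-level
identity being exploited:

    // -1 has all 32 bits set, so x ^ -1 flips every bit, same as ~x.
    function viaXor(x) { return x ^ -1; }
    function viaNot(x) { return ~x; }
    // viaXor(5) === viaNot(5) === -6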


@@ -20173,21 +20173,4 @@ TEST(AccessCheckThrows) {
"other, 'x', null, null, 1)");
}
THREADED_TEST(Regress256330) {
i::FLAG_allow_natives_syntax = true;
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
Handle<FunctionTemplate> templ = FunctionTemplate::New();
AddInterceptor(templ, EmptyInterceptorGetter, EmptyInterceptorSetter);
context->Global()->Set(v8_str("Bug"), templ->GetFunction());
CompileRun("\"use strict\"; var o = new Bug;"
"function f(o) { o.x = 10; };"
"f(o); f(o); f(o);"
"%OptimizeFunctionOnNextCall(f);"
"f(o);");
ExpectBoolean("%GetOptimizationStatus(f) != 2", true);
}
#endif // WIN32


@@ -2115,78 +2115,6 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
}
TEST(OptimizedPretenuringAllocationFolding) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
HEAP->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"function DataObject() {"
" this.a = 1.1;"
" this.b = [{}];"
" this.c = 1.2;"
" this.d = [{}];"
" this.e = 1.3;"
" this.f = [{}];"
"}"
"function f() {"
" return new DataObject();"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
"f();");
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(0)));
CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(1)));
CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(2)));
CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(3)));
CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(4)));
CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(5)));
}
TEST(OptimizedPretenuringAllocationFoldingBlocks) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
HEAP->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"function DataObject() {"
" this.a = [{}];"
" this.b = [{}];"
" this.c = 1.1;"
" this.d = 1.2;"
" this.e = [{}];"
" this.f = 1.3;"
"}"
"function f() {"
" return new DataObject();"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
"f();");
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(0)));
CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(1)));
CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(2)));
CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(3)));
CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(4)));
CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(5)));
}
TEST(OptimizedPretenuringObjectArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();


@@ -1017,36 +1017,6 @@ TEST(ExternalShortStringAdd) {
}
TEST(JSONStringifySliceMadeExternal) {
Isolate* isolate = Isolate::Current();
Zone zone(isolate);
CcTest::InitializeVM();
// Create a sliced string from a one-byte string. The latter is turned
// into a two-byte external string. Check that JSON.stringify works.
v8::HandleScope handle_scope(CcTest::isolate());
v8::Handle<v8::String> underlying =
CompileRun("var underlying = 'abcdefghijklmnopqrstuvwxyz';"
"underlying")->ToString();
v8::Handle<v8::String> slice =
CompileRun("var slice = underlying.slice(1);"
"slice")->ToString();
CHECK(v8::Utils::OpenHandle(*slice)->IsSlicedString());
CHECK(v8::Utils::OpenHandle(*underlying)->IsSeqOneByteString());
int length = underlying->Length();
uc16* two_byte = zone.NewArray<uc16>(length + 1);
underlying->Write(two_byte);
Resource* resource =
new(&zone) Resource(Vector<const uc16>(two_byte, length));
CHECK(underlying->MakeExternal(resource));
CHECK(v8::Utils::OpenHandle(*slice)->IsSlicedString());
CHECK(v8::Utils::OpenHandle(*underlying)->IsExternalTwoByteString());
CHECK_EQ("\"bcdefghijklmnopqrstuvwxyz\"",
*v8::String::Utf8Value(CompileRun("JSON.stringify(slice)")));
}
TEST(CachedHashOverflow) {
// We incorrectly allowed strings to be tagged as array indices even if their
// values didn't fit in the hash field.


@@ -56,7 +56,7 @@ function doubles() {
doubles(); doubles(); doubles();
%OptimizeFunctionOnNextCall(doubles);
result = doubles();
var result = doubles();
gc();
@@ -72,31 +72,8 @@ function doubles_int() {
doubles_int(); doubles_int(); doubles_int();
%OptimizeFunctionOnNextCall(doubles_int);
result = doubles_int();
var result = doubles_int();
gc();
assertEquals(result[1], 3.1);
// Test allocation folding over a branch.
function branch_int(left) {
var elem1 = [1, 2];
var elem2;
if (left) {
elem2 = [3, 4];
} else {
elem2 = [5, 6];
}
return elem2;
}
branch_int(1); branch_int(1); branch_int(1);
%OptimizeFunctionOnNextCall(branch_int);
result = branch_int(1);
var result2 = branch_int(0);
gc();
assertEquals(result[1], 4);
assertEquals(result2[1], 6);


@@ -1,43 +0,0 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
function inverted_index() {
return ~1;
}
%NeverOptimizeFunction(inverted_index);
function crash(array) {
return array[~inverted_index()] = 2;
}
assertEquals(2, crash(new Array(1)));
assertEquals(2, crash(new Array(1)));
%OptimizeFunctionOnNextCall(crash)
assertEquals(2, crash(new Array(1)));


@@ -1,45 +0,0 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Create a huge regexp with many alternative capture groups, most of
// which do not capture anything, but the corresponding capture slot
// in the result object has to exist, even though filled with undefined.
// Having a large result array helps stressing GC.
var num_captures = 1000;
var regexp_string = "(a)";
for (var i = 0; i < num_captures - 1; i++) {
regexp_string += "|(b)";
}
var regexp = new RegExp(regexp_string);
for (var i = 0; i < 10; i++) {
var matches = regexp.exec("a");
var count = 0;
matches.forEach(function() { count++; });
assertEquals(num_captures + 1, count);
}