author     Ben Noordhuis <info@bnoordhuis.nl>    2011-11-02 16:58:08 +0100
committer  Ben Noordhuis <info@bnoordhuis.nl>    2011-11-02 16:58:35 +0100
commit     edea4122b1c725a9f7873c02fe04100995472ddc
tree       3334347495150cfd3a68909489689c112457ae07
parent     cc9223406837e7610b5f36b16b6a0e51861370cb
Revert "Upgrade V8 to 3.7.1"
This reverts commit 92f5a5d3caf01f382f90c235e9057590a5e76870.
V8 3.7.1 in debug mode on ia32 has a curious race-like bug where an fs.Stats
object is not fully formed until some time after it's created. This is easy
to demonstrate by running `make test-debug`.
V8 3.7.0 does not exhibit this behaviour, so back we go.
Fixes #1981.
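Illustrative only: the commit message does not include a reduced test case, so the snippet below is a hypothetical sketch of the kind of check that trips over the bug described above — on a V8 3.7.1 ia32 debug build, the fields of a freshly created fs.Stats object may not yet be populated when the callback observes it (the real failures show up when running `make test-debug`). The file path and assertions are illustrative, not taken from the failing tests.

```js
// Hypothetical reproduction sketch (not from the failing test suite):
// a fully formed fs.Stats object should always satisfy these checks,
// but on the affected build some fields can still be missing here.
var fs = require('fs');
var assert = require('assert');

fs.stat(__filename, function(err, stats) {
  assert.ifError(err);
  assert.ok(stats instanceof fs.Stats);
  assert.strictEqual(typeof stats.size, 'number');
  assert.ok(stats.isFile());
});
```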
224 files changed, 6958 insertions, 13984 deletions
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index 874fcba92..a95f3cc34 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,48 +1,3 @@
-2011-10-26: Version 3.7.1
-
-        Achieved 33% speedup in debug-mode tests.
-
-        Removed special casing of calls to RegExp test and exec methods with no
-        argument. Now matches new JSC behaviour. crbug.com/75740.
-
-        Return the empty string on cyclic references in toString (ES5
-        conformance).
-
-        Fixed bug triggered by JSBeautifier. crbug.com/100409.
-
-        Made Math.random state per-context instead of per-process (issue 864).
-
-        Fixed stack traces to skip native functions.
-
-        Make snapshots (new contexts) smaller and faster.
-
-        Fixed handling of Function.apply for non-array arguments.
-
-        Fixed evaluation order in defineProperties to match FireFox.
-
-        Fixed handling of non-object receivers for array builtins,
-        crbug.com/100702.
-
-        Multiple fixes to improve compliance with test262.
-
-        Fixed compatibility with older Android releases.
-
-        Fixed compilation with gcc-4.5.3.
-
-        Improved performance of WriteUtf8, issue 1665.
-
-        Made native syntax an early error in the preparser.
-
-        Fixed issues 793 and 893 relating to Function.prototype.bind.
-
-        Improved let, const, Set and Map support and other Harmony features
-        (behind the --harmony flag).
-
-        Changed evaluation order for > and <= to match ES5 instead of ES3.
-
-        Bug fixes and performance improvements on all platforms.
-
-
 2011-10-13: Version 3.7.0
 
         Fixed array handling for Object.defineOwnProperty (ES5 conformance).

[Remainder of the diff truncated. The revert continues through the bundled V8 sources — deps/v8/preparser/preparser-process.cc, deps/v8/src/SConscript, src/accessors.cc, src/api.cc, and the ARM port (assembler-arm-inl.h, assembler-arm.h, builtins-arm.cc, code-stubs-arm.cc/.h, codegen-arm.cc/.h, deoptimizer-arm.cc, full-codegen-arm.cc, ic-arm.cc, lithium-arm.cc/.h, lithium-codegen-arm.cc), among others — downgrading them from 3.7.1 back to 3.7.0; the complete 224-file patch is commit edea4122b1c725a9f7873c02fe04100995472ddc.]
true_block - : false_block; - EmitGoto(next_block); + + if (instr->is_double()) { + // Compare left and right as doubles and load the + // resulting flags into the normal status register. + __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right)); + // If a NaN is involved, i.e. the result is unordered (V set), + // jump to false block label. + __ b(vs, chunk_->GetAssemblyLabel(false_block)); } else { - if (instr->is_double()) { - // Compare left and right operands as doubles and load the - // resulting flags into the normal status register. - __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right)); - // If a NaN is involved, i.e. the result is unordered (V set), - // jump to false block label. - __ b(vs, chunk_->GetAssemblyLabel(false_block)); - } else { - if (right->IsConstantOperand()) { - __ cmp(ToRegister(left), - Operand(ToInteger32(LConstantOperand::cast(right)))); - } else if (left->IsConstantOperand()) { - __ cmp(ToRegister(right), - Operand(ToInteger32(LConstantOperand::cast(left)))); - // We transposed the operands. Reverse the condition. - cond = ReverseCondition(cond); - } else { - __ cmp(ToRegister(left), ToRegister(right)); - } - } - EmitBranch(true_block, false_block, cond); + EmitCmpI(left, right); } + + Condition cc = TokenToCondition(instr->op(), instr->is_double()); + EmitBranch(true_block, false_block, cc); } @@ -2196,6 +2176,9 @@ void LCodeGen::DoCmpT(LCmpT* instr) { __ cmp(r0, Operand(0)); // This instruction also signals no smi code inlined. Condition condition = ComputeCompareCondition(op); + if (op == Token::GT || op == Token::LTE) { + condition = ReverseCondition(condition); + } __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex, condition); @@ -2268,19 +2251,13 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); // Cells are always in the remembered set. - if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; - __ RecordWriteField(scratch, - JSGlobalPropertyCell::kValueOffset, - value, - scratch2, - kLRHasBeenSaved, - kSaveFPRegs, - OMIT_REMEMBERED_SET, - check_needed); - } + __ RecordWriteField(scratch, + JSGlobalPropertyCell::kValueOffset, + value, + scratch2, + kLRHasBeenSaved, + kSaveFPRegs, + OMIT_REMEMBERED_SET); } @@ -2308,18 +2285,13 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { Register value = ToRegister(instr->value()); MemOperand target = ContextOperand(context, instr->slot_index()); __ str(value, target); - if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? 
OMIT_SMI_CHECK : INLINE_SMI_CHECK; + if (instr->needs_write_barrier()) { __ RecordWriteContextSlot(context, target.offset(), value, scratch0(), kLRHasBeenSaved, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + kSaveFPRegs); } } @@ -2340,7 +2312,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result, Register object, Handle<Map> type, Handle<String> name) { - LookupResult lookup(isolate()); + LookupResult lookup; type->LookupInDescriptors(NULL, *name, &lookup); ASSERT(lookup.IsProperty() && (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION)); @@ -2806,7 +2778,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) { void LCodeGen::DoThisFunction(LThisFunction* instr) { Register result = ToRegister(instr->result()); - LoadHeapObject(result, instr->hydrogen()->closure()); + __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); } @@ -3325,36 +3297,21 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } // Do the store. - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; if (instr->is_in_object()) { __ str(value, FieldMemOperand(object, offset)); - if (instr->hydrogen()->NeedsWriteBarrier()) { + if (instr->needs_write_barrier()) { // Update the write barrier for the object for in-object properties. - __ RecordWriteField(object, - offset, - value, - scratch, - kLRHasBeenSaved, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteField( + object, offset, value, scratch, kLRHasBeenSaved, kSaveFPRegs); } } else { __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset)); __ str(value, FieldMemOperand(scratch, offset)); - if (instr->hydrogen()->NeedsWriteBarrier()) { + if (instr->needs_write_barrier()) { // Update the write barrier for the properties array. // object is used as a scratch register. - __ RecordWriteField(scratch, - offset, - value, - object, - kLRHasBeenSaved, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteField( + scratch, offset, value, object, kLRHasBeenSaved, kSaveFPRegs); } } } @@ -3405,18 +3362,9 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { } if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; // Compute address of modified element and store it into key register. 
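The store hunks above (DoStoreGlobalCell, DoStoreContextSlot, DoStoreNamedField, DoStoreKeyedFastElement) differ in how much the write barrier is told about the stored value: the removed 3.7.1 lines pass a check_needed flag so that a value statically known to be a heap object skips the dynamic smi test, while the restored 3.7.0 calls do not take that hint. A minimal standalone sketch of the idea, using hypothetical types that only mimic the one-bit smi tag and are not V8's real API:

#include <cstdint>
#include <unordered_set>

// Tagged word: low bit clear means small integer (smi), low bit set means
// heap pointer -- the tagging scheme assumed by this sketch.
struct Tagged {
  uintptr_t bits;
  bool IsSmi() const { return (bits & 1) == 0; }
};

enum SmiCheckMode { kInlineSmiCheck, kOmitSmiCheck };

std::unordered_set<uintptr_t> remembered_set;  // stand-in for the real structure

// Record a store for the garbage collector unless the value is provably not a
// pointer; kOmitSmiCheck encodes that the compiler already knows it is a heap
// object, so the dynamic test can be skipped.
void RecordWriteSketch(uintptr_t slot, Tagged value, SmiCheckMode mode) {
  if (mode == kInlineSmiCheck && value.IsSmi()) return;  // smis need no entry
  remembered_set.insert(slot);
}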
__ add(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - __ RecordWrite(elements, - key, - value, - kLRHasBeenSaved, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWrite(elements, key, value, kLRHasBeenSaved, kSaveFPRegs); } } @@ -3539,48 +3487,6 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { } -void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { - Register object_reg = ToRegister(instr->object()); - Register new_map_reg = ToRegister(instr->new_map_reg()); - Register scratch = scratch0(); - - Handle<Map> from_map = instr->original_map(); - Handle<Map> to_map = instr->transitioned_map(); - ElementsKind from_kind = from_map->elements_kind(); - ElementsKind to_kind = to_map->elements_kind(); - - Label not_applicable; - __ ldr(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset)); - __ cmp(scratch, Operand(from_map)); - __ b(ne, &not_applicable); - __ mov(new_map_reg, Operand(to_map)); - if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) { - __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); - // Write barrier. - __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, - scratch, kLRHasBeenSaved, kDontSaveFPRegs); - } else if (from_kind == FAST_SMI_ONLY_ELEMENTS && - to_kind == FAST_DOUBLE_ELEMENTS) { - Register fixed_object_reg = ToRegister(instr->temp_reg()); - ASSERT(fixed_object_reg.is(r2)); - ASSERT(new_map_reg.is(r3)); - __ mov(fixed_object_reg, object_reg); - CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(), - RelocInfo::CODE_TARGET, instr); - } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) { - Register fixed_object_reg = ToRegister(instr->temp_reg()); - ASSERT(fixed_object_reg.is(r2)); - ASSERT(new_map_reg.is(r3)); - __ mov(fixed_object_reg, object_reg); - CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(), - RelocInfo::CODE_TARGET, instr); - } else { - UNREACHABLE(); - } - __ bind(&not_applicable); -} - - void LCodeGen::DoStringAdd(LStringAdd* instr) { __ push(ToRegister(instr->left())); __ push(ToRegister(instr->right())); @@ -4297,15 +4203,10 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { - Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements(); - ASSERT_EQ(2, constant_elements->length()); - ElementsKind constant_elements_kind = - static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); - __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); - __ mov(r1, Operand(constant_elements)); + __ mov(r1, Operand(instr->hydrogen()->constant_elements())); __ Push(r3, r2, r1); // Pick the right runtime function or stub to call. @@ -4322,9 +4223,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); } else { FastCloneShallowArrayStub::Mode mode = - constant_elements_kind == FAST_DOUBLE_ELEMENTS - ?
FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS - : FastCloneShallowArrayStub::CLONE_ELEMENTS; + FastCloneShallowArrayStub::CLONE_ELEMENTS; FastCloneShallowArrayStub stub(mode, length); CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -4416,7 +4315,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { Handle<SharedFunctionInfo> shared_info = instr->shared_info(); bool pretenure = instr->hydrogen()->pretenure(); if (!pretenure && shared_info->num_literals() == 0) { - FastNewClosureStub stub(shared_info->strict_mode_flag()); + FastNewClosureStub stub( + shared_info->strict_mode() ? kStrictMode : kNonStrictMode); __ mov(r1, Operand(shared_info)); __ push(r1); CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); @@ -4449,9 +4349,8 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { false_label, input, instr->type_literal()); - if (final_branch_condition != kNoCondition) { - EmitBranch(true_block, false_block, final_branch_condition); - } + + EmitBranch(true_block, false_block, final_branch_condition); } @@ -4521,7 +4420,9 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, final_branch_condition = eq; } else { + final_branch_condition = ne; __ b(false_label); + // A dead branch instruction will be generated after this point. } return final_branch_condition; diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h index b01e4967d..711e4595e 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.h +++ b/deps/v8/src/arm/lithium-codegen-arm.h @@ -86,7 +86,6 @@ class LCodeGen BASE_EMBEDDED { SwVfpRegister flt_scratch, DoubleRegister dbl_scratch); int ToInteger32(LConstantOperand* op) const; - double ToDouble(LConstantOperand* op) const; Operand ToOperand(LOperand* op); MemOperand ToMemOperand(LOperand* op) const; // Returns a MemOperand pointing to the high word of a DoubleStackSlot. @@ -140,8 +139,8 @@ class LCodeGen BASE_EMBEDDED { bool is_done() const { return status_ == DONE; } bool is_aborted() const { return status_ == ABORTED; } - StrictModeFlag strict_mode_flag() const { - return info()->strict_mode_flag(); + int strict_mode_flag() const { + return info()->is_strict_mode() ? kStrictMode : kNonStrictMode; } LChunk* chunk() const { return chunk_; } @@ -207,7 +206,7 @@ class LCodeGen BASE_EMBEDDED { LInstruction* instr); // Generate a direct call to a known function. Expects the function - // to be in r1. + // to be in edi. void CallKnownFunction(Handle<JSFunction> function, int arity, LInstruction* instr, @@ -264,6 +263,7 @@ class LCodeGen BASE_EMBEDDED { static Condition TokenToCondition(Token::Value op, bool is_unsigned); void EmitGoto(int block); void EmitBranch(int left_block, int right_block, Condition cc); + void EmitCmpI(LOperand* left, LOperand* right); void EmitNumberUntagD(Register input, DoubleRegister result, bool deoptimize_on_undefined, @@ -272,10 +272,8 @@ class LCodeGen BASE_EMBEDDED { // Emits optimized code for typeof x == "y". Modifies input register. // Returns the condition on which a final split to // true and false label should be made, to optimize fallthrough. - Condition EmitTypeofIs(Label* true_label, - Label* false_label, - Register input, - Handle<String> type_name); + Condition EmitTypeofIs(Label* true_label, Label* false_label, + Register input, Handle<String> type_name); // Emits optimized code for %_IsObject(x). Preserves input register. 
// Returns the condition on which a final split to diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc index cf4258c83..918f9ebe0 100644 --- a/deps/v8/src/arm/macro-assembler-arm.cc +++ b/deps/v8/src/arm/macro-assembler-arm.cc @@ -1101,16 +1101,24 @@ void MacroAssembler::InvokeFunction(JSFunction* function, // You can't call a function without a valid frame. ASSERT(flag == JUMP_FUNCTION || has_frame()); + ASSERT(function->is_compiled()); + // Get the function and setup the context. mov(r1, Operand(Handle<JSFunction>(function))); ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); + // Invoke the cached code. + Handle<Code> code(function->code()); ParameterCount expected(function->shared()->formal_parameter_count()); - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. - ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); - InvokeCode(r3, expected, actual, flag, NullCallWrapper(), call_kind); + if (V8::UseCrankshaft()) { + // TODO(kasperl): For now, we always call indirectly through the + // code field in the function to allow recompilation to take effect + // without changing any of the call sites. + ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); + InvokeCode(r3, expected, actual, flag, NullCallWrapper(), call_kind); + } else { + InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag, call_kind); + } } @@ -1594,7 +1602,6 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, ASSERT(!result.is(scratch1)); ASSERT(!result.is(scratch2)); ASSERT(!scratch1.is(scratch2)); - ASSERT(!object_size.is(ip)); ASSERT(!result.is(ip)); ASSERT(!scratch1.is(ip)); ASSERT(!scratch2.is(ip)); @@ -2023,8 +2030,7 @@ void MacroAssembler::DispatchMap(Register obj, void MacroAssembler::TryGetFunctionPrototype(Register function, Register result, Register scratch, - Label* miss, - bool miss_on_bound_function) { + Label* miss) { // Check that the receiver isn't a smi. JumpIfSmi(function, miss); @@ -2032,16 +2038,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE); b(ne, miss); - if (miss_on_bound_function) { - ldr(scratch, - FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); - ldr(scratch, - FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset)); - tst(scratch, - Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction))); - b(ne, miss); - } - // Make sure that the function has an instance prototype. Label non_instance; ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset)); @@ -3151,10 +3147,8 @@ void MacroAssembler::CountLeadingZeros(Register zeros, // Answer. #ifdef CAN_USE_ARMV5_INSTRUCTIONS clz(zeros, source); // This instruction is only supported after ARM5. #else - // Order of the next two lines is important: zeros register - // can be the same as source register. - Move(scratch, source); mov(zeros, Operand(0, RelocInfo::NONE)); + Move(scratch, source); // Top 16. tst(scratch, Operand(0xffff0000)); add(zeros, zeros, Operand(16), LeaveCC, eq); diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h index 90c4b3754..8ee468a91 100644 --- a/deps/v8/src/arm/macro-assembler-arm.h +++ b/deps/v8/src/arm/macro-assembler-arm.h @@ -320,11 +320,8 @@ class MacroAssembler: public Assembler { } // Push four registers. Pushes leftmost register first (to highest address). 
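The CountLeadingZeros hunk above keeps a fallback for ARM cores without the clz instruction, narrowing the answer by testing progressively smaller top slices (the visible hunk shows the first, 16-bit step). A portable sketch of the same binary-search approach, purely illustrative rather than a copy of the assembler sequence:

#include <cstdint>

int CountLeadingZeros32(uint32_t x) {
  if (x == 0) return 32;
  int zeros = 0;
  if ((x & 0xFFFF0000u) == 0) { zeros += 16; x <<= 16; }  // top 16 bits clear
  if ((x & 0xFF000000u) == 0) { zeros += 8;  x <<= 8;  }  // top 8
  if ((x & 0xF0000000u) == 0) { zeros += 4;  x <<= 4;  }  // top 4
  if ((x & 0xC0000000u) == 0) { zeros += 2;  x <<= 2;  }  // top 2
  if ((x & 0x80000000u) == 0) { zeros += 1; }             // top 1
  return zeros;
}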
- void Push(Register src1, - Register src2, - Register src3, - Register src4, - Condition cond = al) { + void Push(Register src1, Register src2, + Register src3, Register src4, Condition cond = al) { ASSERT(!src1.is(src2)); ASSERT(!src2.is(src3)); ASSERT(!src1.is(src3)); @@ -363,57 +360,6 @@ class MacroAssembler: public Assembler { } } - // Pop three registers. Pops rightmost register first (from lower address). - void Pop(Register src1, Register src2, Register src3, Condition cond = al) { - ASSERT(!src1.is(src2)); - ASSERT(!src2.is(src3)); - ASSERT(!src1.is(src3)); - if (src1.code() > src2.code()) { - if (src2.code() > src3.code()) { - ldm(ia_w, sp, src1.bit() | src2.bit() | src3.bit(), cond); - } else { - ldr(src3, MemOperand(sp, 4, PostIndex), cond); - ldm(ia_w, sp, src1.bit() | src2.bit(), cond); - } - } else { - Pop(src2, src3, cond); - str(src1, MemOperand(sp, 4, PostIndex), cond); - } - } - - // Pop four registers. Pops rightmost register first (from lower address). - void Pop(Register src1, - Register src2, - Register src3, - Register src4, - Condition cond = al) { - ASSERT(!src1.is(src2)); - ASSERT(!src2.is(src3)); - ASSERT(!src1.is(src3)); - ASSERT(!src1.is(src4)); - ASSERT(!src2.is(src4)); - ASSERT(!src3.is(src4)); - if (src1.code() > src2.code()) { - if (src2.code() > src3.code()) { - if (src3.code() > src4.code()) { - ldm(ia_w, - sp, - src1.bit() | src2.bit() | src3.bit() | src4.bit(), - cond); - } else { - ldr(src4, MemOperand(sp, 4, PostIndex), cond); - ldm(ia_w, sp, src1.bit() | src2.bit() | src3.bit(), cond); - } - } else { - Pop(src3, src4, cond); - ldm(ia_w, sp, src1.bit() | src2.bit(), cond); - } - } else { - Pop(src2, src3, src4, cond); - ldr(src1, MemOperand(sp, 4, PostIndex), cond); - } - } - // Push and pop the registers that can hold pointers, as defined by the // RegList constant kSafepointSavedRegisters. void PushSafepointRegisters(); @@ -726,8 +672,7 @@ class MacroAssembler: public Assembler { void TryGetFunctionPrototype(Register function, Register result, Register scratch, - Label* miss, - bool miss_on_bound_function = false); + Label* miss); // Compare object type for heap object. heap_object contains a non-Smi // whose object type should be compared with the given type. This both diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc index b212f9f6e..c87646793 100644 --- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc +++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc @@ -1111,11 +1111,6 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address, frame_entry<const String*>(re_frame, kInputString) = *subject; frame_entry<const byte*>(re_frame, kInputStart) = new_address; frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length; - } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) { - // Subject string might have been a ConsString that underwent - // short-circuiting during GC. That will not change start_address but - // will change pointer inside the subject handle. - frame_entry<const String*>(re_frame, kInputString) = *subject; } return 0; diff --git a/deps/v8/src/arm/simulator-arm.cc b/deps/v8/src/arm/simulator-arm.cc index 542cc302d..570420262 100644 --- a/deps/v8/src/arm/simulator-arm.cc +++ b/deps/v8/src/arm/simulator-arm.cc @@ -1268,9 +1268,9 @@ void Simulator::WriteDW(int32_t addr, int32_t value1, int32_t value2) { // Returns the limit of the stack area to enable checking for stack overflows. 
uintptr_t Simulator::StackLimit() const { - // Leave a safety margin of 512 bytes to prevent overrunning the stack when + // Leave a safety margin of 256 bytes to prevent overrunning the stack when // pushing values. - return reinterpret_cast<uintptr_t>(stack_) + 512; + return reinterpret_cast<uintptr_t>(stack_) + 256; } diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc index f9a10c4f2..4558afe68 100644 --- a/deps/v8/src/arm/stub-cache-arm.cc +++ b/deps/v8/src/arm/stub-cache-arm.cc @@ -95,63 +95,7 @@ static void ProbeTable(Isolate* isolate, // must always call a backup property check that is complete. // This function is safe to call if the receiver has fast properties. // Name must be a symbol and receiver must be a heap object. -static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, - Label* miss_label, - Register receiver, - Handle<String> name, - Register scratch0, - Register scratch1) { - ASSERT(name->IsSymbol()); - Counters* counters = masm->isolate()->counters(); - __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); - __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); - - Label done; - - const int kInterceptorOrAccessCheckNeededMask = - (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); - - // Bail out if the receiver has a named interceptor or requires access checks. - Register map = scratch1; - __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); - __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); - __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask)); - __ b(ne, miss_label); - - // Check that receiver is a JSObject. - __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset)); - __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE)); - __ b(lt, miss_label); - - // Load properties array. - Register properties = scratch0; - __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - // Check that the properties array is a dictionary. - __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset)); - Register tmp = properties; - __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); - __ cmp(map, tmp); - __ b(ne, miss_label); - - // Restore the temporarily used register. - __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - - - StringDictionaryLookupStub::GenerateNegativeLookup(masm, - miss_label, - &done, - receiver, - properties, - name, - scratch1); - __ bind(&done); - __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup( +MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup( MacroAssembler* masm, Label* miss_label, Register receiver, @@ -194,7 +138,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup( __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - MaybeObject* result = StringDictionaryLookupStub::TryGenerateNegativeLookup( + MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup( masm, miss_label, &done, @@ -315,10 +259,8 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( // are loaded directly otherwise the property is loaded from the properties // fixed array. 
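Simulator::StackLimit above reports a limit that sits a safety margin (512 bytes in the reverted 3.7.1 code, 256 in the restored 3.7.0 code) above the low end of the simulated stack, so values pushed before the next overflow check cannot run past the allocation. A tiny illustrative sketch with hypothetical names:

#include <cstddef>
#include <cstdint>

// The limit handed to generated code is `margin` bytes inside the allocation;
// the gap absorbs pushes that happen between overflow checks.
uintptr_t ReportedStackLimit(const char* stack_allocation_base, size_t margin) {
  return reinterpret_cast<uintptr_t>(stack_allocation_base) + margin;
}

bool StackOverflowImminent(uintptr_t sp, uintptr_t reported_limit) {
  return sp < reported_limit;  // trip the check while headroom remains
}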
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - Handle<JSObject> holder, - int index) { + Register dst, Register src, + JSObject* holder, int index) { // Adjust for the number of properties stored in the holder. index -= holder->map()->inobject_properties(); if (index < 0) { @@ -425,9 +367,9 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, // may be clobbered. Upon branch to miss_label, the receiver and name // registers have their original values. void StubCompiler::GenerateStoreField(MacroAssembler* masm, - Handle<JSObject> object, + JSObject* object, int index, - Handle<Map> transition, + Map* transition, Register receiver_reg, Register name_reg, Register scratch, @@ -453,11 +395,11 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); // Perform map transition for the receiver if necessary. - if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) { + if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) { // The properties must be extended before we can store the value. // We jump to a runtime call that extends the properties array. __ push(receiver_reg); - __ mov(r2, Operand(transition)); + __ mov(r2, Operand(Handle<Map>(transition))); __ Push(r2, r0); __ TailCallExternalReference( ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), @@ -467,10 +409,10 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, return; } - if (!transition.is_null()) { + if (transition != NULL) { // Update the map of the object; no write barrier updating is // needed because the map is never in new space. - __ mov(ip, Operand(transition)); + __ mov(ip, Operand(Handle<Map>(transition))); __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); } @@ -525,15 +467,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); - Handle<Code> code = (kind == Code::LOAD_IC) - ? masm->isolate()->builtins()->LoadIC_Miss() - : masm->isolate()->builtins()->KeyedLoadIC_Miss(); - __ Jump(code, RelocInfo::CODE_TARGET); + Code* code = NULL; + if (kind == Code::LOAD_IC) { + code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss); + } else { + code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss); + } + + Handle<Code> ic(code); + __ Jump(ic, RelocInfo::CODE_TARGET); } static void GenerateCallFunction(MacroAssembler* masm, - Handle<Object> object, + Object* object, const ParameterCount& arguments, Label* miss, Code::ExtraICState extra_ic_state) { @@ -921,26 +868,7 @@ class CallInterceptorCompiler BASE_EMBEDDED { // Generate code to check that a global property cell is empty. Create // the property cell at compilation time if no cell exists for the // property. -static void GenerateCheckPropertyCell(MacroAssembler* masm, - Handle<GlobalObject> global, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSGlobalPropertyCell> cell = - GlobalObject::EnsurePropertyCell(global, name); - ASSERT(cell->value()->IsTheHole()); - __ mov(scratch, Operand(cell)); - __ ldr(scratch, - FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); - __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); - __ cmp(scratch, ip); - __ b(ne, miss); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. 
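GenerateFastPropertyLoad above biases the field index by the holder's in-object property count: a negative result means the field lives inside the object itself, a non-negative one indexes the out-of-line properties array. A hedged sketch of that split using made-up containers, not V8's actual object layout:

#include <vector>

struct FakeHolder {
  int inobject_properties;            // number of fields stored in the object
  std::vector<int> inobject_fields;   // size == inobject_properties
  std::vector<int> properties;        // out-of-line backing store
};

int LoadFastProperty(const FakeHolder& holder, int index) {
  int adjusted = index - holder.inobject_properties;
  return adjusted < 0 ? holder.inobject_fields[index]  // in-object slot
                      : holder.properties[adjusted];   // properties array slot
}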
-MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( +MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell( MacroAssembler* masm, GlobalObject* global, String* name, @@ -961,32 +889,9 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( return cell; } - // Calls GenerateCheckPropertyCell for each global object in the prototype chain // from object to (but not including) holder. -static void GenerateCheckPropertyCells(MacroAssembler* masm, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSObject> current = object; - while (!current.is_identical_to(holder)) { - if (current->IsGlobalObject()) { - GenerateCheckPropertyCell(masm, - Handle<GlobalObject>::cast(current), - name, - scratch, - miss); - } - current = Handle<JSObject>(JSObject::cast(current->GetPrototype())); - } -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( +MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells( MacroAssembler* masm, JSObject* object, JSObject* holder, @@ -997,7 +902,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( while (current != holder) { if (current->IsGlobalObject()) { // Returns a cell or a failure. - MaybeObject* result = TryGenerateCheckPropertyCell( + MaybeObject* result = GenerateCheckPropertyCell( masm, GlobalObject::cast(current), name, @@ -1122,112 +1027,6 @@ static void GenerateUInt2Double(MacroAssembler* masm, #define __ ACCESS_MASM(masm()) -Register StubCompiler::CheckPrototypes(Handle<JSObject> object, - Register object_reg, - Handle<JSObject> holder, - Register holder_reg, - Register scratch1, - Register scratch2, - Handle<String> name, - int save_at_depth, - Label* miss) { - // Make sure there's no overlap between holder and object registers. - ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); - ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) - && !scratch2.is(scratch1)); - - // Keep track of the current object in register reg. - Register reg = object_reg; - int depth = 0; - - if (save_at_depth == depth) { - __ str(reg, MemOperand(sp)); - } - - // Check the maps in the prototype chain. - // Traverse the prototype chain from the object and do map checks. - Handle<JSObject> current = object; - while (!current.is_identical_to(holder)) { - ++depth; - - // Only global objects and objects that do not require access - // checks are allowed in stubs. - ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); - - Handle<JSObject> prototype(JSObject::cast(current->GetPrototype())); - if (!current->HasFastProperties() && - !current->IsJSGlobalObject() && - !current->IsJSGlobalProxy()) { - if (!name->IsSymbol()) { - name = factory()->LookupSymbol(name); - } - ASSERT(current->property_dictionary()->FindEntry(*name) == - StringDictionary::kNotFound); - - GenerateDictionaryNegativeLookup(masm(), miss, reg, name, - scratch1, scratch2); - - __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); - reg = holder_reg; // From now on the object will be in holder_reg. - __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); - } else { - Handle<Map> current_map(current->map()); - __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); - __ cmp(scratch1, Operand(current_map)); - // Branch on the result of the map check. - __ b(ne, miss); - // Check access rights to the global object. 
This has to happen after - // the map check so that we know that the object is actually a global - // object. - if (current->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch2, miss); - } - reg = holder_reg; // From now on the object will be in holder_reg. - - if (heap()->InNewSpace(*prototype)) { - // The prototype is in new space; we cannot store a reference to it - // in the code. Load it from the map. - __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); - } else { - // The prototype is in old space; load it directly. - __ mov(reg, Operand(prototype)); - } - } - - if (save_at_depth == depth) { - __ str(reg, MemOperand(sp)); - } - - // Go to the next object in the prototype chain. - current = prototype; - } - - // Log the check depth. - LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1)); - - // Check the holder map. - __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); - __ cmp(scratch1, Operand(Handle<Map>(current->map()))); - __ b(ne, miss); - - // Perform security check for access to the global object. - ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); - if (holder->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch1, miss); - } - - // If we've skipped any global objects, it's not enough to verify that - // their maps haven't changed. We also need to check that the property - // cell for the property is still empty. - GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss); - - // Return the register containing the holder. - return reg; -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. Register StubCompiler::CheckPrototypes(JSObject* object, Register object_reg, JSObject* holder, @@ -1277,13 +1076,12 @@ Register StubCompiler::CheckPrototypes(JSObject* object, ASSERT(current->property_dictionary()->FindEntry(name) == StringDictionary::kNotFound); - MaybeObject* negative_lookup = - TryGenerateDictionaryNegativeLookup(masm(), - miss, - reg, - name, - scratch1, - scratch2); + MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(), + miss, + reg, + name, + scratch1, + scratch2); if (negative_lookup->IsFailure()) { set_failure(Failure::cast(negative_lookup)); return reg; @@ -1352,17 +1150,17 @@ Register StubCompiler::CheckPrototypes(JSObject* object, ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); if (holder->IsJSGlobalProxy()) { __ CheckAccessGlobalProxy(reg, scratch1, miss); - } + }; // If we've skipped any global objects, it's not enough to verify // that their maps haven't changed. We also need to check that the // property cell for the property is still empty. - MaybeObject* result = TryGenerateCheckPropertyCells(masm(), - object, - holder, - name, - scratch1, - miss); + MaybeObject* result = GenerateCheckPropertyCells(masm(), + object, + holder, + name, + scratch1, + miss); if (result->IsFailure()) set_failure(Failure::cast(result)); // Return the register containing the holder. @@ -1370,44 +1168,45 @@ Register StubCompiler::CheckPrototypes(JSObject* object, } -void StubCompiler::GenerateLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadField(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, int index, - Handle<String> name, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ JumpIfSmi(receiver, miss); // Check that the maps haven't changed. 
- Register reg = CheckPrototypes( - object, receiver, holder, scratch1, scratch2, scratch3, name, miss); + Register reg = + CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, + name, miss); GenerateFastPropertyLoad(masm(), r0, reg, holder, index); __ Ret(); } -void StubCompiler::GenerateLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadConstant(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, - Handle<Object> value, - Handle<String> name, + Object* value, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ JumpIfSmi(receiver, miss); // Check that the maps haven't changed. - CheckPrototypes( - object, receiver, holder, scratch1, scratch2, scratch3, name, miss); + CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, name, + miss); // Return the constant value. - __ mov(r0, Operand(value)); + __ mov(r0, Operand(Handle<Object>(value))); __ Ret(); } @@ -1566,8 +1365,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, // We found FIELD property in prototype chain of interceptor's holder. // Retrieve a field from field's holder. GenerateFastPropertyLoad(masm(), r0, holder_reg, - Handle<JSObject>(lookup->holder()), - lookup->GetFieldIndex()); + lookup->holder(), lookup->GetFieldIndex()); __ Ret(); } else { // We found CALLBACKS property in prototype chain of interceptor's @@ -1618,9 +1416,9 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, } -void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) { +void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { if (kind_ == Code::KEYED_CALL_IC) { - __ cmp(r2, Operand(name)); + __ cmp(r2, Operand(Handle<String>(name))); __ b(ne, miss); } } @@ -1680,22 +1478,11 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, } -void CallStubCompiler::GenerateMissBranch() { - Handle<Code> code = +MaybeObject* CallStubCompiler::GenerateMissBranch() { + MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), kind_, - extra_state_); - __ Jump(code, RelocInfo::CODE_TARGET); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* CallStubCompiler::TryGenerateMissBranch() { - MaybeObject* maybe_obj = - isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(), - kind_, - extra_state_); + extra_ic_state_); Object* obj; if (!maybe_obj->ToObject(&obj)) return maybe_obj; __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); @@ -1703,10 +1490,10 @@ MaybeObject* CallStubCompiler::TryGenerateMissBranch() { } -Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, - Handle<JSObject> holder, +MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, + JSObject* holder, int index, - Handle<String> name) { + String* name) { // ----------- S t a t e ------------- // -- r2 : name // -- lr : return address @@ -1726,11 +1513,12 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss); GenerateFastPropertyLoad(masm(), r1, reg, holder, index); - GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_); + GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_); // Handle call cache miss. 
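Both CheckPrototypes variants above compile one map check per object between the receiver and the holder and branch to the miss label as soon as any map differs from the one recorded when the stub was built (dictionary-mode objects and global proxies get extra handling that this sketch ignores). A condensed version with hypothetical structs:

#include <cstddef>
#include <vector>

struct Map;                     // identity is all that matters here
struct HeapObj {
  const Map* map;
  const HeapObj* prototype;
};

// True if every map from `object` up to and including `holder` still matches
// the expectations baked into the stub; false corresponds to the jump to the
// miss label in the generated code.
bool PrototypeChainUnchanged(const HeapObj* object, const HeapObj* holder,
                             const std::vector<const Map*>& expected_maps) {
  std::size_t i = 0;
  for (const HeapObj* current = object;; current = current->prototype) {
    if (current == nullptr) return false;               // holder not reached
    if (i >= expected_maps.size() || current->map != expected_maps[i++]) {
      return false;                                     // -> miss
    }
    if (current == holder) return true;
  }
}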
__ bind(&miss); - GenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); + if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. return GetCode(FIELD, name); @@ -1755,7 +1543,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); Register receiver = r1; @@ -1831,7 +1619,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, __ bind(&with_write_barrier); __ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset)); - __ CheckFastObjectElements(r6, r6, &call_builtin); + __ CheckFastSmiOnlyElements(r6, r6, &call_builtin); // Save new length. __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); @@ -1921,11 +1709,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -1950,7 +1738,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, Register receiver = r1; Register elements = r3; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack const int argc = arguments().immediate(); @@ -2010,11 +1798,11 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2043,12 +1831,12 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -2096,11 +1884,11 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( // Restore function name in r2. __ Move(r2, Handle<String>(name)); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2129,12 +1917,12 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -2184,11 +1972,11 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( // Restore function name in r2. 
__ Move(r2, Handle<String>(name)); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2213,7 +2001,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); @@ -2256,11 +2044,11 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( __ bind(&miss); // r2: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2290,7 +2078,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss, slow; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); @@ -2404,11 +2192,11 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, __ bind(&miss); // r2: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2432,7 +2220,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); @@ -2505,11 +2293,11 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, __ bind(&miss); // r2: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2534,7 +2322,7 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( Label miss, miss_before_stack_reserved; - GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved); + GenerateNameCheck(name, &miss_before_stack_reserved); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2559,11 +2347,11 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( FreeSpaceForFastApiCall(masm()); __ bind(&miss_before_stack_reserved); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. 
- return TryGetCode(function); + return GetCode(function); } @@ -2587,7 +2375,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack const int argc = arguments().immediate(); @@ -2686,18 +2474,18 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, UNREACHABLE(); } - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind); // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2711,18 +2499,18 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. const int argc = arguments().immediate(); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); // Get the receiver from the stack. __ ldr(r1, MemOperand(sp, argc * kPointerSize)); - CallInterceptorCompiler compiler(this, arguments(), r2, extra_state_); + CallInterceptorCompiler compiler(this, arguments(), r2, extra_ic_state_); MaybeObject* result = compiler.Compile(masm(), object, holder, @@ -2742,16 +2530,15 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // Restore receiver. __ ldr(r0, MemOperand(sp, argc * kPointerSize)); - GenerateCallFunction(masm(), Handle<Object>(object), arguments(), &miss, - extra_state_); + GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_); // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } @@ -2776,7 +2563,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. const int argc = arguments().immediate(); @@ -2798,33 +2585,39 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object, // Jump to the cached code (tail call). Counters* counters = masm()->isolate()->counters(); __ IncrementCounter(counters->call_global_inline(), 1, r3, r4); + ASSERT(function->is_compiled()); Handle<Code> code(function->code()); ParameterCount expected(function->shared()->formal_parameter_count()); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. 
- __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); - __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION, - NullCallWrapper(), call_kind); + if (V8::UseCrankshaft()) { + // TODO(kasperl): For now, we always call indirectly through the + // code field in the function to allow recompilation to take effect + // without changing any of the call sites. + __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); + __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION, + NullCallWrapper(), call_kind); + } else { + __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET, + JUMP_FUNCTION, call_kind); + } // Handle call cache miss. __ bind(&miss); __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(NORMAL, name); + return GetCode(NORMAL, name); } -Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, +MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, int index, - Handle<Map> transition, - Handle<String> name) { + Map* transition, + String* name) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : receiver @@ -2833,20 +2626,24 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, // ----------------------------------- Label miss; - GenerateStoreField(masm(), object, index, transition, r1, r2, r3, &miss); + GenerateStoreField(masm(), + object, + index, + transition, + r1, r2, r3, + &miss); __ bind(&miss); Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss(); __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name); + return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); } -Handle<Code> StoreStubCompiler::CompileStoreCallback( - Handle<JSObject> object, - Handle<AccessorInfo> callback, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, + AccessorInfo* callback, + String* name) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : receiver @@ -2873,7 +2670,7 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback( ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); __ push(r1); // receiver - __ mov(ip, Operand(callback)); // callback info + __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback info __ Push(ip, r2, r0); // Do tail-call to the runtime system. 
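Both InvokeFunction and CompileCallGlobal above call through the function's code-entry field (unconditionally in the removed 3.7.1 lines, only under Crankshaft in the restored 3.7.0 lines) so that recompilation takes effect without patching any call site. A toy sketch of that indirection with invented types:

#include <cstdio>

// A JSFunction-like record whose current code is reached through a mutable
// entry pointer, standing in for JSFunction::kCodeEntryOffset.
struct ToyFunction {
  void (*code_entry)();
};

void Invoke(ToyFunction* fn) {
  fn->code_entry();  // re-read on every call; no call-site patching needed
}

void GenericCode()    { std::puts("unoptimized"); }
void RecompiledCode() { std::puts("optimized"); }

int main() {
  ToyFunction fn{&GenericCode};
  Invoke(&fn);                      // runs the generic version
  fn.code_entry = &RecompiledCode;  // "recompilation" swaps the entry
  Invoke(&fn);                      // the same call site now runs new code
  return 0;
}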
@@ -2892,9 +2689,8 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback( } -Handle<Code> StoreStubCompiler::CompileStoreInterceptor( - Handle<JSObject> receiver, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, + String* name) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : receiver @@ -2941,10 +2737,9 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor( } -Handle<Code> StoreStubCompiler::CompileStoreGlobal( - Handle<GlobalObject> object, - Handle<JSGlobalPropertyCell> cell, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, + JSGlobalPropertyCell* cell, + String* name) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : receiver @@ -2962,7 +2757,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal( // cell could have been deleted and reintroducing the global needs // to update the property details in the property dictionary of the // global object. We bail out to the runtime system to do that. - __ mov(r4, Operand(cell)); + __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell))); __ LoadRoot(r5, Heap::kTheHoleValueRootIndex); __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); __ cmp(r5, r6); @@ -2995,9 +2790,9 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal( } -Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> last) { +MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, + JSObject* object, + JSObject* last) { // ----------- S t a t e ------------- // -- r0 : receiver // -- lr : return address @@ -3013,8 +2808,15 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, // If the last object in the prototype chain is a global object, // check that the global property cell is empty. if (last->IsGlobalObject()) { - GenerateCheckPropertyCell( - masm(), Handle<GlobalObject>::cast(last), name, r1, &miss); + MaybeObject* cell = GenerateCheckPropertyCell(masm(), + GlobalObject::cast(last), + name, + r1, + &miss); + if (cell->IsFailure()) { + miss.Unuse(); + return cell; + } } // Return undefined if maps of the full prototype chain are still the @@ -3026,14 +2828,14 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return GetCode(NONEXISTENT, factory()->empty_string()); + return GetCode(NONEXISTENT, heap()->empty_string()); } -Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, + JSObject* holder, int index, - Handle<String> name) { + String* name) { // ----------- S t a t e ------------- // -- r0 : receiver // -- r2 : name @@ -3072,14 +2874,14 @@ MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. 
- return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<Object> value, - Handle<String> name) { +MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, + JSObject* holder, + Object* value, + String* name) { // ----------- S t a t e ------------- // -- r0 : receiver // -- r2 : name @@ -3106,7 +2908,7 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, // ----------------------------------- Label miss; - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); GenerateLoadInterceptor(object, holder, @@ -3122,16 +2924,15 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } -Handle<Code> LoadStubCompiler::CompileLoadGlobal( - Handle<JSObject> object, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<String> name, - bool is_dont_delete) { +MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + String* name, + bool is_dont_delete) { // ----------- S t a t e ------------- // -- r0 : receiver // -- r2 : name @@ -3142,7 +2943,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( // If the object is the holder then we know that it's a global // object which can only happen for contextual calls. In this case, // the receiver cannot be a smi. - if (!object.is_identical_to(holder)) { + if (object != holder) { __ JumpIfSmi(r0, &miss); } @@ -3150,7 +2951,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss); // Get the value from the cell. - __ mov(r3, Operand(cell)); + __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); // Check for deleted property if property can actually be deleted. @@ -3174,9 +2975,9 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, +MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, + JSObject* receiver, + JSObject* holder, int index) { // ----------- S t a t e ------------- // -- lr : return address @@ -3186,7 +2987,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, Label miss; // Check the key is the cached one. - __ cmp(r0, Operand(name)); + __ cmp(r0, Operand(Handle<String>(name))); __ b(ne, &miss); GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss); @@ -3223,15 +3024,14 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); - return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( - Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, + JSObject* receiver, + JSObject* holder, + Object* value) { // ----------- S t a t e ------------- // -- lr : return address // -- r0 : key @@ -3240,7 +3040,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( Label miss; // Check the key is the cached one. 
- __ cmp(r0, Operand(name)); + __ cmp(r0, Operand(Handle<String>(name))); __ b(ne, &miss); GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss); @@ -3266,7 +3066,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ cmp(r0, Operand(Handle<String>(name))); __ b(ne, &miss); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); GenerateLoadInterceptor(receiver, holder, @@ -3281,12 +3081,11 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { // ----------- S t a t e ------------- // -- lr : return address // -- r0 : key @@ -3295,7 +3094,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( Label miss; // Check the key is the cached one. - __ cmp(r0, Operand(name)); + __ cmp(r0, Operand(Handle<String>(name))); __ b(ne, &miss); GenerateLoadArrayLength(masm(), r1, r2, &miss); @@ -3306,8 +3105,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { // ----------- S t a t e ------------- // -- lr : return address // -- r0 : key @@ -3319,7 +3117,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3); // Check the key is the cached one. - __ cmp(r0, Operand(name)); + __ cmp(r0, Operand(Handle<String>(name))); __ b(ne, &miss); GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true); @@ -3332,8 +3130,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { // ----------- S t a t e ------------- // -- lr : return address // -- r0 : key @@ -3345,7 +3142,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3); // Check the name hasn't changed. - __ cmp(r0, Operand(name)); + __ cmp(r0, Operand(Handle<String>(name))); __ b(ne, &miss); GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss); @@ -3357,29 +3154,33 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadElement( - Handle<Map> receiver_map) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) { // ----------- S t a t e ------------- // -- lr : return address // -- r0 : key // -- r1 : receiver // ----------------------------------- + Code* stub; ElementsKind elements_kind = receiver_map->elements_kind(); - Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode(); - - __ DispatchMap(r1, r2, receiver_map, stub, DO_SMI_CHECK); + MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode(); + if (!maybe_stub->To(&stub)) return maybe_stub; + __ DispatchMap(r1, + r2, + Handle<Map>(receiver_map), + Handle<Code>(stub), + DO_SMI_CHECK); Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss(); __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. 
- return GetCode(NORMAL, factory()->empty_string()); + return GetCode(NORMAL, NULL); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic( - MapHandleList* receiver_maps, - CodeHandleList* handler_ics) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadPolymorphic( + MapList* receiver_maps, + CodeList* handler_ics) { // ----------- S t a t e ------------- // -- lr : return address // -- r0 : key @@ -3391,9 +3192,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic( int receiver_count = receiver_maps->length(); __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); for (int current = 0; current < receiver_count; ++current) { - __ mov(ip, Operand(receiver_maps->at(current))); + Handle<Map> map(receiver_maps->at(current)); + Handle<Code> code(handler_ics->at(current)); + __ mov(ip, Operand(map)); __ cmp(r2, ip); - __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, eq); + __ Jump(code, RelocInfo::CODE_TARGET, eq); } __ bind(&miss); @@ -3401,14 +3204,14 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic( __ Jump(miss_ic, RelocInfo::CODE_TARGET, al); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC); + return GetCode(NORMAL, NULL, MEGAMORPHIC); } -Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, +MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, int index, - Handle<Map> transition, - Handle<String> name) { + Map* transition, + String* name) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : name @@ -3421,12 +3224,17 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4); // Check that the name has not changed. - __ cmp(r1, Operand(name)); + __ cmp(r1, Operand(Handle<String>(name))); __ b(ne, &miss); // r3 is used as scratch register. r1 and r2 keep their values if a jump to // the miss label is generated. - GenerateStoreField(masm(), object, index, transition, r2, r1, r3, &miss); + GenerateStoreField(masm(), + object, + index, + transition, + r2, r1, r3, + &miss); __ bind(&miss); __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4); @@ -3434,12 +3242,11 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name); + return GetCode(transition == NULL ? 
FIELD : MAP_TRANSITION, name); } -Handle<Code> KeyedStoreStubCompiler::CompileStoreElement( - Handle<Map> receiver_map) { +MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : key @@ -3447,25 +3254,30 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement( // -- lr : return address // -- r3 : scratch // ----------------------------------- + Code* stub; ElementsKind elements_kind = receiver_map->elements_kind(); bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE; - Handle<Code> stub = - KeyedStoreElementStub(is_js_array, elements_kind).GetCode(); - - __ DispatchMap(r2, r3, receiver_map, stub, DO_SMI_CHECK); + MaybeObject* maybe_stub = + KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode(); + if (!maybe_stub->To(&stub)) return maybe_stub; + __ DispatchMap(r2, + r3, + Handle<Map>(receiver_map), + Handle<Code>(stub), + DO_SMI_CHECK); Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss(); __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string()); + return GetCode(NORMAL, NULL); } -Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( - MapHandleList* receiver_maps, - CodeHandleList* handler_stubs, - MapHandleList* transitioned_maps) { +MaybeObject* KeyedStoreStubCompiler::CompileStorePolymorphic( + MapList* receiver_maps, + CodeList* handler_stubs, + MapList* transitioned_maps) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : key @@ -3479,15 +3291,17 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( int receiver_count = receiver_maps->length(); __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); for (int i = 0; i < receiver_count; ++i) { - __ mov(ip, Operand(receiver_maps->at(i))); + Handle<Map> map(receiver_maps->at(i)); + Handle<Code> code(handler_stubs->at(i)); + __ mov(ip, Operand(map)); __ cmp(r3, ip); - if (transitioned_maps->at(i).is_null()) { - __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq); + if (transitioned_maps->at(i) == NULL) { + __ Jump(code, RelocInfo::CODE_TARGET, eq); } else { Label next_map; - __ b(ne, &next_map); - __ mov(r3, Operand(transitioned_maps->at(i))); - __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al); + __ b(eq, &next_map); + __ mov(r4, Operand(Handle<Map>(transitioned_maps->at(i)))); + __ Jump(code, RelocInfo::CODE_TARGET, al); __ bind(&next_map); } } @@ -3497,7 +3311,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( __ Jump(miss_ic, RelocInfo::CODE_TARGET, al); // Return the generated code. 
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC); + return GetCode(NORMAL, NULL, MEGAMORPHIC); } diff --git a/deps/v8/src/array.js b/deps/v8/src/array.js index 214065c7b..e1d7c2064 100644 --- a/deps/v8/src/array.js +++ b/deps/v8/src/array.js @@ -1013,22 +1013,18 @@ function ArrayFilter(f, receiver) { } if (IS_NULL_OR_UNDEFINED(receiver)) { receiver = %GetDefaultReceiver(f) || receiver; - } else if (!IS_SPEC_OBJECT(receiver)) { - receiver = ToObject(receiver); } - var result = new $Array(); - var accumulator = new InternalArray(); - var accumulator_length = 0; + var result = []; + var result_length = 0; for (var i = 0; i < length; i++) { var current = array[i]; if (!IS_UNDEFINED(current) || i in array) { if (%_CallFunction(receiver, current, i, array, f)) { - accumulator[accumulator_length++] = current; + result[result_length++] = current; } } } - %MoveArrayContents(accumulator, result); return result; } @@ -1049,8 +1045,6 @@ function ArrayForEach(f, receiver) { } if (IS_NULL_OR_UNDEFINED(receiver)) { receiver = %GetDefaultReceiver(f) || receiver; - } else if (!IS_SPEC_OBJECT(receiver)) { - receiver = ToObject(receiver); } for (var i = 0; i < length; i++) { @@ -1080,8 +1074,6 @@ function ArraySome(f, receiver) { } if (IS_NULL_OR_UNDEFINED(receiver)) { receiver = %GetDefaultReceiver(f) || receiver; - } else if (!IS_SPEC_OBJECT(receiver)) { - receiver = ToObject(receiver); } for (var i = 0; i < length; i++) { @@ -1110,8 +1102,6 @@ function ArrayEvery(f, receiver) { } if (IS_NULL_OR_UNDEFINED(receiver)) { receiver = %GetDefaultReceiver(f) || receiver; - } else if (!IS_SPEC_OBJECT(receiver)) { - receiver = ToObject(receiver); } for (var i = 0; i < length; i++) { @@ -1139,8 +1129,6 @@ function ArrayMap(f, receiver) { } if (IS_NULL_OR_UNDEFINED(receiver)) { receiver = %GetDefaultReceiver(f) || receiver; - } else if (!IS_SPEC_OBJECT(receiver)) { - receiver = ToObject(receiver); } var result = new $Array(); diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc index 4dc2394b0..bda85e69d 100644 --- a/deps/v8/src/assembler.cc +++ b/deps/v8/src/assembler.cc @@ -834,8 +834,8 @@ ExternalReference ExternalReference::keyed_lookup_cache_field_offsets( } -ExternalReference ExternalReference::roots_array_start(Isolate* isolate) { - return ExternalReference(isolate->heap()->roots_array_start()); +ExternalReference ExternalReference::roots_address(Isolate* isolate) { + return ExternalReference(isolate->heap()->roots_address()); } @@ -1137,23 +1137,6 @@ static int native_compare_doubles(double y, double x) { } -bool EvalComparison(Token::Value op, double op1, double op2) { - ASSERT(Token::IsCompareOp(op)); - switch (op) { - case Token::EQ: - case Token::EQ_STRICT: return (op1 == op2); - case Token::NE: return (op1 != op2); - case Token::LT: return (op1 < op2); - case Token::GT: return (op1 > op2); - case Token::LTE: return (op1 <= op2); - case Token::GTE: return (op1 >= op2); - default: - UNREACHABLE(); - return false; - } -} - - ExternalReference ExternalReference::double_fp_operation( Token::Value operation, Isolate* isolate) { typedef double BinaryFPOperation(double x, double y); diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h index 5b7136375..e5661c9f1 100644 --- a/deps/v8/src/assembler.h +++ b/deps/v8/src/assembler.h @@ -279,17 +279,14 @@ class RelocInfo BASE_EMBEDDED { // this relocation applies to; // can only be called if IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY INLINE(Address target_address()); - INLINE(void set_target_address(Address target, - 
WriteBarrierMode mode = UPDATE_WRITE_BARRIER)); + INLINE(void set_target_address(Address target)); INLINE(Object* target_object()); INLINE(Handle<Object> target_object_handle(Assembler* origin)); INLINE(Object** target_object_address()); - INLINE(void set_target_object(Object* target, - WriteBarrierMode mode = UPDATE_WRITE_BARRIER)); + INLINE(void set_target_object(Object* target)); INLINE(JSGlobalPropertyCell* target_cell()); INLINE(Handle<JSGlobalPropertyCell> target_cell_handle()); - INLINE(void set_target_cell(JSGlobalPropertyCell* cell, - WriteBarrierMode mode = UPDATE_WRITE_BARRIER)); + INLINE(void set_target_cell(JSGlobalPropertyCell* cell)); // Read the address of the word containing the target_address in an @@ -596,8 +593,8 @@ class ExternalReference BASE_EMBEDDED { static ExternalReference keyed_lookup_cache_keys(Isolate* isolate); static ExternalReference keyed_lookup_cache_field_offsets(Isolate* isolate); - // Static variable Heap::roots_array_start() - static ExternalReference roots_array_start(Isolate* isolate); + // Static variable Heap::roots_address() + static ExternalReference roots_address(Isolate* isolate); // Static variable StackGuard::address_of_jslimit() static ExternalReference address_of_stack_limit(Isolate* isolate); @@ -850,8 +847,6 @@ static inline int NumberOfBitsSet(uint32_t x) { return num_bits_set; } -bool EvalComparison(Token::Value op, double op1, double op2); - // Computes pow(x, y) with the special cases in the spec for Math.pow. double power_double_int(double x, int y); double power_double_double(double x, double y); diff --git a/deps/v8/src/ast-inl.h b/deps/v8/src/ast-inl.h index f8b460d32..731ad2ff3 100644 --- a/deps/v8/src/ast-inl.h +++ b/deps/v8/src/ast-inl.h @@ -111,18 +111,8 @@ ForInStatement::ForInStatement(Isolate* isolate, ZoneStringList* labels) } -int FunctionLiteral::start_position() const { - return scope()->start_position(); -} - - -int FunctionLiteral::end_position() const { - return scope()->end_position(); -} - - -StrictModeFlag FunctionLiteral::strict_mode_flag() const { - return scope()->strict_mode_flag(); +bool FunctionLiteral::strict_mode() const { + return scope()->is_strict_mode(); } diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc index 9e34bc0e8..d49381454 100644 --- a/deps/v8/src/ast.cc +++ b/deps/v8/src/ast.cc @@ -66,6 +66,7 @@ VariableProxy::VariableProxy(Isolate* isolate, Variable* var) name_(var->name()), var_(NULL), // Will be set by the call to BindTo. is_this_(var->is_this()), + inside_with_(false), is_trivial_(false), position_(RelocInfo::kNoPosition) { BindTo(var); @@ -75,11 +76,13 @@ VariableProxy::VariableProxy(Isolate* isolate, Variable* var) VariableProxy::VariableProxy(Isolate* isolate, Handle<String> name, bool is_this, + bool inside_with, int position) : Expression(isolate), name_(name), var_(NULL), is_this_(is_this), + inside_with_(inside_with), is_trivial_(false), position_(position) { // Names must be canonicalized for fast equality checks. @@ -465,7 +468,7 @@ bool FunctionLiteral::IsInlineable() const { bool ThisFunction::IsInlineable() const { - return true; + return false; } @@ -720,7 +723,7 @@ bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) { holder_ = Handle<JSObject>::null(); } while (true) { - LookupResult lookup(type->GetIsolate()); + LookupResult lookup; type->LookupInDescriptors(NULL, *name, &lookup); // If the function wasn't found directly in the map, we start // looking upwards through the prototype chain. 
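Note: the ast.cc hunk above ends inside Call::ComputeTarget, which looks a name up in a map's descriptors and, when it is not found directly, keeps walking upwards through the prototype chain. Below is an illustrative, self-contained sketch of that kind of walk; ProtoMap and Lookup are hypothetical names, and the std::map stands in for the descriptor lookup.

// Illustrative sketch only: walk a chain of "maps", checking own properties
// first and moving to the prototype when the name is missing.
#include <cstdio>
#include <map>
#include <string>

struct ProtoMap {
  std::map<std::string, int> own;   // "descriptors" owned by this map
  const ProtoMap* prototype;        // next map in the chain, or nullptr
};

const int* Lookup(const ProtoMap* m, const std::string& name) {
  while (m != nullptr) {
    auto it = m->own.find(name);
    if (it != m->own.end()) return &it->second;  // found directly
    m = m->prototype;                            // keep walking upwards
  }
  return nullptr;                                // not found anywhere
}

int main() {
  ProtoMap base{{{"foo", 1}}, nullptr};
  ProtoMap derived{{}, &base};
  const int* hit = Lookup(&derived, "foo");
  printf("%d\n", hit ? *hit : -1);   // prints 1, found on the prototype
  return 0;
}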
diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h index 3de00ef5d..0efc4835c 100644 --- a/deps/v8/src/ast.h +++ b/deps/v8/src/ast.h @@ -405,10 +405,7 @@ class Declaration: public AstNode { mode_(mode), fun_(fun), scope_(scope) { - ASSERT(mode == VAR || - mode == CONST || - mode == CONST_HARMONY || - mode == LET); + ASSERT(mode == VAR || mode == CONST || mode == LET); // At the moment there are no "const functions"'s in JavaScript... ASSERT(fun == NULL || mode == VAR || mode == LET); } @@ -1131,6 +1128,7 @@ class VariableProxy: public Expression { Handle<String> name() const { return name_; } Variable* var() const { return var_; } bool is_this() const { return is_this_; } + bool inside_with() const { return inside_with_; } int position() const { return position_; } void MarkAsTrivial() { is_trivial_ = true; } @@ -1142,12 +1140,14 @@ class VariableProxy: public Expression { Handle<String> name_; Variable* var_; // resolved variable, or NULL bool is_this_; + bool inside_with_; bool is_trivial_; int position_; VariableProxy(Isolate* isolate, Handle<String> name, bool is_this, + bool inside_with, int position = RelocInfo::kNoPosition); friend class Scope; @@ -1620,6 +1620,8 @@ class FunctionLiteral: public Expression { bool has_only_simple_this_property_assignments, Handle<FixedArray> this_property_assignments, int num_parameters, + int start_position, + int end_position, Type type, bool has_duplicate_parameters) : Expression(isolate), @@ -1632,6 +1634,8 @@ class FunctionLiteral: public Expression { has_only_simple_this_property_assignments), this_property_assignments_(this_property_assignments), num_parameters_(num_parameters), + start_position_(start_position), + end_position_(end_position), function_token_position_(RelocInfo::kNoPosition), inferred_name_(HEAP->empty_string()), is_expression_(type != DECLARATION), @@ -1647,12 +1651,11 @@ class FunctionLiteral: public Expression { ZoneList<Statement*>* body() const { return body_; } void set_function_token_position(int pos) { function_token_position_ = pos; } int function_token_position() const { return function_token_position_; } - int start_position() const; - int end_position() const; + int start_position() const { return start_position_; } + int end_position() const { return end_position_; } bool is_expression() const { return is_expression_; } bool is_anonymous() const { return is_anonymous_; } - bool strict_mode() const { return strict_mode_flag() == kStrictMode; } - StrictModeFlag strict_mode_flag() const; + bool strict_mode() const; int materialized_literal_count() { return materialized_literal_count_; } int expected_property_count() { return expected_property_count_; } diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc index 6735ff454..dc722cb74 100644 --- a/deps/v8/src/bootstrapper.cc +++ b/deps/v8/src/bootstrapper.cc @@ -38,7 +38,6 @@ #include "macro-assembler.h" #include "natives.h" #include "objects-visiting.h" -#include "platform.h" #include "snapshot.h" #include "extensions/externalize-string-extension.h" #include "extensions/gc-extension.h" @@ -363,7 +362,6 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target, if (is_ecma_native) { function->shared()->set_instance_class_name(*symbol); } - function->shared()->set_native(true); return function; } @@ -377,28 +375,26 @@ Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor( PropertyAttributes attributes = static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); - DescriptorArray::WhitenessWitness witness(*descriptors); 
- { // Add length. Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionLength); CallbacksDescriptor d(*factory()->length_symbol(), *foreign, attributes); - descriptors->Set(0, &d, witness); + descriptors->Set(0, &d); } { // Add name. Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionName); CallbacksDescriptor d(*factory()->name_symbol(), *foreign, attributes); - descriptors->Set(1, &d, witness); + descriptors->Set(1, &d); } { // Add arguments. Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionArguments); CallbacksDescriptor d(*factory()->arguments_symbol(), *foreign, attributes); - descriptors->Set(2, &d, witness); + descriptors->Set(2, &d); } { // Add caller. Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionCaller); CallbacksDescriptor d(*factory()->caller_symbol(), *foreign, attributes); - descriptors->Set(3, &d, witness); + descriptors->Set(3, &d); } if (prototypeMode != DONT_ADD_PROTOTYPE) { // Add prototype. @@ -408,9 +404,9 @@ Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor( Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionPrototype); CallbacksDescriptor d(*factory()->prototype_symbol(), *foreign, attributes); - descriptors->Set(4, &d, witness); + descriptors->Set(4, &d); } - descriptors->Sort(witness); + descriptors->Sort(); return descriptors; } @@ -526,43 +522,41 @@ Handle<DescriptorArray> Genesis::ComputeStrictFunctionInstanceDescriptor( ? 4 : 5); PropertyAttributes attributes = static_cast<PropertyAttributes>( - DONT_ENUM | DONT_DELETE); - - DescriptorArray::WhitenessWitness witness(*descriptors); + DONT_ENUM | DONT_DELETE | READ_ONLY); { // length Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionLength); CallbacksDescriptor d(*factory()->length_symbol(), *foreign, attributes); - descriptors->Set(0, &d, witness); + descriptors->Set(0, &d); } { // name Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionName); CallbacksDescriptor d(*factory()->name_symbol(), *foreign, attributes); - descriptors->Set(1, &d, witness); + descriptors->Set(1, &d); } { // arguments CallbacksDescriptor d(*factory()->arguments_symbol(), *arguments, attributes); - descriptors->Set(2, &d, witness); + descriptors->Set(2, &d); } { // caller CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attributes); - descriptors->Set(3, &d, witness); + descriptors->Set(3, &d); } // prototype if (prototypeMode != DONT_ADD_PROTOTYPE) { - if (prototypeMode != ADD_WRITEABLE_PROTOTYPE) { - attributes = static_cast<PropertyAttributes>(attributes | READ_ONLY); + if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) { + attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY); } Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionPrototype); CallbacksDescriptor d(*factory()->prototype_symbol(), *foreign, attributes); - descriptors->Set(4, &d, witness); + descriptors->Set(4, &d); } - descriptors->Sort(witness); + descriptors->Sort(); return descriptors; } @@ -947,7 +941,6 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, ASSERT_EQ(0, initial_map->inobject_properties()); Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(5); - DescriptorArray::WhitenessWitness witness(*descriptors); PropertyAttributes final = static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); int enum_index = 0; @@ -957,7 +950,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, JSRegExp::kSourceFieldIndex, final, 
enum_index++); - descriptors->Set(0, &field, witness); + descriptors->Set(0, &field); } { // ECMA-262, section 15.10.7.2. @@ -965,7 +958,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, JSRegExp::kGlobalFieldIndex, final, enum_index++); - descriptors->Set(1, &field, witness); + descriptors->Set(1, &field); } { // ECMA-262, section 15.10.7.3. @@ -973,7 +966,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, JSRegExp::kIgnoreCaseFieldIndex, final, enum_index++); - descriptors->Set(2, &field, witness); + descriptors->Set(2, &field); } { // ECMA-262, section 15.10.7.4. @@ -981,7 +974,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, JSRegExp::kMultilineFieldIndex, final, enum_index++); - descriptors->Set(3, &field, witness); + descriptors->Set(3, &field); } { // ECMA-262, section 15.10.7.5. @@ -991,10 +984,10 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, JSRegExp::kLastIndexFieldIndex, writable, enum_index++); - descriptors->Set(4, &field, witness); + descriptors->Set(4, &field); } descriptors->SetNextEnumerationIndex(enum_index); - descriptors->Sort(witness); + descriptors->Sort(); initial_map->set_inobject_properties(5); initial_map->set_pre_allocated_property_fields(5); @@ -1072,7 +1065,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, DONT_ENUM); #ifdef DEBUG - LookupResult lookup(isolate); + LookupResult lookup; result->LocalLookup(heap->callee_symbol(), &lookup); ASSERT(lookup.IsProperty() && (lookup.type() == FIELD)); ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsCalleeIndex); @@ -1091,6 +1084,11 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, } { // --- aliased_arguments_boilerplate_ + Handle<Map> old_map(global_context()->arguments_boilerplate()->map()); + Handle<Map> new_map = factory->CopyMapDropTransitions(old_map); + new_map->set_pre_allocated_property_fields(2); + Handle<JSObject> result = factory->NewJSObjectFromMap(new_map); + new_map->set_elements_kind(NON_STRICT_ARGUMENTS_ELEMENTS); // Set up a well-formed parameter map to make assertions happy. Handle<FixedArray> elements = factory->NewFixedArray(2); elements->set_map(heap->non_strict_arguments_elements_map()); @@ -1099,16 +1097,12 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, elements->set(0, *array); array = factory->NewFixedArray(0); elements->set(1, *array); - - Handle<Map> old_map(global_context()->arguments_boilerplate()->map()); - Handle<Map> new_map = factory->CopyMapDropTransitions(old_map); - new_map->set_pre_allocated_property_fields(2); - Handle<JSObject> result = factory->NewJSObjectFromMap(new_map); - // Set elements kind after allocating the object because - // NewJSObjectFromMap assumes a fast elements map. - new_map->set_elements_kind(NON_STRICT_ARGUMENTS_ELEMENTS); - result->set_elements(*elements); + Handle<Map> non_strict_arguments_elements_map = + factory->GetElementsTransitionMap(result, + NON_STRICT_ARGUMENTS_ELEMENTS); + result->set_map(*non_strict_arguments_elements_map); ASSERT(result->HasNonStrictArgumentsElements()); + result->set_elements(*elements); global_context()->set_aliased_arguments_boilerplate(*result); } @@ -1131,20 +1125,19 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, // Create the descriptor array for the arguments object. 
Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(3); - DescriptorArray::WhitenessWitness witness(*descriptors); { // length FieldDescriptor d(*factory->length_symbol(), 0, DONT_ENUM); - descriptors->Set(0, &d, witness); + descriptors->Set(0, &d); } { // callee CallbacksDescriptor d(*factory->callee_symbol(), *callee, attributes); - descriptors->Set(1, &d, witness); + descriptors->Set(1, &d); } { // caller CallbacksDescriptor d(*factory->caller_symbol(), *caller, attributes); - descriptors->Set(2, &d, witness); + descriptors->Set(2, &d); } - descriptors->Sort(witness); + descriptors->Sort(); // Create the map. Allocate one in-object field for length. Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, @@ -1169,7 +1162,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, DONT_ENUM); #ifdef DEBUG - LookupResult lookup(isolate); + LookupResult lookup; result->LocalLookup(heap->length_symbol(), &lookup); ASSERT(lookup.IsProperty() && (lookup.type() == FIELD)); ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex); @@ -1228,14 +1221,6 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global, // Initialize the data slot. global_context()->set_data(heap->undefined_value()); - - { - // Initialize the random seed slot. - Handle<ByteArray> zeroed_byte_array( - factory->NewByteArray(kRandomStateSize)); - global_context()->set_random_seed(*zeroed_byte_array); - memset(zeroed_byte_array->GetDataStartAddress(), 0, kRandomStateSize); - } } @@ -1243,26 +1228,12 @@ void Genesis::InitializeExperimentalGlobal() { Handle<JSObject> global = Handle<JSObject>(global_context()->global()); // TODO(mstarzinger): Move this into Genesis::InitializeGlobal once we no - // longer need to live behind a flag, so functions get added to the snapshot. - if (FLAG_harmony_collections) { - { // -- S e t - Handle<JSObject> prototype = - factory()->NewJSObject(isolate()->object_function(), TENURED); - InstallFunction(global, "Set", JS_SET_TYPE, JSSet::kSize, - prototype, Builtins::kIllegal, true); - } - { // -- M a p - Handle<JSObject> prototype = - factory()->NewJSObject(isolate()->object_function(), TENURED); - InstallFunction(global, "Map", JS_MAP_TYPE, JSMap::kSize, - prototype, Builtins::kIllegal, true); - } - { // -- W e a k M a p - Handle<JSObject> prototype = - factory()->NewJSObject(isolate()->object_function(), TENURED); - InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize, - prototype, Builtins::kIllegal, true); - } + // longer need to live behind a flag, so WeakMap gets added to the snapshot. 
+ if (FLAG_harmony_weakmaps) { // -- W e a k M a p + Handle<JSObject> prototype = + factory()->NewJSObject(isolate()->object_function(), TENURED); + InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize, + prototype, Builtins::kIllegal, true); } } @@ -1391,7 +1362,6 @@ void Genesis::InstallExperimentalNativeFunctions() { INSTALL_NATIVE(JSFunction, "DerivedHasTrap", derived_has_trap); INSTALL_NATIVE(JSFunction, "DerivedGetTrap", derived_get_trap); INSTALL_NATIVE(JSFunction, "DerivedSetTrap", derived_set_trap); - INSTALL_NATIVE(JSFunction, "ProxyEnumerate", proxy_enumerate); } } @@ -1726,9 +1696,7 @@ bool Genesis::InstallNatives() { Handle<DescriptorArray> reresult_descriptors = factory()->NewDescriptorArray(3); - DescriptorArray::WhitenessWitness witness(*reresult_descriptors); - - reresult_descriptors->CopyFrom(0, *array_descriptors, 0, witness); + reresult_descriptors->CopyFrom(0, *array_descriptors, 0); int enum_index = 0; { @@ -1736,7 +1704,7 @@ bool Genesis::InstallNatives() { JSRegExpResult::kIndexIndex, NONE, enum_index++); - reresult_descriptors->Set(1, &index_field, witness); + reresult_descriptors->Set(1, &index_field); } { @@ -1744,9 +1712,9 @@ bool Genesis::InstallNatives() { JSRegExpResult::kInputIndex, NONE, enum_index++); - reresult_descriptors->Set(2, &input_field, witness); + reresult_descriptors->Set(2, &input_field); } - reresult_descriptors->Sort(witness); + reresult_descriptors->Sort(); initial_map->set_inobject_properties(2); initial_map->set_pre_allocated_property_fields(2); @@ -1773,9 +1741,9 @@ bool Genesis::InstallExperimentalNatives() { "native proxy.js") == 0) { if (!CompileExperimentalBuiltin(isolate(), i)) return false; } - if (FLAG_harmony_collections && + if (FLAG_harmony_weakmaps && strcmp(ExperimentalNatives::GetScriptName(i).start(), - "native collection.js") == 0) { + "native weakmap.js") == 0) { if (!CompileExperimentalBuiltin(isolate(), i)) return false; } } @@ -2021,12 +1989,6 @@ bool Genesis::InstallExtension(v8::RegisteredExtension* current) { false); ASSERT(isolate->has_pending_exception() != result); if (!result) { - // We print out the name of the extension that fail to install. - // When an error is thrown during bootstrapping we automatically print - // the line number at which this happened to the console in the isolate - // error throwing functionality. - OS::PrintError("Error installing extension '%s'.\n", - current->extension()->name()); isolate->clear_pending_exception(); } current->set_state(v8::INSTALLED); @@ -2046,9 +2008,7 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) { builtins->set_javascript_builtin(id, *function); Handle<SharedFunctionInfo> shared = Handle<SharedFunctionInfo>(function->shared()); - if (!SharedFunctionInfo::EnsureCompiled(shared, CLEAR_EXCEPTION)) { - return false; - } + if (!EnsureCompiled(shared, CLEAR_EXCEPTION)) return false; // Set the code object on the function object. function->ReplaceCode(function->shared()->code()); builtins->set_javascript_builtin_code(id, shared->code()); @@ -2128,7 +2088,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, break; } case CALLBACKS: { - LookupResult result(isolate()); + LookupResult result; to->LocalLookup(descs->GetKey(i), &result); // If the property is already there we skip it if (result.IsProperty()) continue; @@ -2166,7 +2126,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, if (properties->IsKey(raw_key)) { ASSERT(raw_key->IsString()); // If the property is already there we skip it. 
- LookupResult result(isolate()); + LookupResult result; to->LocalLookup(String::cast(raw_key), &result); if (result.IsProperty()) continue; // Set the property. diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc index e758b9a41..d513200f0 100644 --- a/deps/v8/src/builtins.cc +++ b/deps/v8/src/builtins.cc @@ -1507,14 +1507,6 @@ static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) { KeyedStoreIC::GenerateNonStrictArguments(masm); } -static void Generate_TransitionElementsSmiToDouble(MacroAssembler* masm) { - KeyedStoreIC::GenerateTransitionElementsSmiToDouble(masm); -} - -static void Generate_TransitionElementsDoubleToObject(MacroAssembler* masm) { - KeyedStoreIC::GenerateTransitionElementsDoubleToObject(masm); -} - #ifdef ENABLE_DEBUGGER_SUPPORT static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) { Debug::GenerateLoadICDebugBreak(masm); diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h index 24059e772..31090d3a0 100644 --- a/deps/v8/src/builtins.h +++ b/deps/v8/src/builtins.h @@ -167,10 +167,6 @@ enum BuiltinExtraArguments { kStrictMode) \ V(KeyedStoreIC_NonStrictArguments, KEYED_STORE_IC, MEGAMORPHIC, \ Code::kNoExtraICState) \ - V(TransitionElementsSmiToDouble, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(TransitionElementsDoubleToObject, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ \ /* Uses KeyedLoadIC_Initialize; must be after in list. */ \ V(FunctionCall, BUILTIN, UNINITIALIZED, \ @@ -238,6 +234,7 @@ enum BuiltinExtraArguments { V(DELETE, 2) \ V(IN, 1) \ V(INSTANCE_OF, 1) \ + V(GET_KEYS, 0) \ V(FILTER_KEY, 1) \ V(CALL_NON_FUNCTION, 0) \ V(CALL_NON_FUNCTION_AS_CONSTRUCTOR, 0) \ diff --git a/deps/v8/src/checks.h b/deps/v8/src/checks.h index 832f778b2..2f359f6cd 100644 --- a/deps/v8/src/checks.h +++ b/deps/v8/src/checks.h @@ -63,9 +63,7 @@ static inline void CheckHelper(const char* file, // The CHECK macro checks that the given condition is true; if not, it // prints a message to stderr and aborts. -#define CHECK(condition) do { \ - if (!(condition)) CheckHelper(__FILE__, __LINE__, #condition, false); \ - } while (0) +#define CHECK(condition) CheckHelper(__FILE__, __LINE__, #condition, condition) // Helper function used by the CHECK_EQ function when given int @@ -259,8 +257,11 @@ template <int> class StaticAssertionHelper { }; SEMI_STATIC_JOIN(__StaticAssertTypedef__, __LINE__) -extern bool FLAG_enable_slow_asserts; +namespace v8 { namespace internal { +bool EnableSlowAsserts(); + +} } // namespace v8::internal // The ASSERT macro is equivalent to CHECK except that it only // generates code in debug builds. 
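Note: the checks.h hunk above swaps the CHECK macro between two shapes. The 3.7.1 form being reverted wraps the test in do { ... } while (0) and only calls the helper when the condition is false; the restored 3.7.0 form always calls CheckHelper and passes the evaluated condition along. A small self-contained sketch of the do/while(0) variant, using a hypothetical MY_CHECK and CheckFailed rather than V8's macros:

// The do { ... } while (0) wrapper keeps the macro a single statement, so it
// expands safely inside if/else, and the helper runs only on failure.
#include <cstdio>
#include <cstdlib>

static void CheckFailed(const char* file, int line, const char* source) {
  fprintf(stderr, "%s:%d: CHECK(%s) failed\n", file, line, source);
  abort();
}

#define MY_CHECK(condition)                                         \
  do {                                                              \
    if (!(condition)) CheckFailed(__FILE__, __LINE__, #condition);  \
  } while (0)

int main() {
  int x = 3;
  if (x > 0)
    MY_CHECK(x < 10);   // single statement, so this if/else stays well formed
  else
    printf("unreachable\n");
  printf("ok\n");
  return 0;
}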
@@ -272,7 +273,7 @@ extern bool FLAG_enable_slow_asserts; #define ASSERT_GE(v1, v2) CHECK_GE(v1, v2) #define ASSERT_LT(v1, v2) CHECK_LT(v1, v2) #define ASSERT_LE(v1, v2) CHECK_LE(v1, v2) -#define SLOW_ASSERT(condition) if (FLAG_enable_slow_asserts) CHECK(condition) +#define SLOW_ASSERT(condition) if (EnableSlowAsserts()) CHECK(condition) #else #define ASSERT_RESULT(expr) (expr) #define ASSERT(condition) ((void) 0) diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc index b4374360c..4bc2603c5 100644 --- a/deps/v8/src/code-stubs.cc +++ b/deps/v8/src/code-stubs.cc @@ -415,29 +415,4 @@ bool ToBooleanStub::Types::CanBeUndetectable() const { } -void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) { - Label fail; - if (!FLAG_trace_elements_transitions) { - if (to_ == FAST_ELEMENTS) { - if (from_ == FAST_SMI_ONLY_ELEMENTS) { - ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm); - } else if (from_ == FAST_DOUBLE_ELEMENTS) { - ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail); - } else { - UNREACHABLE(); - } - KeyedStoreStubCompiler::GenerateStoreFastElement(masm, - is_jsarray_, - FAST_ELEMENTS); - } else if (from_ == FAST_SMI_ONLY_ELEMENTS && to_ == FAST_DOUBLE_ELEMENTS) { - ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail); - KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm, is_jsarray_); - } else { - UNREACHABLE(); - } - } - masm->bind(&fail); - KeyedStoreIC::GenerateRuntimeSetProperty(masm, strict_mode_); -} - } } // namespace v8::internal diff --git a/deps/v8/src/code-stubs.h b/deps/v8/src/code-stubs.h index fc7000bb0..acfbd469f 100644 --- a/deps/v8/src/code-stubs.h +++ b/deps/v8/src/code-stubs.h @@ -30,7 +30,6 @@ #include "allocation.h" #include "globals.h" -#include "codegen.h" namespace v8 { namespace internal { @@ -70,8 +69,7 @@ namespace internal { V(KeyedLoadElement) \ V(KeyedStoreElement) \ V(DebuggerStatement) \ - V(StringDictionaryLookup) \ - V(ElementsTransitionAndStore) + V(StringDictionaryLookup) // List of code stubs only used on ARM platforms. 
#ifdef V8_TARGET_ARCH_ARM @@ -364,7 +362,6 @@ class FastCloneShallowArrayStub : public CodeStub { enum Mode { CLONE_ELEMENTS, - CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS }; @@ -383,8 +380,8 @@ class FastCloneShallowArrayStub : public CodeStub { Major MajorKey() { return FastCloneShallowArray; } int MinorKey() { - ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2); - return length_ * 3 + mode_; + ASSERT(mode_ == 0 || mode_ == 1); + return (length_ << 1) | mode_; } }; @@ -1028,42 +1025,6 @@ class ToBooleanStub: public CodeStub { Types types_; }; - -class ElementsTransitionAndStoreStub : public CodeStub { - public: - ElementsTransitionAndStoreStub(ElementsKind from, - ElementsKind to, - bool is_jsarray, - StrictModeFlag strict_mode) - : from_(from), - to_(to), - is_jsarray_(is_jsarray), - strict_mode_(strict_mode) {} - - private: - class FromBits: public BitField<ElementsKind, 0, 8> {}; - class ToBits: public BitField<ElementsKind, 8, 8> {}; - class IsJSArrayBits: public BitField<bool, 16, 8> {}; - class StrictModeBits: public BitField<StrictModeFlag, 24, 8> {}; - - Major MajorKey() { return ElementsTransitionAndStore; } - int MinorKey() { - return FromBits::encode(from_) | - ToBits::encode(to_) | - IsJSArrayBits::encode(is_jsarray_) | - StrictModeBits::encode(strict_mode_); - } - - void Generate(MacroAssembler* masm); - - ElementsKind from_; - ElementsKind to_; - bool is_jsarray_; - StrictModeFlag strict_mode_; - - DISALLOW_COPY_AND_ASSIGN(ElementsTransitionAndStoreStub); -}; - } } // namespace v8::internal #endif // V8_CODE_STUBS_H_ diff --git a/deps/v8/src/codegen.h b/deps/v8/src/codegen.h index 5360d3ef3..e551abfb1 100644 --- a/deps/v8/src/codegen.h +++ b/deps/v8/src/codegen.h @@ -81,19 +81,4 @@ enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF }; #error Unsupported target architecture. #endif -namespace v8 { -namespace internal { - -class ElementsTransitionGenerator : public AllStatic { - public: - static void GenerateSmiOnlyToObject(MacroAssembler* masm); - static void GenerateSmiOnlyToDouble(MacroAssembler* masm, Label* fail); - static void GenerateDoubleToObject(MacroAssembler* masm, Label* fail); - - private: - DISALLOW_COPY_AND_ASSIGN(ElementsTransitionGenerator); -}; - -} } // namespace v8::internal - #endif // V8_CODEGEN_H_ diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc index 88db467c3..4979a7f86 100644 --- a/deps/v8/src/compiler.cc +++ b/deps/v8/src/compiler.cc @@ -59,6 +59,7 @@ CompilationInfo::CompilationInfo(Handle<Script> script) script_(script), extension_(NULL), pre_parse_data_(NULL), + supports_deoptimization_(false), osr_ast_id_(AstNode::kNoNumber) { Initialize(NONOPT); } @@ -73,6 +74,7 @@ CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info) script_(Handle<Script>(Script::cast(shared_info->script()))), extension_(NULL), pre_parse_data_(NULL), + supports_deoptimization_(false), osr_ast_id_(AstNode::kNoNumber) { Initialize(BASE); } @@ -88,6 +90,7 @@ CompilationInfo::CompilationInfo(Handle<JSFunction> closure) script_(Handle<Script>(Script::cast(shared_info_->script()))), extension_(NULL), pre_parse_data_(NULL), + supports_deoptimization_(false), osr_ast_id_(AstNode::kNoNumber) { Initialize(BASE); } @@ -306,9 +309,9 @@ static bool MakeCrankshaftCode(CompilationInfo* info) { static bool GenerateCode(CompilationInfo* info) { - return info->IsCompilingForDebugging() || !V8::UseCrankshaft() ? - FullCodeGenerator::MakeCode(info) : - MakeCrankshaftCode(info); + return V8::UseCrankshaft() ? 
+ MakeCrankshaftCode(info) : + FullCodeGenerator::MakeCode(info); } @@ -477,22 +480,20 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source, // that would be compiled lazily anyway, so we skip the preparse step // in that case too. ScriptDataImpl* pre_data = input_pre_data; - int flags = kNoParsingFlags; - if ((natives == NATIVES_CODE) || FLAG_allow_natives_syntax) { - flags |= kAllowNativesSyntax; - } - if (natives != NATIVES_CODE && FLAG_harmony_scoping) { - flags |= kHarmonyScoping; - } + bool harmony_scoping = natives != NATIVES_CODE && FLAG_harmony_scoping; if (pre_data == NULL && source_length >= FLAG_min_preparse_length) { if (source->IsExternalTwoByteString()) { ExternalTwoByteStringUC16CharacterStream stream( Handle<ExternalTwoByteString>::cast(source), 0, source->length()); - pre_data = ParserApi::PartialPreParse(&stream, extension, flags); + pre_data = ParserApi::PartialPreParse(&stream, + extension, + harmony_scoping); } else { GenericStringUC16CharacterStream stream(source, 0, source->length()); - pre_data = ParserApi::PartialPreParse(&stream, extension, flags); + pre_data = ParserApi::PartialPreParse(&stream, + extension, + harmony_scoping); } } @@ -515,6 +516,9 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source, info.MarkAsGlobal(); info.SetExtension(extension); info.SetPreParseData(pre_data); + if (natives == NATIVES_CODE) { + info.MarkAsAllowingNativesSyntax(); + } result = MakeFunctionInfo(&info); if (extension == NULL && !result.is_null()) { compilation_cache->PutScript(source, result); @@ -558,7 +562,7 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source, CompilationInfo info(script); info.MarkAsEval(); if (is_global) info.MarkAsGlobal(); - info.SetStrictModeFlag(strict_mode); + if (strict_mode == kStrictMode) info.MarkAsStrictMode(); info.SetCallingContext(context); result = MakeFunctionInfo(&info); if (!result.is_null()) { @@ -566,7 +570,6 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source, // If caller is strict mode, the result must be strict as well, // but not the other way around. Consider: // eval("'use strict'; ..."); - // TODO(keuchel): adapt this for extended mode. ASSERT(strict_mode == kNonStrictMode || result->strict_mode()); compilation_cache->PutEval(source, context, is_global, result); } @@ -598,13 +601,10 @@ bool Compiler::CompileLazy(CompilationInfo* info) { HistogramTimerScope timer(isolate->counters()->compile_lazy()); // After parsing we know function's strict mode. Remember it. - StrictModeFlag strict_mode = info->function()->strict_mode_flag(); - ASSERT(info->strict_mode_flag() == kNonStrictMode || - info->strict_mode_flag() == strict_mode); - ASSERT(shared->strict_mode_flag() == kNonStrictMode || - shared->strict_mode_flag() == strict_mode); - info->SetStrictModeFlag(strict_mode); - shared->set_strict_mode_flag(strict_mode); + if (info->function()->strict_mode()) { + shared->set_strict_mode(true); + info->MarkAsStrictMode(); + } // Compile the code. if (!MakeCode(info)) { @@ -684,7 +684,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal, CompilationInfo info(script); info.SetFunction(literal); info.SetScope(literal->scope()); - info.SetStrictModeFlag(literal->scope()->strict_mode_flag()); + if (literal->scope()->is_strict_mode()) info.MarkAsStrictMode(); LiveEditFunctionTracker live_edit_tracker(info.isolate(), literal); // Determine if the function can be lazily compiled. 
This is necessary to @@ -750,7 +750,7 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info, lit->has_only_simple_this_property_assignments(), *lit->this_property_assignments()); function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation()); - function_info->set_strict_mode_flag(lit->strict_mode_flag()); + function_info->set_strict_mode(lit->strict_mode()); function_info->set_uses_arguments(lit->scope()->arguments() != NULL); function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters()); } diff --git a/deps/v8/src/compiler.h b/deps/v8/src/compiler.h index bedf5eebb..09aa23dec 100644 --- a/deps/v8/src/compiler.h +++ b/deps/v8/src/compiler.h @@ -52,10 +52,7 @@ class CompilationInfo BASE_EMBEDDED { bool is_lazy() const { return IsLazy::decode(flags_); } bool is_eval() const { return IsEval::decode(flags_); } bool is_global() const { return IsGlobal::decode(flags_); } - bool is_strict_mode() const { return strict_mode_flag() == kStrictMode; } - StrictModeFlag strict_mode_flag() const { - return StrictModeFlagField::decode(flags_); - } + bool is_strict_mode() const { return IsStrictMode::decode(flags_); } bool is_in_loop() const { return IsInLoop::decode(flags_); } FunctionLiteral* function() const { return function_; } Scope* scope() const { return scope_; } @@ -76,15 +73,22 @@ class CompilationInfo BASE_EMBEDDED { ASSERT(!is_lazy()); flags_ |= IsGlobal::encode(true); } - void SetStrictModeFlag(StrictModeFlag strict_mode_flag) { - ASSERT(StrictModeFlagField::decode(flags_) == kNonStrictMode || - StrictModeFlagField::decode(flags_) == strict_mode_flag); - flags_ = StrictModeFlagField::update(flags_, strict_mode_flag); + void MarkAsStrictMode() { + flags_ |= IsStrictMode::encode(true); + } + StrictModeFlag StrictMode() { + return is_strict_mode() ? kStrictMode : kNonStrictMode; } void MarkAsInLoop() { ASSERT(is_lazy()); flags_ |= IsInLoop::encode(true); } + void MarkAsAllowingNativesSyntax() { + flags_ |= IsNativesSyntaxAllowed::encode(true); + } + bool allows_natives_syntax() const { + return IsNativesSyntaxAllowed::decode(flags_); + } void MarkAsNative() { flags_ |= IsNative::encode(true); } @@ -116,19 +120,6 @@ class CompilationInfo BASE_EMBEDDED { ASSERT(IsOptimizing()); osr_ast_id_ = osr_ast_id; } - void MarkCompilingForDebugging(Handle<Code> current_code) { - ASSERT(mode_ != OPTIMIZE); - ASSERT(current_code->kind() == Code::FUNCTION); - flags_ |= IsCompilingForDebugging::encode(true); - if (current_code->is_compiled_optimizable()) { - EnableDeoptimizationSupport(); - } else { - mode_ = CompilationInfo::NONOPT; - } - } - bool IsCompilingForDebugging() { - return IsCompilingForDebugging::decode(flags_); - } bool has_global_object() const { return !closure().is_null() && (closure()->context()->global() != NULL); @@ -148,12 +139,10 @@ class CompilationInfo BASE_EMBEDDED { void DisableOptimization(); // Deoptimization support. - bool HasDeoptimizationSupport() const { - return SupportsDeoptimization::decode(flags_); - } + bool HasDeoptimizationSupport() const { return supports_deoptimization_; } void EnableDeoptimizationSupport() { ASSERT(IsOptimizable()); - flags_ |= SupportsDeoptimization::encode(true); + supports_deoptimization_ = true; } // Determine whether or not we can adaptively optimize. 
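Note: the compiler.h hunk above moves CompilationInfo's strict-mode and deoptimization-support flags between dedicated bool members and BitField slots packed into the single flags_ word. The sketch below shows the packing idea in isolation; this simplified BitField and the IsStrictModeBit/IsNativeBit typedefs are illustrative stand-ins, not V8's template.

// Each flag occupies a fixed bit range inside one unsigned word instead of
// a separate bool member; encode/decode/update mask and shift that range.
#include <cstdio>

template <class T, int shift, int size>
struct BitField {
  static unsigned mask() { return ((1u << size) - 1u) << shift; }
  static unsigned encode(T value) {
    return (static_cast<unsigned>(value) << shift) & mask();
  }
  static T decode(unsigned word) {
    return static_cast<T>((word & mask()) >> shift);
  }
  static unsigned update(unsigned word, T value) {
    return (word & ~mask()) | encode(value);
  }
};

typedef BitField<bool, 4, 1> IsStrictModeBit;  // mirrors the diff's bit layout
typedef BitField<bool, 6, 1> IsNativeBit;

int main() {
  unsigned flags = 0;
  flags = IsStrictModeBit::update(flags, true);
  flags |= IsNativeBit::encode(true);
  printf("strict=%d native=%d word=0x%x\n",
         IsStrictModeBit::decode(flags), IsNativeBit::decode(flags), flags);
  return 0;   // prints strict=1 native=1 word=0x50
}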
@@ -188,9 +177,8 @@ class CompilationInfo BASE_EMBEDDED { if (script_->type()->value() == Script::TYPE_NATIVE) { MarkAsNative(); } - if (!shared_info_.is_null()) { - ASSERT(strict_mode_flag() == kNonStrictMode); - SetStrictModeFlag(shared_info_->strict_mode_flag()); + if (!shared_info_.is_null() && shared_info_->strict_mode()) { + MarkAsStrictMode(); } } @@ -210,14 +198,11 @@ class CompilationInfo BASE_EMBEDDED { // Flags that can be set for lazy compilation. class IsInLoop: public BitField<bool, 3, 1> {}; // Strict mode - used in eager compilation. - class StrictModeFlagField: public BitField<StrictModeFlag, 4, 1> {}; + class IsStrictMode: public BitField<bool, 4, 1> {}; + // Native syntax (%-stuff) allowed? + class IsNativesSyntaxAllowed: public BitField<bool, 5, 1> {}; // Is this a function from our natives. class IsNative: public BitField<bool, 6, 1> {}; - // Is this code being compiled with support for deoptimization.. - class SupportsDeoptimization: public BitField<bool, 7, 1> {}; - // If compiling for debugging produce just full code matching the - // initial mode setting. - class IsCompilingForDebugging: public BitField<bool, 8, 1> {}; unsigned flags_; @@ -246,6 +231,7 @@ class CompilationInfo BASE_EMBEDDED { // Compilation mode flag and whether deoptimization is allowed. Mode mode_; + bool supports_deoptimization_; int osr_ast_id_; DISALLOW_COPY_AND_ASSIGN(CompilationInfo); diff --git a/deps/v8/src/contexts.cc b/deps/v8/src/contexts.cc index b25ffac93..0cda43049 100644 --- a/deps/v8/src/contexts.cc +++ b/deps/v8/src/contexts.cc @@ -174,10 +174,6 @@ Handle<Object> Context::Lookup(Handle<String> name, *attributes = READ_ONLY; *binding_flags = IMMUTABLE_CHECK_INITIALIZED; break; - case CONST_HARMONY: - *attributes = READ_ONLY; - *binding_flags = IMMUTABLE_CHECK_INITIALIZED_HARMONY; - break; case DYNAMIC: case DYNAMIC_GLOBAL: case DYNAMIC_LOCAL: @@ -191,8 +187,7 @@ Handle<Object> Context::Lookup(Handle<String> name, // Check the slot corresponding to the intermediate context holding // only the function name variable. if (follow_context_chain && context->IsFunctionContext()) { - VariableMode mode; - int function_index = scope_info->FunctionContextSlotIndex(*name, &mode); + int function_index = scope_info->FunctionContextSlotIndex(*name); if (function_index >= 0) { if (FLAG_trace_contexts) { PrintF("=> found intermediate function in context slot %d\n", @@ -200,9 +195,7 @@ Handle<Object> Context::Lookup(Handle<String> name, } *index = function_index; *attributes = READ_ONLY; - ASSERT(mode == CONST || mode == CONST_HARMONY); - *binding_flags = (mode == CONST) - ? IMMUTABLE_IS_INITIALIZED : IMMUTABLE_IS_INITIALIZED_HARMONY; + *binding_flags = IMMUTABLE_IS_INITIALIZED; return context; } } @@ -262,7 +255,7 @@ bool Context::GlobalIfNotShadowedByEval(Handle<String> name) { if (param_index >= 0) return false; // Check context only holding the function name variable. - index = scope_info->FunctionContextSlotIndex(*name, NULL); + index = scope_info->FunctionContextSlotIndex(*name); if (index >= 0) return false; context = context->previous(); } @@ -273,7 +266,8 @@ bool Context::GlobalIfNotShadowedByEval(Handle<String> name) { } -void Context::ComputeEvalScopeInfo(bool* outer_scope_calls_non_strict_eval) { +void Context::ComputeEvalScopeInfo(bool* outer_scope_calls_eval, + bool* outer_scope_calls_non_strict_eval) { // Skip up the context chain checking all the function contexts to see // whether they call eval. 
Context* context = this; @@ -281,11 +275,14 @@ void Context::ComputeEvalScopeInfo(bool* outer_scope_calls_non_strict_eval) { if (context->IsFunctionContext()) { Handle<SerializedScopeInfo> scope_info( context->closure()->shared()->scope_info()); - if (scope_info->CallsEval() && !scope_info->IsStrictMode()) { - // No need to go further since the answers will not change from - // here. - *outer_scope_calls_non_strict_eval = true; - return; + if (scope_info->CallsEval()) { + *outer_scope_calls_eval = true; + if (!scope_info->IsStrictMode()) { + // No need to go further since the answers will not change from + // here. + *outer_scope_calls_non_strict_eval = true; + return; + } } } context = context->previous(); diff --git a/deps/v8/src/contexts.h b/deps/v8/src/contexts.h index 7021ff8fb..b80475f0f 100644 --- a/deps/v8/src/contexts.h +++ b/deps/v8/src/contexts.h @@ -46,43 +46,24 @@ enum ContextLookupFlags { // ES5 10.2 defines lexical environments with mutable and immutable bindings. // Immutable bindings have two states, initialized and uninitialized, and -// their state is changed by the InitializeImmutableBinding method. The -// BindingFlags enum represents information if a binding has definitely been -// initialized. A mutable binding does not need to be checked and thus has -// the BindingFlag MUTABLE_IS_INITIALIZED. -// -// There are two possibilities for immutable bindings -// * 'const' declared variables. They are initialized when evaluating the -// corresponding declaration statement. They need to be checked for being -// initialized and thus get the flag IMMUTABLE_CHECK_INITIALIZED. -// * The function name of a named function literal. The binding is immediately -// initialized when entering the function and thus does not need to be -// checked. it gets the BindingFlag IMMUTABLE_IS_INITIALIZED. -// Accessing an uninitialized binding produces the undefined value. +// their state is changed by the InitializeImmutableBinding method. // // The harmony proposal for block scoped bindings also introduces the -// uninitialized state for mutable bindings. -// * A 'let' declared variable. They are initialized when evaluating the -// corresponding declaration statement. They need to be checked for being -// initialized and thus get the flag MUTABLE_CHECK_INITIALIZED. -// * A 'var' declared variable. It is initialized immediately upon creation -// and thus doesn't need to be checked. It gets the flag -// MUTABLE_IS_INITIALIZED. -// * Catch bound variables, function parameters and variables introduced by -// function declarations are initialized immediately and do not need to be -// checked. Thus they get the flag MUTABLE_IS_INITIALIZED. -// Immutable bindings in harmony mode get the _HARMONY flag variants. Accessing -// an uninitialized binding produces a reference error. -// -// In V8 uninitialized bindings are set to the hole value upon creation and set -// to a different value upon initialization. +// uninitialized state for mutable bindings. A 'let' declared variable +// is a mutable binding that is created uninitalized upon activation of its +// lexical environment and it is initialized when evaluating its declaration +// statement. Var declared variables are mutable bindings that are +// immediately initialized upon creation. The BindingFlags enum represents +// information if a binding has definitely been initialized. 'const' declared +// variables are created as uninitialized immutable bindings. + +// In harmony mode accessing an uninitialized binding produces a reference +// error. 
enum BindingFlags { MUTABLE_IS_INITIALIZED, MUTABLE_CHECK_INITIALIZED, IMMUTABLE_IS_INITIALIZED, IMMUTABLE_CHECK_INITIALIZED, - IMMUTABLE_IS_INITIALIZED_HARMONY, - IMMUTABLE_CHECK_INITIALIZED_HARMONY, MISSING_BINDING }; @@ -157,9 +138,7 @@ enum BindingFlags { to_complete_property_descriptor) \ V(DERIVED_HAS_TRAP_INDEX, JSFunction, derived_has_trap) \ V(DERIVED_GET_TRAP_INDEX, JSFunction, derived_get_trap) \ - V(DERIVED_SET_TRAP_INDEX, JSFunction, derived_set_trap) \ - V(PROXY_ENUMERATE, JSFunction, proxy_enumerate) \ - V(RANDOM_SEED_INDEX, ByteArray, random_seed) + V(DERIVED_SET_TRAP_INDEX, JSFunction, derived_set_trap) // JSFunctions are pairs (context, function code), sometimes also called // closures. A Context object is used to represent function contexts and @@ -215,8 +194,7 @@ class Context: public FixedArray { PREVIOUS_INDEX, // The extension slot is used for either the global object (in global // contexts), eval extension object (function contexts), subject of with - // (with contexts), or the variable name (catch contexts), the serialized - // scope info (block contexts). + // (with contexts), or the variable name (catch contexts). EXTENSION_INDEX, GLOBAL_INDEX, MIN_CONTEXT_SLOTS, @@ -280,8 +258,6 @@ class Context: public FixedArray { DERIVED_HAS_TRAP_INDEX, DERIVED_GET_TRAP_INDEX, DERIVED_SET_TRAP_INDEX, - PROXY_ENUMERATE, - RANDOM_SEED_INDEX, // Properties from here are treated as weak references by the full GC. // Scavenge treats them as strong references. @@ -409,7 +385,8 @@ class Context: public FixedArray { // Determine if any function scope in the context call eval and if // any of those calls are in non-strict mode. - void ComputeEvalScopeInfo(bool* outer_scope_calls_non_strict_eval); + void ComputeEvalScopeInfo(bool* outer_scope_calls_eval, + bool* outer_scope_calls_non_strict_eval); // Code generation support. static int SlotOffset(int index) { diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc index 64ada2c24..a516576fa 100644 --- a/deps/v8/src/d8.cc +++ b/deps/v8/src/d8.cc @@ -178,8 +178,7 @@ bool Shell::ExecuteString(Handle<String> source, // If all went well and the result wasn't undefined then print // the returned value. v8::String::Utf8Value str(result); - size_t count = fwrite(*str, sizeof(**str), str.length(), stdout); - (void) count; // Silence GCC-4.5.x "unused result" warning. + fwrite(*str, sizeof(**str), str.length(), stdout); printf("\n"); } return true; diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc index dc9f2974f..3d79485b5 100644 --- a/deps/v8/src/debug.cc +++ b/deps/v8/src/debug.cc @@ -87,13 +87,19 @@ static void PrintLn(v8::Local<v8::Value> value) { static Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind) { Isolate* isolate = Isolate::Current(); - return isolate->stub_cache()->ComputeCallDebugBreak(argc, kind); + CALL_HEAP_FUNCTION( + isolate, + isolate->stub_cache()->ComputeCallDebugBreak(argc, kind), + Code); } static Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) { Isolate* isolate = Isolate::Current(); - return isolate->stub_cache()->ComputeCallDebugPrepareStepIn(argc, kind); + CALL_HEAP_FUNCTION( + isolate, + isolate->stub_cache()->ComputeCallDebugPrepareStepIn(argc, kind), + Code); } @@ -1721,203 +1727,50 @@ void Debug::ClearStepNext() { } -// Helper function to compile full code for debugging. This code will -// have debug break slots and deoptimization -// information. Deoptimization information is required in case that an -// optimized version of this function is still activated on the -// stack. 
It will also make sure that the full code is compiled with -// the same flags as the previous version - that is flags which can -// change the code generated. The current method of mapping from -// already compiled full code without debug break slots to full code -// with debug break slots depends on the generated code is otherwise -// exactly the same. -static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared, - Handle<Code> current_code) { - ASSERT(!current_code->has_debug_break_slots()); - - CompilationInfo info(shared); - info.MarkCompilingForDebugging(current_code); - ASSERT(!info.shared_info()->is_compiled()); - ASSERT(!info.isolate()->has_pending_exception()); - - // Use compile lazy which will end up compiling the full code in the - // configuration configured above. - bool result = Compiler::CompileLazy(&info); - ASSERT(result != Isolate::Current()->has_pending_exception()); - info.isolate()->clear_pending_exception(); -#if DEBUG - if (result) { - Handle<Code> new_code(shared->code()); - ASSERT(new_code->has_debug_break_slots()); - ASSERT(current_code->is_compiled_optimizable() == - new_code->is_compiled_optimizable()); - ASSERT(current_code->instruction_size() <= new_code->instruction_size()); - } -#endif - return result; -} - - void Debug::PrepareForBreakPoints() { // If preparing for the first break point make sure to deoptimize all // functions as debugging does not work with optimized code. if (!has_break_points_) { Deoptimizer::DeoptimizeAll(); - Handle<Code> lazy_compile = - Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile)); - - // Keep the list of activated functions in a handlified list as it - // is used both in GC and non-GC code. - List<Handle<JSFunction> > active_functions(100); - - { - // We are going to iterate heap to find all functions without - // debug break slots. - isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask); - - // Ensure no GC in this scope as we are comparing raw pointer - // values and performing a heap iteration. - AssertNoAllocation no_allocation; - - // Find all non-optimized code functions with activation frames on - // the stack. - for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { - JavaScriptFrame* frame = it.frame(); - if (frame->function()->IsJSFunction()) { - JSFunction* function = JSFunction::cast(frame->function()); - if (function->code()->kind() == Code::FUNCTION && - !function->code()->has_debug_break_slots()) - active_functions.Add(Handle<JSFunction>(function)); - } - } - // Sort the functions on the object pointer value to prepare for - // the binary search below. - active_functions.Sort(HandleObjectPointerCompare<JSFunction>); - - // Scan the heap for all non-optimized functions which has no - // debug break slots. - HeapIterator iterator; - HeapObject* obj = NULL; - while (((obj = iterator.next()) != NULL)) { - if (obj->IsJSFunction()) { - JSFunction* function = JSFunction::cast(obj); - if (function->shared()->allows_lazy_compilation() && - function->shared()->script()->IsScript() && - function->code()->kind() == Code::FUNCTION && - !function->code()->has_debug_break_slots()) { - bool has_activation = - SortedListBSearch<Handle<JSFunction> >( - active_functions, - Handle<JSFunction>(function), - HandleObjectPointerCompare<JSFunction>) != -1; - if (!has_activation) { - function->set_code(*lazy_compile); - function->shared()->set_code(*lazy_compile); - } - } - } + // We are going to iterate heap to find all functions without + // debug break slots. 
+ isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask); + + AssertNoAllocation no_allocation; + Builtins* builtins = isolate_->builtins(); + Code* lazy_compile = builtins->builtin(Builtins::kLazyCompile); + + // Find all non-optimized code functions with activation frames on + // the stack. + List<JSFunction*> active_functions(100); + for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { + JavaScriptFrame* frame = it.frame(); + if (frame->function()->IsJSFunction()) { + JSFunction* function = JSFunction::cast(frame->function()); + if (function->code()->kind() == Code::FUNCTION) + active_functions.Add(function); } } - - // Now the non-GC scope is left, and the sorting of the functions - // in active_function is not ensured any more. The code below does - // not rely on it. - - // Now recompile all functions with activation frames and and - // patch the return address to run in the new compiled code. - for (int i = 0; i < active_functions.length(); i++) { - Handle<JSFunction> function = active_functions[i]; - Handle<SharedFunctionInfo> shared(function->shared()); - // If recompilation is not possible just skip it. - if (shared->is_toplevel() || - !shared->allows_lazy_compilation() || - shared->code()->kind() == Code::BUILTIN) { - continue; - } - - // Make sure that the shared full code is compiled with debug - // break slots. - Handle<Code> current_code(function->code()); - if (shared->code()->has_debug_break_slots()) { - // if the code is already recompiled to have break slots skip - // recompilation. - ASSERT(!function->code()->has_debug_break_slots()); - } else { - // Try to compile the full code with debug break slots. If it - // fails just keep the current code. - ASSERT(shared->code() == *current_code); - ZoneScope zone_scope(isolate_, DELETE_ON_EXIT); - shared->set_code(*lazy_compile); - bool prev_force_debugger_active = - isolate_->debugger()->force_debugger_active(); - isolate_->debugger()->set_force_debugger_active(true); - CompileFullCodeForDebugging(shared, current_code); - isolate_->debugger()->set_force_debugger_active( - prev_force_debugger_active); - if (!shared->is_compiled()) { - shared->set_code(*current_code); - continue; - } - } - Handle<Code> new_code(shared->code()); - - // Find the function and patch return address. - for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { - JavaScriptFrame* frame = it.frame(); - // If the current frame is for this function in its - // non-optimized form rewrite the return address to continue - // in the newly compiled full code with debug break slots. - if (frame->function()->IsJSFunction() && - frame->function() == *function && - frame->LookupCode()->kind() == Code::FUNCTION) { - intptr_t delta = frame->pc() - current_code->instruction_start(); - int debug_break_slot_count = 0; - int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT); - for (RelocIterator it(*new_code, mask); !it.done(); it.next()) { - // Check if the pc in the new code with debug break - // slots is before this slot. - RelocInfo* info = it.rinfo(); - int debug_break_slot_bytes = - debug_break_slot_count * Assembler::kDebugBreakSlotLength; - intptr_t new_delta = - info->pc() - - new_code->instruction_start() - - debug_break_slot_bytes; - if (new_delta > delta) { - break; - } - - // Passed a debug break slot in the full code with debug - // break slots. 
- debug_break_slot_count++; - } - int debug_break_slot_bytes = - debug_break_slot_count * Assembler::kDebugBreakSlotLength; - if (FLAG_trace_deopt) { - PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " - "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " - "for debugging, " - "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n", - reinterpret_cast<intptr_t>( - current_code->instruction_start()), - reinterpret_cast<intptr_t>( - current_code->instruction_start()) + - current_code->instruction_size(), - current_code->instruction_size(), - reinterpret_cast<intptr_t>(new_code->instruction_start()), - reinterpret_cast<intptr_t>(new_code->instruction_start()) + - new_code->instruction_size(), - new_code->instruction_size(), - reinterpret_cast<intptr_t>(frame->pc()), - reinterpret_cast<intptr_t>(new_code->instruction_start()) + - delta + debug_break_slot_bytes); + active_functions.Sort(); + + // Scan the heap for all non-optimized functions which has no + // debug break slots. + HeapIterator iterator; + HeapObject* obj = NULL; + while (((obj = iterator.next()) != NULL)) { + if (obj->IsJSFunction()) { + JSFunction* function = JSFunction::cast(obj); + if (function->shared()->allows_lazy_compilation() && + function->shared()->script()->IsScript() && + function->code()->kind() == Code::FUNCTION && + !function->code()->has_debug_break_slots()) { + bool has_activation = + SortedListBSearch<JSFunction*>(active_functions, function) != -1; + if (!has_activation) { + function->set_code(lazy_compile); + function->shared()->set_code(lazy_compile); } - - // Patch the return address to return into the code with - // debug break slots. - frame->set_pc( - new_code->instruction_start() + delta + debug_break_slot_bytes); } } } @@ -1934,9 +1787,7 @@ bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) { } // Ensure shared in compiled. Return false if this failed. - if (!SharedFunctionInfo::EnsureCompiled(shared, CLEAR_EXCEPTION)) { - return false; - } + if (!EnsureCompiled(shared, CLEAR_EXCEPTION)) return false; // Create the debug info object. 
Handle<DebugInfo> debug_info = FACTORY->NewDebugInfo(shared); @@ -2226,7 +2077,6 @@ Debugger::Debugger(Isolate* isolate) compiling_natives_(false), is_loading_debugger_(false), never_unload_debugger_(false), - force_debugger_active_(false), message_handler_(NULL), debugger_unload_pending_(false), host_dispatch_handler_(NULL), @@ -2994,9 +2844,7 @@ void Debugger::EnqueueDebugCommand(v8::Debug::ClientData* client_data) { bool Debugger::IsDebuggerActive() { ScopedLock with(debugger_access_); - return message_handler_ != NULL || - !event_listener_.is_null() || - force_debugger_active_; + return message_handler_ != NULL || !event_listener_.is_null(); } diff --git a/deps/v8/src/debug.h b/deps/v8/src/debug.h index 3c3718617..f01ef393f 100644 --- a/deps/v8/src/debug.h +++ b/deps/v8/src/debug.h @@ -810,15 +810,11 @@ class Debugger { } void set_compiling_natives(bool compiling_natives) { - compiling_natives_ = compiling_natives; + Debugger::compiling_natives_ = compiling_natives; } bool compiling_natives() const { return compiling_natives_; } void set_loading_debugger(bool v) { is_loading_debugger_ = v; } bool is_loading_debugger() const { return is_loading_debugger_; } - void set_force_debugger_active(bool force_debugger_active) { - force_debugger_active_ = force_debugger_active; - } - bool force_debugger_active() const { return force_debugger_active_; } bool IsDebuggerActive(); @@ -844,7 +840,6 @@ class Debugger { bool compiling_natives_; // Are we compiling natives? bool is_loading_debugger_; // Are we loading the debugger? bool never_unload_debugger_; // Can we unload the debugger? - bool force_debugger_active_; // Activate debugger without event listeners. v8::Debug::MessageHandler2 message_handler_; bool debugger_unload_pending_; // Was message handler cleared? v8::Debug::HostDispatchHandler host_dispatch_handler_; diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc index a83493db4..b0522757e 100644 --- a/deps/v8/src/deoptimizer.cc +++ b/deps/v8/src/deoptimizer.cc @@ -882,12 +882,10 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator, unsigned output_offset = output->GetOffsetFromSlotIndex(this, output_index); if (FLAG_trace_osr) { - PrintF(" [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ", + PrintF(" [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d]\n", output_offset, input_value, *input_offset); - reinterpret_cast<Object*>(input_value)->ShortPrint(); - PrintF("\n"); } output->SetFrameSlot(output_offset, input_value); break; @@ -1009,10 +1007,7 @@ void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, for (uint32_t i = 0; i < table_length; ++i) { uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); Address pc_after = unoptimized_code->instruction_start() + pc_offset; - RevertStackCheckCodeAt(unoptimized_code, - pc_after, - check_code, - replacement_code); + RevertStackCheckCodeAt(pc_after, check_code, replacement_code); stack_check_cursor += 2 * kIntSize; } } diff --git a/deps/v8/src/deoptimizer.h b/deps/v8/src/deoptimizer.h index 33580a1b9..3cf70466c 100644 --- a/deps/v8/src/deoptimizer.h +++ b/deps/v8/src/deoptimizer.h @@ -186,8 +186,7 @@ class Deoptimizer : public Malloced { // Change all patched stack guard checks in the unoptimized code // back to a normal stack guard check. 
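The RevertStackCheckCode loop in the deoptimizer.cc hunk above walks a table of paired 32-bit entries, reading the pc offset from the second slot of each pair (stack_check_cursor + kIntSize) and advancing by two ints per entry. A freestanding sketch of that table walk with made-up data; nothing beyond what the loop itself shows is claimed about V8's encoding:

#include <cstdint>
#include <cstdio>

int main() {
  // Two entries of two 32-bit values each; this loop only uses the second
  // slot of every pair, matching the stride of 2 * kIntSize above.
  const uint32_t table[] = { 7, 0x40, 9, 0x7c };
  const uint32_t table_length = 2;

  const uint8_t* cursor = reinterpret_cast<const uint8_t*>(table);
  for (uint32_t i = 0; i < table_length; ++i) {
    uint32_t pc_offset =
        *reinterpret_cast<const uint32_t*>(cursor + sizeof(uint32_t));
    std::printf("entry %u: pc offset 0x%x\n", static_cast<unsigned>(i),
                static_cast<unsigned>(pc_offset));
    cursor += 2 * sizeof(uint32_t);
  }
  return 0;
}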
- static void RevertStackCheckCodeAt(Code* unoptimized_code, - Address pc_after, + static void RevertStackCheckCodeAt(Address pc_after, Code* check_code, Code* replacement_code); diff --git a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc index 15f640e7e..143b34208 100644 --- a/deps/v8/src/factory.cc +++ b/deps/v8/src/factory.cc @@ -59,13 +59,13 @@ Handle<FixedArray> Factory::NewFixedArrayWithHoles(int size, } -Handle<FixedDoubleArray> Factory::NewFixedDoubleArray(int size, - PretenureFlag pretenure) { +Handle<FixedArray> Factory::NewFixedDoubleArray(int size, + PretenureFlag pretenure) { ASSERT(0 <= size); CALL_HEAP_FUNCTION( isolate(), isolate()->heap()->AllocateUninitializedFixedDoubleArray(size, pretenure), - FixedDoubleArray); + FixedArray); } @@ -85,14 +85,6 @@ Handle<NumberDictionary> Factory::NewNumberDictionary(int at_least_space_for) { } -Handle<ObjectHashSet> Factory::NewObjectHashSet(int at_least_space_for) { - ASSERT(0 <= at_least_space_for); - CALL_HEAP_FUNCTION(isolate(), - ObjectHashSet::Allocate(at_least_space_for), - ObjectHashSet); -} - - Handle<ObjectHashTable> Factory::NewObjectHashTable(int at_least_space_for) { ASSERT(0 <= at_least_space_for); CALL_HEAP_FUNCTION(isolate(), @@ -479,12 +471,6 @@ Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) { } -Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray( - Handle<FixedDoubleArray> array) { - CALL_HEAP_FUNCTION(isolate(), array->Copy(), FixedDoubleArray); -} - - Handle<JSFunction> Factory::BaseNewFunctionFromSharedFunctionInfo( Handle<SharedFunctionInfo> function_info, Handle<Map> function_map, @@ -511,20 +497,16 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( pretenure); result->set_context(*context); - if (!function_info->bound()) { - int number_of_literals = function_info->num_literals(); - Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure); - if (number_of_literals > 0) { - // Store the object, regexp and array functions in the literals - // array prefix. These functions will be used when creating - // object, regexp and array literals in this function. - literals->set(JSFunction::kLiteralGlobalContextIndex, - context->global_context()); - } - result->set_literals(*literals); - } else { - result->set_function_bindings(isolate()->heap()->empty_fixed_array()); + int number_of_literals = function_info->num_literals(); + Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure); + if (number_of_literals > 0) { + // Store the object, regexp and array functions in the literals + // array prefix. These functions will be used when creating + // object, regexp and array literals in this function. + literals->set(JSFunction::kLiteralGlobalContextIndex, + context->global_context()); } + result->set_literals(*literals); result->set_next_function_link(isolate()->heap()->undefined_value()); if (V8::UseCrankshaft() && @@ -839,13 +821,10 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors( // Number of descriptors added to the result so far. int descriptor_count = 0; - // Ensure that marking will not progress and change color of objects. - DescriptorArray::WhitenessWitness witness(*result); - // Copy the descriptors from the array. 
for (int i = 0; i < array->number_of_descriptors(); i++) { if (array->GetType(i) != NULL_DESCRIPTOR) { - result->CopyFrom(descriptor_count++, *array, i, witness); + result->CopyFrom(descriptor_count++, *array, i); } } @@ -865,7 +844,7 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors( if (result->LinearSearch(*key, descriptor_count) == DescriptorArray::kNotFound) { CallbacksDescriptor desc(*key, *entry, entry->property_attributes()); - result->Set(descriptor_count, &desc, witness); + result->Set(descriptor_count, &desc); descriptor_count++; } else { duplicates++; @@ -879,13 +858,13 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors( Handle<DescriptorArray> new_result = NewDescriptorArray(number_of_descriptors); for (int i = 0; i < number_of_descriptors; i++) { - new_result->CopyFrom(i, *result, i, witness); + new_result->CopyFrom(i, *result, i); } result = new_result; } // Sort the result before returning. - result->Sort(witness); + result->Sort(); return result; } diff --git a/deps/v8/src/factory.h b/deps/v8/src/factory.h index 2073ce392..a3615f2a0 100644 --- a/deps/v8/src/factory.h +++ b/deps/v8/src/factory.h @@ -50,7 +50,7 @@ class Factory { PretenureFlag pretenure = NOT_TENURED); // Allocate a new uninitialized fixed double array. - Handle<FixedDoubleArray> NewFixedDoubleArray( + Handle<FixedArray> NewFixedDoubleArray( int size, PretenureFlag pretenure = NOT_TENURED); @@ -58,8 +58,6 @@ class Factory { Handle<StringDictionary> NewStringDictionary(int at_least_space_for); - Handle<ObjectHashSet> NewObjectHashSet(int at_least_space_for); - Handle<ObjectHashTable> NewObjectHashTable(int at_least_space_for); Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors); @@ -224,9 +222,6 @@ class Factory { Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array); - Handle<FixedDoubleArray> CopyFixedDoubleArray( - Handle<FixedDoubleArray> array); - // Numbers (eg, literals) are pretenured by the parser. Handle<Object> NewNumber(double value, PretenureFlag pretenure = NOT_TENURED); diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h index ee6ef01a6..58fab14e1 100644 --- a/deps/v8/src/flag-definitions.h +++ b/deps/v8/src/flag-definitions.h @@ -100,8 +100,7 @@ private: DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false, "enable harmony block scoping") DEFINE_bool(harmony_proxies, false, "enable harmony proxies") -DEFINE_bool(harmony_collections, false, - "enable harmony collections (sets, maps, and weak maps)") +DEFINE_bool(harmony_weakmaps, false, "enable harmony weak maps") DEFINE_bool(harmony, false, "enable all harmony features") // Flags for experimental implementation features. 
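The flag changes in the flag-definitions.h hunk above use the file's usual X-macro style: the header is included repeatedly with DEFINE_bool redefined for each pass. A toy version of the pattern with a hypothetical expansion, not V8's actual macro body:

#include <cstdio>

// Toy stand-in: each flag becomes a global FLAG_<name> with a default value
// plus a help string (hypothetical expansion for illustration only).
#define DEFINE_BOOL_TOY(name, default_value, comment) \
  bool FLAG_##name = default_value;                   \
  const char* FLAG_##name##_help = comment;

DEFINE_BOOL_TOY(harmony_proxies, false, "enable harmony proxies")
DEFINE_BOOL_TOY(harmony_weakmaps, false, "enable harmony weak maps")

int main() {
  std::printf("%s (default %d)\n", FLAG_harmony_weakmaps_help,
              FLAG_harmony_weakmaps);
  return 0;
}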
@@ -187,8 +186,6 @@ DEFINE_bool(expose_gc, false, "expose gc extension") DEFINE_bool(expose_externalize_string, false, "expose externalize string extension") DEFINE_int(stack_trace_limit, 10, "number of stack frames to capture") -DEFINE_bool(builtins_in_stack_traces, false, - "show built-in functions in stack traces") DEFINE_bool(disable_native_files, false, "disable builtin natives files") // builtins-ia32.cc @@ -530,9 +527,6 @@ DEFINE_bool(ll_prof, false, "Enable low-level linux profiler.") #define FLAG FLAG_READONLY #endif -// elements.cc -DEFINE_bool(trace_elements_transitions, false, "trace elements transitions") - // code-stubs.cc DEFINE_bool(print_code_stubs, false, "print code stubs") diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc index 7c4c573e1..412a59cc7 100644 --- a/deps/v8/src/frames.cc +++ b/deps/v8/src/frames.cc @@ -711,69 +711,6 @@ void JavaScriptFrame::Summarize(List<FrameSummary>* functions) { } -void JavaScriptFrame::PrintTop(FILE* file, - bool print_args, - bool print_line_number) { - // constructor calls - HandleScope scope; - AssertNoAllocation no_allocation; - JavaScriptFrameIterator it; - while (!it.done()) { - if (it.frame()->is_java_script()) { - JavaScriptFrame* frame = it.frame(); - if (frame->IsConstructor()) PrintF(file, "new "); - // function name - Object* fun = frame->function(); - if (fun->IsJSFunction()) { - SharedFunctionInfo* shared = JSFunction::cast(fun)->shared(); - shared->DebugName()->ShortPrint(file); - if (print_line_number) { - Address pc = frame->pc(); - Code* code = Code::cast( - v8::internal::Isolate::Current()->heap()->FindCodeObject(pc)); - int source_pos = code->SourcePosition(pc); - Object* maybe_script = shared->script(); - if (maybe_script->IsScript()) { - Handle<Script> script(Script::cast(maybe_script)); - int line = GetScriptLineNumberSafe(script, source_pos) + 1; - Object* script_name_raw = script->name(); - if (script_name_raw->IsString()) { - String* script_name = String::cast(script->name()); - SmartArrayPointer<char> c_script_name = - script_name->ToCString(DISALLOW_NULLS, - ROBUST_STRING_TRAVERSAL); - PrintF(file, " at %s:%d", *c_script_name, line); - } else { - PrintF(file, "at <unknown>:%d", line); - } - } else { - PrintF(file, " at <unknown>:<unknown>"); - } - } - } else { - fun->ShortPrint(file); - } - - if (print_args) { - // function arguments - // (we are intentionally only printing the actually - // supplied parameters, not all parameters required) - PrintF(file, "(this="); - frame->receiver()->ShortPrint(file); - const int length = frame->ComputeParametersCount(); - for (int i = 0; i < length; i++) { - PrintF(file, ", "); - frame->GetParameter(i)->ShortPrint(file); - } - PrintF(file, ")"); - } - break; - } - it.Advance(); - } -} - - void FrameSummary::Print() { PrintF("receiver: "); receiver_->ShortPrint(); diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h index 778b80316..ca19b053a 100644 --- a/deps/v8/src/frames.h +++ b/deps/v8/src/frames.h @@ -512,8 +512,6 @@ class JavaScriptFrame: public StandardFrame { return static_cast<JavaScriptFrame*>(frame); } - static void PrintTop(FILE* file, bool print_args, bool print_line_number); - protected: inline explicit JavaScriptFrame(StackFrameIterator* iterator); diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc index 27c509f77..083675d13 100644 --- a/deps/v8/src/full-codegen.cc +++ b/deps/v8/src/full-codegen.cc @@ -289,12 +289,11 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { #ifdef ENABLE_DEBUGGER_SUPPORT 
code->set_has_debug_break_slots( info->isolate()->debugger()->IsDebuggerActive()); - code->set_compiled_optimizable(info->IsOptimizable()); #endif // ENABLE_DEBUGGER_SUPPORT code->set_allow_osr_at_loop_nesting_level(0); code->set_stack_check_table_offset(table_offset); CodeGenerator::PrintCode(code, info); - info->SetCode(code); // May be an empty handle. + info->SetCode(code); // may be an empty handle. #ifdef ENABLE_GDB_JIT_INTERFACE if (FLAG_gdbjit && !code.is_null()) { GDBJITLineInfo* lineinfo = @@ -521,8 +520,8 @@ void FullCodeGenerator::VisitDeclarations( if (var->IsUnallocated()) { array->set(j++, *(var->name())); if (decl->fun() == NULL) { - if (var->binding_needs_init()) { - // In case this binding needs initialization use the hole. + if (var->mode() == CONST) { + // In case this is const property use the hole. array->set_the_hole(j++); } else { array->set_undefined(j++); @@ -547,10 +546,11 @@ void FullCodeGenerator::VisitDeclarations( int FullCodeGenerator::DeclareGlobalsFlags() { - ASSERT(DeclareGlobalsStrictModeFlag::is_valid(strict_mode_flag())); - return DeclareGlobalsEvalFlag::encode(is_eval()) | - DeclareGlobalsStrictModeFlag::encode(strict_mode_flag()) | - DeclareGlobalsNativeFlag::encode(is_native()); + int flags = 0; + if (is_eval()) flags |= kDeclareGlobalsEvalFlag; + if (is_strict_mode()) flags |= kDeclareGlobalsStrictModeFlag; + if (is_native()) flags |= kDeclareGlobalsNativeFlag; + return flags; } diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h index 913250268..081192a54 100644 --- a/deps/v8/src/full-codegen.h +++ b/deps/v8/src/full-codegen.h @@ -577,11 +577,9 @@ class FullCodeGenerator: public AstVisitor { Handle<Script> script() { return info_->script(); } bool is_eval() { return info_->is_eval(); } bool is_native() { return info_->is_native(); } - bool is_strict_mode() { - return strict_mode_flag() == kStrictMode; - } + bool is_strict_mode() { return function()->strict_mode(); } StrictModeFlag strict_mode_flag() { - return function()->strict_mode_flag(); + return is_strict_mode() ? kStrictMode : kNonStrictMode; } FunctionLiteral* function() { return info_->function(); } Scope* scope() { return scope_; } diff --git a/deps/v8/src/globals.h b/deps/v8/src/globals.h index cbe7abdf6..d0c78d6e2 100644 --- a/deps/v8/src/globals.h +++ b/deps/v8/src/globals.h @@ -230,9 +230,6 @@ const int kPointerSize = sizeof(void*); // NOLINT const int kDoubleSizeLog2 = 3; -// Size of the state of a the random number generator. 
-const int kRandomStateSize = 2 * kIntSize; - #if V8_HOST_ARCH_64_BIT const int kPointerSizeLog2 = 3; const intptr_t kIntptrSignBit = V8_INT64_C(0x8000000000000000); diff --git a/deps/v8/src/handles.cc b/deps/v8/src/handles.cc index 62851f341..57f5d1b66 100644 --- a/deps/v8/src/handles.cc +++ b/deps/v8/src/handles.cc @@ -376,6 +376,24 @@ Handle<Object> GetProperty(Handle<Object> obj, } +Handle<Object> GetProperty(Handle<JSReceiver> obj, + Handle<String> name, + LookupResult* result) { + PropertyAttributes attributes; + Isolate* isolate = Isolate::Current(); + CALL_HEAP_FUNCTION(isolate, + obj->GetProperty(*obj, result, *name, &attributes), + Object); +} + + +Handle<Object> GetElement(Handle<Object> obj, + uint32_t index) { + Isolate* isolate = Isolate::Current(); + CALL_HEAP_FUNCTION(isolate, Runtime::GetElement(obj, index), Object); +} + + Handle<Object> GetPropertyWithInterceptor(Handle<JSObject> receiver, Handle<JSObject> holder, Handle<String> name, @@ -486,14 +504,6 @@ Handle<Object> SetOwnElement(Handle<JSObject> object, } -Handle<Object> TransitionElementsKind(Handle<JSObject> object, - ElementsKind to_kind) { - CALL_HEAP_FUNCTION(object->GetIsolate(), - object->TransitionElementsKind(to_kind), - Object); -} - - Handle<JSObject> Copy(Handle<JSObject> obj) { Isolate* isolate = obj->GetIsolate(); CALL_HEAP_FUNCTION(isolate, @@ -691,7 +701,7 @@ void CustomArguments::IterateInstance(ObjectVisitor* v) { // Compute the property keys from the interceptor. -v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver, +v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSObject> receiver, Handle<JSObject> object) { Isolate* isolate = receiver->GetIsolate(); Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor()); @@ -713,7 +723,7 @@ v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver, // Compute the element keys from the interceptor. -v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSReceiver> receiver, +v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSObject> receiver, Handle<JSObject> object) { Isolate* isolate = receiver->GetIsolate(); Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor()); @@ -744,9 +754,8 @@ static bool ContainsOnlyValidKeys(Handle<FixedArray> array) { } -Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object, - KeyCollectionType type, - bool* threw) { +Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object, + KeyCollectionType type) { USE(ContainsOnlyValidKeys); Isolate* isolate = object->GetIsolate(); Handle<FixedArray> content = isolate->factory()->empty_fixed_array(); @@ -761,16 +770,6 @@ Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object, for (Handle<Object> p = object; *p != isolate->heap()->null_value(); p = Handle<Object>(p->GetPrototype(), isolate)) { - if (p->IsJSProxy()) { - Handle<JSProxy> proxy(JSProxy::cast(*p), isolate); - Handle<Object> args[] = { proxy }; - Handle<Object> names = Execution::Call( - isolate->proxy_enumerate(), object, ARRAY_SIZE(args), args, threw); - if (*threw) return content; - content = AddKeysFromJSArray(content, Handle<JSArray>::cast(names)); - break; - } - Handle<JSObject> current(JSObject::cast(*p), isolate); // Check access rights if required. 
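GetKeysInFixedArrayFor in the handles.cc hunk above walks the prototype chain (the receiver, then each GetPrototype() result until null) accumulating enumerable keys, and GetKeysFor wraps the result for for-in. A rough, V8-free sketch of the same chain walk over a toy object model; all names are hypothetical:

#include <cstddef>
#include <string>
#include <vector>

// Toy object model: each object owns some keys and may have a prototype.
struct ToyObject {
  std::vector<std::string> own_keys;
  const ToyObject* prototype;  // NULL ends the chain, like null_value() above.
};

// Mirrors the shape of the prototype-chain loop above: collect keys from the
// receiver and from every prototype until the chain ends.
std::vector<std::string> GetKeysIncludingProtos(const ToyObject* object) {
  std::vector<std::string> keys;
  for (const ToyObject* p = object; p != NULL; p = p->prototype) {
    keys.insert(keys.end(), p->own_keys.begin(), p->own_keys.end());
  }
  return keys;
}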
@@ -837,11 +836,11 @@ Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object, } -Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw) { +Handle<JSArray> GetKeysFor(Handle<JSObject> object) { Isolate* isolate = object->GetIsolate(); isolate->counters()->for_in()->Increment(); - Handle<FixedArray> elements = - GetKeysInFixedArrayFor(object, INCLUDE_PROTOS, threw); + Handle<FixedArray> elements = GetKeysInFixedArrayFor(object, + INCLUDE_PROTOS); return isolate->factory()->NewJSArrayWithElements(elements); } @@ -891,29 +890,62 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object, } -Handle<ObjectHashSet> ObjectHashSetAdd(Handle<ObjectHashSet> table, - Handle<Object> key) { +Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table, + Handle<JSReceiver> key, + Handle<Object> value) { CALL_HEAP_FUNCTION(table->GetIsolate(), - table->Add(*key), - ObjectHashSet); + table->Put(*key, *value), + ObjectHashTable); } -Handle<ObjectHashSet> ObjectHashSetRemove(Handle<ObjectHashSet> table, - Handle<Object> key) { - CALL_HEAP_FUNCTION(table->GetIsolate(), - table->Remove(*key), - ObjectHashSet); +bool EnsureCompiled(Handle<SharedFunctionInfo> shared, + ClearExceptionFlag flag) { + return shared->is_compiled() || CompileLazyShared(shared, flag); } -Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table, - Handle<Object> key, - Handle<Object> value) { - CALL_HEAP_FUNCTION(table->GetIsolate(), - table->Put(*key, *value), - ObjectHashTable); +static bool CompileLazyHelper(CompilationInfo* info, + ClearExceptionFlag flag) { + // Compile the source information to a code object. + ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled()); + ASSERT(!info->isolate()->has_pending_exception()); + bool result = Compiler::CompileLazy(info); + ASSERT(result != Isolate::Current()->has_pending_exception()); + if (!result && flag == CLEAR_EXCEPTION) { + info->isolate()->clear_pending_exception(); + } + return result; } +bool CompileLazyShared(Handle<SharedFunctionInfo> shared, + ClearExceptionFlag flag) { + CompilationInfo info(shared); + return CompileLazyHelper(&info, flag); +} + + +bool CompileLazy(Handle<JSFunction> function, ClearExceptionFlag flag) { + bool result = true; + if (function->shared()->is_compiled()) { + function->ReplaceCode(function->shared()->code()); + function->shared()->set_code_age(0); + } else { + CompilationInfo info(function); + result = CompileLazyHelper(&info, flag); + ASSERT(!result || function->is_compiled()); + } + return result; +} + + +bool CompileOptimized(Handle<JSFunction> function, + int osr_ast_id, + ClearExceptionFlag flag) { + CompilationInfo info(function); + info.SetOptimizing(osr_ast_id); + return CompileLazyHelper(&info, flag); +} + } } // namespace v8::internal diff --git a/deps/v8/src/handles.h b/deps/v8/src/handles.h index 06e47fca9..d5521f89c 100644 --- a/deps/v8/src/handles.h +++ b/deps/v8/src/handles.h @@ -240,15 +240,20 @@ Handle<Object> SetOwnElement(Handle<JSObject> object, Handle<Object> value, StrictModeFlag strict_mode); -Handle<Object> TransitionElementsKind(Handle<JSObject> object, - ElementsKind to_kind); - Handle<Object> GetProperty(Handle<JSReceiver> obj, const char* name); Handle<Object> GetProperty(Handle<Object> obj, Handle<Object> key); +Handle<Object> GetProperty(Handle<JSReceiver> obj, + Handle<String> name, + LookupResult* result); + + +Handle<Object> GetElement(Handle<Object> obj, + uint32_t index); + Handle<Object> GetPropertyWithInterceptor(Handle<JSObject> 
receiver, Handle<JSObject> holder, Handle<String> name, @@ -295,19 +300,18 @@ int GetScriptLineNumberSafe(Handle<Script> script, int code_position); // Computes the enumerable keys from interceptors. Used for debug mirrors and // by GetKeysInFixedArrayFor below. -v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver, +v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSObject> receiver, Handle<JSObject> object); -v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSReceiver> receiver, +v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSObject> receiver, Handle<JSObject> object); enum KeyCollectionType { LOCAL_ONLY, INCLUDE_PROTOS }; // Computes the enumerable keys for a JSObject. Used for implementing // "for (n in object) { }". -Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object, - KeyCollectionType type, - bool* threw); -Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw); +Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object, + KeyCollectionType type); +Handle<JSArray> GetKeysFor(Handle<JSObject> object); Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object, bool cache_result); @@ -342,16 +346,26 @@ Handle<Object> SetPrototype(Handle<JSFunction> function, Handle<Object> PreventExtensions(Handle<JSObject> object); -Handle<ObjectHashSet> ObjectHashSetAdd(Handle<ObjectHashSet> table, - Handle<Object> key); - -Handle<ObjectHashSet> ObjectHashSetRemove(Handle<ObjectHashSet> table, - Handle<Object> key); - Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table, - Handle<Object> key, + Handle<JSReceiver> key, Handle<Object> value); +// Does lazy compilation of the given function. Returns true on success and +// false if the compilation resulted in a stack overflow. 
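The lazy-compilation helpers restored in the handles.cc hunk above (and declared just below) share one control flow: attempt compilation, and on failure either keep or clear the isolate's pending exception depending on ClearExceptionFlag. A compiler-free sketch of that flow with toy types and hypothetical names:

#include <cstdio>

enum ClearExceptionFlag { KEEP_EXCEPTION, CLEAR_EXCEPTION };

struct ToyIsolate {
  bool has_pending_exception;
  void clear_pending_exception() { has_pending_exception = false; }
};

// Shape of CompileLazyHelper above: a failed compile leaves a pending
// exception, which is cleared only when the caller asked for CLEAR_EXCEPTION.
bool ToyCompileLazy(ToyIsolate* isolate, bool compile_succeeds,
                    ClearExceptionFlag flag) {
  if (!compile_succeeds) {
    isolate->has_pending_exception = true;
    if (flag == CLEAR_EXCEPTION) isolate->clear_pending_exception();
    return false;
  }
  return true;
}

int main() {
  ToyIsolate isolate = { false };
  if (!ToyCompileLazy(&isolate, false, CLEAR_EXCEPTION)) {
    std::printf("compile failed; pending exception: %d\n",
                isolate.has_pending_exception);  // prints 0
  }
  return 0;
}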
+enum ClearExceptionFlag { KEEP_EXCEPTION, CLEAR_EXCEPTION }; + +bool EnsureCompiled(Handle<SharedFunctionInfo> shared, + ClearExceptionFlag flag); + +bool CompileLazyShared(Handle<SharedFunctionInfo> shared, + ClearExceptionFlag flag); + +bool CompileLazy(Handle<JSFunction> function, ClearExceptionFlag flag); + +bool CompileOptimized(Handle<JSFunction> function, + int osr_ast_id, + ClearExceptionFlag flag); + class NoHandleAllocation BASE_EMBEDDED { public: #ifndef DEBUG diff --git a/deps/v8/src/heap-inl.h b/deps/v8/src/heap-inl.h index aaf2927f7..4bd893e8e 100644 --- a/deps/v8/src/heap-inl.h +++ b/deps/v8/src/heap-inl.h @@ -359,6 +359,7 @@ AllocationSpace Heap::TargetSpaceId(InstanceType type) { void Heap::CopyBlock(Address dst, Address src, int byte_size) { + ASSERT(IsAligned(byte_size, kPointerSize)); CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src), byte_size / kPointerSize); @@ -590,9 +591,7 @@ void ExternalStringTable::AddOldString(String* string) { void ExternalStringTable::ShrinkNewStrings(int position) { new_space_strings_.Rewind(position); - if (FLAG_verify_heap) { - Verify(); - } + Verify(); } diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc index bbb9d3e26..c6efd6205 100644 --- a/deps/v8/src/heap.cc +++ b/deps/v8/src/heap.cc @@ -693,9 +693,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector, PROFILE(isolate_, CodeMovingGCEvent()); } - if (FLAG_verify_heap) { - VerifySymbolTable(); - } + VerifySymbolTable(); if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { ASSERT(!allocation_allowed_); GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); @@ -791,9 +789,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector, GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); global_gc_epilogue_callback_(); } - if (FLAG_verify_heap) { - VerifySymbolTable(); - } + VerifySymbolTable(); return next_gc_likely_to_collect_more; } @@ -987,7 +983,7 @@ void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) { void Heap::Scavenge() { #ifdef DEBUG - if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); + if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); #endif gc_state_ = SCAVENGE; @@ -1116,9 +1112,7 @@ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, void Heap::UpdateNewSpaceReferencesInExternalStringTable( ExternalStringTableUpdaterCallback updater_func) { - if (FLAG_verify_heap) { - external_string_table_.Verify(); - } + external_string_table_.Verify(); if (external_string_table_.new_space_strings_.is_empty()) return; @@ -1449,9 +1443,9 @@ class ScavengingVisitor : public StaticVisitorBase { HeapObject** slot, HeapObject* object, int object_size) { - SLOW_ASSERT((size_restriction != SMALL) || - (object_size <= Page::kMaxHeapObjectSize)); - SLOW_ASSERT(object->Size() == object_size); + ASSERT((size_restriction != SMALL) || + (object_size <= Page::kMaxHeapObjectSize)); + ASSERT(object->Size() == object_size); Heap* heap = map->GetHeap(); if (heap->ShouldBePromoted(object->address(), object_size)) { @@ -1684,9 +1678,9 @@ void Heap::SelectScavengingVisitorsTable() { void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { - SLOW_ASSERT(HEAP->InFromSpace(object)); + ASSERT(HEAP->InFromSpace(object)); MapWord first_word = object->map_word(); - SLOW_ASSERT(!first_word.IsForwardingAddress()); + ASSERT(!first_word.IsForwardingAddress()); Map* map = first_word.ToMap(); map->GetHeap()->DoScavengeObject(map, p, object); } @@ -2916,9 +2910,7 @@ 
MaybeObject* Heap::AllocateSubString(String* buffer, ASSERT(buffer->IsFlat()); #if DEBUG - if (FLAG_verify_heap) { - buffer->StringVerify(); - } + buffer->StringVerify(); #endif Object* result; @@ -3164,9 +3156,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc, code->CopyFrom(desc); #ifdef DEBUG - if (FLAG_verify_heap) { - code->Verify(); - } + code->Verify(); #endif return code; } @@ -3246,9 +3236,7 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { new_code->Relocate(new_addr - old_addr); #ifdef DEBUG - if (FLAG_verify_heap) { - code->Verify(); - } + code->Verify(); #endif return new_code; } @@ -3281,7 +3269,7 @@ void Heap::InitializeFunction(JSFunction* function, function->set_code(shared->code()); function->set_prototype_or_initial_map(prototype); function->set_context(undefined_value()); - function->set_literals_or_bindings(empty_fixed_array()); + function->set_literals(empty_fixed_array()); function->set_next_function_link(undefined_value()); } @@ -3446,22 +3434,22 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) { // Inline constructor can only handle inobject properties. fun->shared()->ForbidInlineConstructor(); } else { - DescriptorArray* descriptors; + Object* descriptors_obj; { MaybeObject* maybe_descriptors_obj = DescriptorArray::Allocate(count); - if (!maybe_descriptors_obj->To<DescriptorArray>(&descriptors)) { + if (!maybe_descriptors_obj->ToObject(&descriptors_obj)) { return maybe_descriptors_obj; } } - DescriptorArray::WhitenessWitness witness(descriptors); + DescriptorArray* descriptors = DescriptorArray::cast(descriptors_obj); for (int i = 0; i < count; i++) { String* name = fun->shared()->GetThisPropertyAssignmentName(i); ASSERT(name->IsSymbol()); FieldDescriptor field(name, i, NONE); field.SetEnumerationIndex(i); - descriptors->Set(i, &field, witness); + descriptors->Set(i, &field); } descriptors->SetNextEnumerationIndex(count); - descriptors->SortUnchecked(witness); + descriptors->SortUnchecked(); // The descriptors may contain duplicates because the compiler does not // guarantee the uniqueness of property names (it would have required @@ -3700,15 +3688,13 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) { MaybeObject* Heap::CopyJSObject(JSObject* source) { // Never used to copy functions. If functions need to be copied we // have to be careful to clear the literals array. - SLOW_ASSERT(!source->IsJSFunction()); + ASSERT(!source->IsJSFunction()); // Make the clone. Map* map = source->map(); int object_size = map->instance_size(); Object* clone; - WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; - // If we're forced to always allocate, we use the general allocation // functions which may leave us with an object in old space. if (always_allocate()) { @@ -3725,11 +3711,10 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) { JSObject::kHeaderSize, (object_size - JSObject::kHeaderSize) / kPointerSize); } else { - wb_mode = SKIP_WRITE_BARRIER; { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size); if (!maybe_clone->ToObject(&clone)) return maybe_clone; } - SLOW_ASSERT(InNewSpace(clone)); + ASSERT(InNewSpace(clone)); // Since we know the clone is allocated in new space, we can copy // the contents without worrying about updating the write barrier. 
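CopyJSObject above copies the object body with Heap::CopyBlock, which (per the heap-inl.h hunk earlier in this diff) copies byte_size / kPointerSize whole words and, after this revert, again asserts the size is pointer-aligned. A freestanding sketch of that word-wise copy; simplified, not V8's CopyWords:

#include <cassert>
#include <cstddef>

// Simplified CopyBlock: copy a pointer-aligned block as whole words.
void ToyCopyBlock(void** dst, void** src, size_t byte_size) {
  // Mirrors ASSERT(IsAligned(byte_size, kPointerSize)) in the hunk above.
  assert(byte_size % sizeof(void*) == 0);
  for (size_t i = 0; i < byte_size / sizeof(void*); ++i) dst[i] = src[i];
}

int main() {
  void* src[4] = { 0, 0, 0, 0 };
  void* dst[4];
  ToyCopyBlock(dst, src, sizeof(src));
  return 0;
}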
CopyBlock(HeapObject::cast(clone)->address(), @@ -3737,8 +3722,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) { object_size); } - SLOW_ASSERT( - JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); + ASSERT(JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); FixedArray* properties = FixedArray::cast(source->properties()); // Update elements if necessary. @@ -3754,7 +3738,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) { } if (!maybe_elem->ToObject(&elem)) return maybe_elem; } - JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode); + JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem)); } // Update properties if necessary. if (properties->length() > 0) { @@ -3762,7 +3746,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) { { MaybeObject* maybe_prop = CopyFixedArray(properties); if (!maybe_prop->ToObject(&prop)) return maybe_prop; } - JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); + JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); } // Return the new clone. return clone; @@ -4818,12 +4802,12 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start, HeapObject::cast(object)); Object* new_object = *slot; if (InNewSpace(new_object)) { - SLOW_ASSERT(Heap::InToSpace(new_object)); - SLOW_ASSERT(new_object->IsHeapObject()); + ASSERT(Heap::InToSpace(new_object)); + ASSERT(new_object->IsHeapObject()); store_buffer_.EnterDirectlyIntoStoreBuffer( reinterpret_cast<Address>(slot)); } - SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object)); + ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object)); } else if (record_slots && MarkCompactCollector::IsOnEvacuationCandidate(object)) { mark_compact_collector()->RecordSlot(slot, slot, object); @@ -5377,7 +5361,6 @@ class HeapDebugUtils { bool Heap::Setup(bool create_heap_objects) { #ifdef DEBUG - allocation_timeout_ = FLAG_gc_interval; debug_utils_ = new HeapDebugUtils(this); #endif @@ -5463,7 +5446,7 @@ bool Heap::Setup(bool create_heap_objects) { // The large object code space may contain code or data. We set the memory // to be non-executable here for safety, but this means we need to enable it // explicitly when allocating large code objects. 
- lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE); + lo_space_ = new LargeObjectSpace(this, LO_SPACE); if (lo_space_ == NULL) return false; if (!lo_space_->Setup()) return false; if (create_heap_objects) { @@ -5779,51 +5762,56 @@ class HeapObjectsFilter { class UnreachableObjectsFilter : public HeapObjectsFilter { public: UnreachableObjectsFilter() { - MarkReachableObjects(); - } - - ~UnreachableObjectsFilter() { - Isolate::Current()->heap()->mark_compact_collector()->ClearMarkbits(); + MarkUnreachableObjects(); } bool SkipObject(HeapObject* object) { - MarkBit mark_bit = Marking::MarkBitFrom(object); - return !mark_bit.Get(); + if (IntrusiveMarking::IsMarked(object)) { + IntrusiveMarking::ClearMark(object); + return true; + } else { + return false; + } } private: - class MarkingVisitor : public ObjectVisitor { + class UnmarkingVisitor : public ObjectVisitor { public: - MarkingVisitor() : marking_stack_(10) {} + UnmarkingVisitor() : list_(10) {} void VisitPointers(Object** start, Object** end) { for (Object** p = start; p < end; p++) { if (!(*p)->IsHeapObject()) continue; HeapObject* obj = HeapObject::cast(*p); - MarkBit mark_bit = Marking::MarkBitFrom(obj); - if (!mark_bit.Get()) { - mark_bit.Set(); - marking_stack_.Add(obj); + if (IntrusiveMarking::IsMarked(obj)) { + IntrusiveMarking::ClearMark(obj); + list_.Add(obj); } } } - void TransitiveClosure() { - while (!marking_stack_.is_empty()) { - HeapObject* obj = marking_stack_.RemoveLast(); - obj->Iterate(this); - } + bool can_process() { return !list_.is_empty(); } + + void ProcessNext() { + HeapObject* obj = list_.RemoveLast(); + obj->Iterate(this); } private: - List<HeapObject*> marking_stack_; + List<HeapObject*> list_; }; - void MarkReachableObjects() { - Heap* heap = Isolate::Current()->heap(); - MarkingVisitor visitor; - heap->IterateRoots(&visitor, VISIT_ALL); - visitor.TransitiveClosure(); + void MarkUnreachableObjects() { + HeapIterator iterator; + for (HeapObject* obj = iterator.next(); + obj != NULL; + obj = iterator.next()) { + IntrusiveMarking::SetMark(obj); + } + UnmarkingVisitor visitor; + HEAP->IterateRoots(&visitor, VISIT_ALL); + while (visitor.can_process()) + visitor.ProcessNext(); } AssertNoAllocation no_alloc; @@ -5851,8 +5839,13 @@ HeapIterator::~HeapIterator() { void HeapIterator::Init() { // Start the iteration. - space_iterator_ = new SpaceIterator; + space_iterator_ = filtering_ == kNoFiltering ? new SpaceIterator : + new SpaceIterator(Isolate::Current()->heap()-> + GcSafeSizeOfOldObjectFunction()); switch (filtering_) { + case kFilterFreeListNodes: + // TODO(gc): Not handled. 
+ break; case kFilterUnreachable: filter_ = new UnreachableObjectsFilter; break; @@ -6357,9 +6350,7 @@ void ExternalStringTable::CleanUp() { old_space_strings_[last++] = old_space_strings_[i]; } old_space_strings_.Rewind(last); - if (FLAG_verify_heap) { - Verify(); - } + Verify(); } diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h index 7c0b0ea8d..6fb2d18c2 100644 --- a/deps/v8/src/heap.h +++ b/deps/v8/src/heap.h @@ -64,31 +64,18 @@ inline Heap* _inline_get_heap_(); V(Oddball, null_value, NullValue) \ V(Oddball, true_value, TrueValue) \ V(Oddball, false_value, FalseValue) \ - V(Map, global_property_cell_map, GlobalPropertyCellMap) \ - V(Map, shared_function_info_map, SharedFunctionInfoMap) \ - V(Map, meta_map, MetaMap) \ - V(Map, ascii_symbol_map, AsciiSymbolMap) \ - V(Map, ascii_string_map, AsciiStringMap) \ + V(Oddball, arguments_marker, ArgumentsMarker) \ + V(Oddball, frame_alignment_marker, FrameAlignmentMarker) \ V(Map, heap_number_map, HeapNumberMap) \ V(Map, global_context_map, GlobalContextMap) \ V(Map, fixed_array_map, FixedArrayMap) \ - V(Map, code_map, CodeMap) \ V(Map, serialized_scope_info_map, SerializedScopeInfoMap) \ V(Map, fixed_cow_array_map, FixedCOWArrayMap) \ V(Map, fixed_double_array_map, FixedDoubleArrayMap) \ V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \ + V(Map, meta_map, MetaMap) \ V(Map, hash_table_map, HashTableMap) \ - V(FixedArray, empty_fixed_array, EmptyFixedArray) \ - V(ByteArray, empty_byte_array, EmptyByteArray) \ - V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray) \ - V(String, empty_string, EmptyString) \ - V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ V(Smi, stack_limit, StackLimit) \ - V(Oddball, frame_alignment_marker, FrameAlignmentMarker) \ - V(Oddball, arguments_marker, ArgumentsMarker) \ - /* The first 32 roots above this line should be boring from a GC point of */ \ - /* view. This means they are never in new space and never on a page that */ \ - /* is being compacted. 
*/ \ V(FixedArray, number_string_cache, NumberStringCache) \ V(Object, instanceof_cache_function, InstanceofCacheFunction) \ V(Object, instanceof_cache_map, InstanceofCacheMap) \ @@ -96,12 +83,19 @@ inline Heap* _inline_get_heap_(); V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \ V(FixedArray, string_split_cache, StringSplitCache) \ V(Object, termination_exception, TerminationException) \ + V(FixedArray, empty_fixed_array, EmptyFixedArray) \ + V(ByteArray, empty_byte_array, EmptyByteArray) \ + V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray) \ + V(String, empty_string, EmptyString) \ + V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ V(Map, string_map, StringMap) \ + V(Map, ascii_string_map, AsciiStringMap) \ V(Map, symbol_map, SymbolMap) \ V(Map, cons_string_map, ConsStringMap) \ V(Map, cons_ascii_string_map, ConsAsciiStringMap) \ V(Map, sliced_string_map, SlicedStringMap) \ V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \ + V(Map, ascii_symbol_map, AsciiSymbolMap) \ V(Map, cons_symbol_map, ConsSymbolMap) \ V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \ V(Map, external_symbol_map, ExternalSymbolMap) \ @@ -126,7 +120,10 @@ inline Heap* _inline_get_heap_(); V(Map, catch_context_map, CatchContextMap) \ V(Map, with_context_map, WithContextMap) \ V(Map, block_context_map, BlockContextMap) \ + V(Map, code_map, CodeMap) \ V(Map, oddball_map, OddballMap) \ + V(Map, global_property_cell_map, GlobalPropertyCellMap) \ + V(Map, shared_function_info_map, SharedFunctionInfoMap) \ V(Map, message_object_map, JSMessageObjectMap) \ V(Map, foreign_map, ForeignMap) \ V(HeapNumber, nan_value, NanValue) \ @@ -1100,7 +1097,7 @@ class Heap { inline void SetLastScriptId(Object* last_script_id); // Generated code can embed this address to get access to the roots. - Object** roots_array_start() { return roots_; } + Object** roots_address() { return roots_; } Address* store_buffer_top_address() { return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); @@ -1422,9 +1419,6 @@ class Heap { // around a GC). inline void CompletelyClearInstanceofCache(); - // The roots that have an index less than this are always in old space. - static const int kOldSpaceRoots = 0x20; - private: Heap(); @@ -1480,10 +1474,7 @@ class Heap { int unflattened_strings_length_; #define ROOT_ACCESSOR(type, name, camel_name) \ - inline void set_##name(type* value) { \ - /* The deserializer makes use of the fact that these common roots are */ \ - /* never in new space and never on a page that is being compacted. */ \ - ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \ + inline void set_##name(type* value) { \ roots_[k##camel_name##RootIndex] = value; \ } ROOT_LIST(ROOT_ACCESSOR) @@ -1963,6 +1954,7 @@ class HeapIterator BASE_EMBEDDED { public: enum HeapObjectsFiltering { kNoFiltering, + kFilterFreeListNodes, kFilterUnreachable }; diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc index 6f46509ca..fd0c3bb0d 100644 --- a/deps/v8/src/hydrogen-instructions.cc +++ b/deps/v8/src/hydrogen-instructions.cc @@ -587,10 +587,11 @@ void HInstruction::Verify() { HBasicBlock* other_block = other_operand->block(); if (cur_block == other_block) { if (!other_operand->IsPhi()) { - HInstruction* cur = this->previous(); + HInstruction* cur = cur_block->first(); while (cur != NULL) { + ASSERT(cur != this); // We should reach other_operand before! 
if (cur == other_operand) break; - cur = cur->previous(); + cur = cur->next(); } // Must reach other operand in the same block! ASSERT(cur == other_operand); @@ -782,21 +783,12 @@ void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) { void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) { value()->PrintNameTo(stream); - stream->Add(" == %o", *type_literal_); + stream->Add(" == "); + stream->Add(type_literal_->GetFlatContent().ToAsciiVector()); HControlInstruction::PrintDataTo(stream); } -HValue* HConstant::Canonicalize() { - return HasNoUses() && !IsBlockEntry() ? NULL : this; -} - - -HValue* HTypeof::Canonicalize() { - return HasNoUses() && !IsBlockEntry() ? NULL : this; -} - - void HTypeof::PrintDataTo(StringStream* stream) { value()->PrintNameTo(stream); } @@ -1146,16 +1138,15 @@ void HPhi::AddIndirectUsesTo(int* dest) { void HSimulate::PrintDataTo(StringStream* stream) { - stream->Add("id=%d", ast_id()); - if (pop_count_ > 0) stream->Add(" pop %d", pop_count_); + stream->Add("id=%d ", ast_id()); + if (pop_count_ > 0) stream->Add("pop %d", pop_count_); if (values_.length() > 0) { if (pop_count_ > 0) stream->Add(" /"); for (int i = 0; i < values_.length(); ++i) { - if (i > 0) stream->Add(","); - if (HasAssignedIndexAt(i)) { - stream->Add(" var[%d] = ", GetAssignedIndexAt(i)); - } else { + if (!HasAssignedIndexAt(i)) { stream->Add(" push "); + } else { + stream->Add(" var[%d] = ", GetAssignedIndexAt(i)); } values_[i]->PrintNameTo(stream); } @@ -1236,10 +1227,7 @@ void HConstant::PrintDataTo(StringStream* stream) { bool HArrayLiteral::IsCopyOnWrite() const { - Handle<FixedArray> constant_elements = this->constant_elements(); - FixedArrayBase* constant_elements_values = - FixedArrayBase::cast(constant_elements->get(1)); - return constant_elements_values->map() == HEAP->fixed_cow_array_map(); + return constant_elements()->map() == HEAP->fixed_cow_array_map(); } @@ -1404,7 +1392,7 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context, i < types->length() && types_.length() < kMaxLoadPolymorphism; ++i) { Handle<Map> map = types->at(i); - LookupResult lookup(map->GetIsolate()); + LookupResult lookup; map->LookupInDescriptors(NULL, *name, &lookup); if (lookup.IsProperty()) { switch (lookup.type()) { @@ -1457,14 +1445,14 @@ bool HLoadNamedFieldPolymorphic::DataEquals(HValue* value) { void HLoadNamedFieldPolymorphic::PrintDataTo(StringStream* stream) { object()->PrintNameTo(stream); - stream->Add("."); + stream->Add(" ."); stream->Add(*String::cast(*name())->ToCString()); } void HLoadNamedGeneric::PrintDataTo(StringStream* stream) { object()->PrintNameTo(stream); - stream->Add("."); + stream->Add(" ."); stream->Add(*String::cast(*name())->ToCString()); } @@ -1561,10 +1549,10 @@ void HStoreNamedGeneric::PrintDataTo(StringStream* stream) { void HStoreNamedField::PrintDataTo(StringStream* stream) { object()->PrintNameTo(stream); stream->Add("."); + ASSERT(name()->IsString()); stream->Add(*String::cast(*name())->ToCString()); stream->Add(" = "); value()->PrintNameTo(stream); - stream->Add(" @%d%s", offset(), is_in_object() ? 
"[in-object]" : ""); if (!transition().is_null()) { stream->Add(" (transition map %p)", *transition()); } @@ -1645,12 +1633,6 @@ void HStoreKeyedSpecializedArrayElement::PrintDataTo( } -void HTransitionElementsKind::PrintDataTo(StringStream* stream) { - object()->PrintNameTo(stream); - stream->Add(" %p -> %p", *original_map(), *transitioned_map()); -} - - void HLoadGlobalCell::PrintDataTo(StringStream* stream) { stream->Add("[%p]", *cell()); if (!details_.IsDontDelete()) stream->Add(" (deleteable)"); @@ -1764,12 +1746,6 @@ HType HInstanceOfKnownGlobal::CalculateInferredType() { } -HType HChange::CalculateInferredType() { - if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber(); - return type(); -} - - HType HBitwiseBinaryOperation::CalculateInferredType() { return HType::TaggedNumber(); } @@ -1825,31 +1801,6 @@ HType HSar::CalculateInferredType() { } -HType HStringCharFromCode::CalculateInferredType() { - return HType::String(); -} - - -HType HArrayLiteral::CalculateInferredType() { - return HType::JSArray(); -} - - -HType HObjectLiteral::CalculateInferredType() { - return HType::JSObject(); -} - - -HType HRegExpLiteral::CalculateInferredType() { - return HType::JSObject(); -} - - -HType HFunctionLiteral::CalculateInferredType() { - return HType::JSObject(); -} - - HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero( BitVector* visited) { visited->Add(id()); diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h index 65fc4df4b..6b43f53da 100644 --- a/deps/v8/src/hydrogen-instructions.h +++ b/deps/v8/src/hydrogen-instructions.h @@ -171,7 +171,6 @@ class LChunkBuilder; V(Throw) \ V(ToFastProperties) \ V(ToInt32) \ - V(TransitionElementsKind) \ V(Typeof) \ V(TypeofIsAndBranch) \ V(UnaryMathOperation) \ @@ -398,11 +397,6 @@ class HType { return type_ == kUninitialized; } - bool IsHeapObject() { - ASSERT(type_ != kUninitialized); - return IsHeapNumber() || IsString() || IsNonPrimitive(); - } - static HType TypeFromValue(Handle<Object> value); const char* ToString(); @@ -1107,14 +1101,12 @@ class HChange: public HUnaryOperation { ASSERT(!value->representation().IsNone() && !to.IsNone()); ASSERT(!value->representation().Equals(to)); set_representation(to); - set_type(HType::TaggedNumber()); SetFlag(kUseGVN); if (deoptimize_on_undefined) SetFlag(kDeoptimizeOnUndefined); if (is_truncating) SetFlag(kTruncatingToInt32); } virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited); - virtual HType CalculateInferredType(); Representation from() { return value()->representation(); } Representation to() { return representation(); } @@ -1348,7 +1340,7 @@ class HPushArgument: public HUnaryOperation { class HThisFunction: public HTemplateInstruction<0> { public: - explicit HThisFunction(Handle<JSFunction> closure) : closure_(closure) { + HThisFunction() { set_representation(Representation::Tagged()); SetFlag(kUseGVN); } @@ -1357,18 +1349,10 @@ class HThisFunction: public HTemplateInstruction<0> { return Representation::None(); } - Handle<JSFunction> closure() const { return closure_; } - DECLARE_CONCRETE_INSTRUCTION(ThisFunction) protected: - virtual bool DataEquals(HValue* other) { - HThisFunction* b = HThisFunction::cast(other); - return *closure() == *b->closure(); - } - - private: - Handle<JSFunction> closure_; + virtual bool DataEquals(HValue* other) { return true; } }; @@ -2296,7 +2280,6 @@ class HConstant: public HTemplateInstruction<0> { } virtual bool EmitAtUses() { return !representation().IsDouble(); } - virtual HValue* 
Canonicalize(); virtual void PrintDataTo(StringStream* stream); virtual HType CalculateInferredType(); bool IsInteger() const { return handle_->IsSmi(); } @@ -3277,13 +3260,6 @@ class HLoadGlobalGeneric: public HTemplateInstruction<2> { }; -static inline bool StoringValueNeedsWriteBarrier(HValue* value) { - return !value->type().IsBoolean() - && !value->type().IsSmi() - && !(value->IsConstant() && HConstant::cast(value)->ImmortalImmovable()); -} - - class HStoreGlobalCell: public HUnaryOperation { public: HStoreGlobalCell(HValue* value, @@ -3299,9 +3275,6 @@ class HStoreGlobalCell: public HUnaryOperation { bool RequiresHoleCheck() { return !details_.IsDontDelete() || details_.IsReadOnly(); } - bool NeedsWriteBarrier() { - return StoringValueNeedsWriteBarrier(value()); - } virtual Representation RequiredInputRepresentation(int index) { return Representation::Tagged(); @@ -3382,6 +3355,13 @@ class HLoadContextSlot: public HUnaryOperation { }; +static inline bool StoringValueNeedsWriteBarrier(HValue* value) { + return !value->type().IsBoolean() + && !value->type().IsSmi() + && !(value->IsConstant() && HConstant::cast(value)->ImmortalImmovable()); +} + + class HStoreContextSlot: public HTemplateInstruction<2> { public: HStoreContextSlot(HValue* context, int slot_index, HValue* value) @@ -3720,9 +3700,9 @@ class HStoreNamedGeneric: public HTemplateInstruction<3> { HValue* object, Handle<String> name, HValue* value, - StrictModeFlag strict_mode_flag) + bool strict_mode) : name_(name), - strict_mode_flag_(strict_mode_flag) { + strict_mode_(strict_mode) { SetOperandAt(0, object); SetOperandAt(1, value); SetOperandAt(2, context); @@ -3733,7 +3713,7 @@ class HStoreNamedGeneric: public HTemplateInstruction<3> { HValue* value() { return OperandAt(1); } HValue* context() { return OperandAt(2); } Handle<String> name() { return name_; } - StrictModeFlag strict_mode_flag() { return strict_mode_flag_; } + bool strict_mode() { return strict_mode_; } virtual void PrintDataTo(StringStream* stream); @@ -3745,7 +3725,7 @@ class HStoreNamedGeneric: public HTemplateInstruction<3> { private: Handle<String> name_; - StrictModeFlag strict_mode_flag_; + bool strict_mode_; }; @@ -3906,44 +3886,6 @@ class HStoreKeyedGeneric: public HTemplateInstruction<4> { }; -class HTransitionElementsKind: public HTemplateInstruction<1> { - public: - HTransitionElementsKind(HValue* object, - Handle<Map> original_map, - Handle<Map> transitioned_map) - : original_map_(original_map), - transitioned_map_(transitioned_map) { - SetOperandAt(0, object); - SetFlag(kUseGVN); - SetFlag(kDependsOnMaps); - set_representation(Representation::Tagged()); - } - - virtual Representation RequiredInputRepresentation(int index) { - return Representation::Tagged(); - } - - HValue* object() { return OperandAt(0); } - Handle<Map> original_map() { return original_map_; } - Handle<Map> transitioned_map() { return transitioned_map_; } - - virtual void PrintDataTo(StringStream* stream); - - DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind) - - protected: - virtual bool DataEquals(HValue* other) { - HTransitionElementsKind* instr = HTransitionElementsKind::cast(other); - return original_map_.is_identical_to(instr->original_map()) && - transitioned_map_.is_identical_to(instr->transitioned_map()); - } - - private: - Handle<Map> original_map_; - Handle<Map> transitioned_map_; -}; - - class HStringAdd: public HBinaryOperation { public: HStringAdd(HValue* context, HValue* left, HValue* right) @@ -4006,7 +3948,7 @@ class HStringCharFromCode: public 
HTemplateInstruction<2> { HStringCharFromCode(HValue* context, HValue* char_code) { SetOperandAt(0, context); SetOperandAt(1, char_code); - set_representation(Representation::Tagged()); + set_representation(Representation::Tagged()); SetFlag(kUseGVN); } @@ -4015,7 +3957,6 @@ class HStringCharFromCode: public HTemplateInstruction<2> { ? Representation::Tagged() : Representation::Integer32(); } - virtual HType CalculateInferredType(); HValue* context() { return OperandAt(0); } HValue* value() { return OperandAt(1); } @@ -4093,7 +4034,6 @@ class HArrayLiteral: public HMaterializedLiteral<1> { virtual Representation RequiredInputRepresentation(int index) { return Representation::Tagged(); } - virtual HType CalculateInferredType(); DECLARE_CONCRETE_INSTRUCTION(ArrayLiteral) @@ -4128,7 +4068,6 @@ class HObjectLiteral: public HMaterializedLiteral<1> { virtual Representation RequiredInputRepresentation(int index) { return Representation::Tagged(); } - virtual HType CalculateInferredType(); DECLARE_CONCRETE_INSTRUCTION(ObjectLiteral) @@ -4158,7 +4097,6 @@ class HRegExpLiteral: public HMaterializedLiteral<1> { virtual Representation RequiredInputRepresentation(int index) { return Representation::Tagged(); } - virtual HType CalculateInferredType(); DECLARE_CONCRETE_INSTRUCTION(RegExpLiteral) @@ -4183,7 +4121,6 @@ class HFunctionLiteral: public HTemplateInstruction<1> { virtual Representation RequiredInputRepresentation(int index) { return Representation::Tagged(); } - virtual HType CalculateInferredType(); DECLARE_CONCRETE_INSTRUCTION(FunctionLiteral) @@ -4207,7 +4144,6 @@ class HTypeof: public HTemplateInstruction<2> { HValue* context() { return OperandAt(0); } HValue* value() { return OperandAt(1); } - virtual HValue* Canonicalize(); virtual void PrintDataTo(StringStream* stream); virtual Representation RequiredInputRepresentation(int index) { diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc index 1460db8ae..2d471cc29 100644 --- a/deps/v8/src/hydrogen.cc +++ b/deps/v8/src/hydrogen.cc @@ -164,11 +164,10 @@ void HBasicBlock::Finish(HControlInstruction* end) { } -void HBasicBlock::Goto(HBasicBlock* block, bool drop_extra) { +void HBasicBlock::Goto(HBasicBlock* block) { if (block->IsInlineReturnTarget()) { AddInstruction(new(zone()) HLeaveInlined); last_environment_ = last_environment()->outer(); - if (drop_extra) last_environment_->Drop(1); } AddSimulate(AstNode::kNoNumber); HGoto* instr = new(zone()) HGoto(block); @@ -176,14 +175,11 @@ void HBasicBlock::Goto(HBasicBlock* block, bool drop_extra) { } -void HBasicBlock::AddLeaveInlined(HValue* return_value, - HBasicBlock* target, - bool drop_extra) { +void HBasicBlock::AddLeaveInlined(HValue* return_value, HBasicBlock* target) { ASSERT(target->IsInlineReturnTarget()); ASSERT(return_value != NULL); AddInstruction(new(zone()) HLeaveInlined); last_environment_ = last_environment()->outer(); - if (drop_extra) last_environment_->Drop(1); last_environment()->Push(return_value); AddSimulate(AstNode::kNoNumber); HGoto* instr = new(zone()) HGoto(target); @@ -545,7 +541,7 @@ HConstant* HGraph::GetConstantHole() { HGraphBuilder::HGraphBuilder(CompilationInfo* info, TypeFeedbackOracle* oracle) : function_state_(NULL), - initial_function_state_(this, info, oracle, false), + initial_function_state_(this, info, oracle), ast_context_(NULL), break_scope_(NULL), graph_(NULL), @@ -1503,9 +1499,6 @@ int HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock( block->block_id() < dominated->block_id() && visited_on_paths_.Add(block->block_id())) { 
side_effects |= block_side_effects_[block->block_id()]; - if (block->IsLoopHeader()) { - side_effects |= loop_side_effects_[block->block_id()]; - } side_effects |= CollectSideEffectsOnPathsToDominatedBlock( dominator, block); } @@ -2012,13 +2005,11 @@ void HGraph::ComputeMinusZeroChecks() { // a (possibly inlined) function. FunctionState::FunctionState(HGraphBuilder* owner, CompilationInfo* info, - TypeFeedbackOracle* oracle, - bool drop_extra) + TypeFeedbackOracle* oracle) : owner_(owner), compilation_info_(info), oracle_(oracle), call_context_(NULL), - drop_extra_(drop_extra), function_return_(NULL), test_context_(NULL), outer_(owner->function_state()) { @@ -2177,8 +2168,8 @@ void TestContext::ReturnControl(HControlInstruction* instr, int ast_id) { instr->SetSuccessorAt(0, empty_true); instr->SetSuccessorAt(1, empty_false); owner()->current_block()->Finish(instr); - empty_true->Goto(if_true(), owner()->function_state()->drop_extra()); - empty_false->Goto(if_false(), owner()->function_state()->drop_extra()); + empty_true->Goto(if_true()); + empty_false->Goto(if_false()); owner()->set_current_block(NULL); } @@ -2199,8 +2190,8 @@ void TestContext::BuildBranch(HValue* value) { HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected); builder->current_block()->Finish(test); - empty_true->Goto(if_true(), owner()->function_state()->drop_extra()); - empty_false->Goto(if_false(), owner()->function_state()->drop_extra()); + empty_true->Goto(if_true()); + empty_false->Goto(if_false()); builder->set_current_block(NULL); } @@ -2661,14 +2652,12 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) { test->if_false()); } else if (context->IsEffect()) { CHECK_ALIVE(VisitForEffect(stmt->expression())); - current_block()->Goto(function_return(), function_state()->drop_extra()); + current_block()->Goto(function_return()); } else { ASSERT(context->IsValue()); CHECK_ALIVE(VisitForValue(stmt->expression())); HValue* return_value = environment()->Pop(); - current_block()->AddLeaveInlined(return_value, - function_return(), - function_state()->drop_extra()); + current_block()->AddLeaveInlined(return_value, function_return()); } set_current_block(NULL); } @@ -3167,7 +3156,7 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) { return ast_context()->ReturnInstruction(instr, expr->id()); } - LookupResult lookup(isolate()); + LookupResult lookup; GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, false); @@ -3287,7 +3276,7 @@ void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { literal, name, value, - function_strict_mode_flag()); + function_strict_mode()); AddInstruction(store); AddSimulate(key->id()); } else { @@ -3348,8 +3337,11 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { HValue* value = Pop(); if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal"); - elements = new(zone()) HLoadElements(literal); - AddInstruction(elements); + // Load the elements array before the first store. 
+ if (elements == NULL) { + elements = new(zone()) HLoadElements(literal); + AddInstruction(elements); + } HValue* key = AddInstruction( new(zone()) HConstant(Handle<Object>(Smi::FromInt(i)), @@ -3373,10 +3365,10 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { set_current_block(check_smi_only_elements); HCompareConstantEqAndBranch* smi_elements_check = new(zone()) HCompareConstantEqAndBranch(elements_kind, - FAST_ELEMENTS, + FAST_SMI_ONLY_ELEMENTS, Token::EQ_STRICT); - smi_elements_check->SetSuccessorAt(0, store_fast_edgesplit2); - smi_elements_check->SetSuccessorAt(1, store_generic); + smi_elements_check->SetSuccessorAt(0, store_generic); + smi_elements_check->SetSuccessorAt(1, store_fast_edgesplit2); current_block()->Finish(smi_elements_check); store_fast_edgesplit2->Finish(new(zone()) HGoto(store_fast)); @@ -3465,7 +3457,7 @@ HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object, object, name, value, - function_strict_mode_flag()); + function_strict_mode()); } @@ -3479,7 +3471,7 @@ HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object, Handle<String> name = Handle<String>::cast(key->handle()); ASSERT(!name.is_null()); - LookupResult lookup(isolate()); + LookupResult lookup; SmallMapList* types = expr->GetReceiverTypes(); bool is_monomorphic = expr->IsMonomorphic() && ComputeStoredField(types->first(), name, &lookup); @@ -3503,7 +3495,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr, HBasicBlock* join = NULL; for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) { Handle<Map> map = types->at(i); - LookupResult lookup(isolate()); + LookupResult lookup; if (ComputeStoredField(map, name, &lookup)) { if (count == 0) { AddInstruction(new(zone()) HCheckNonSmi(object)); // Only needed once. 
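The VisitArrayLiteral hunk above goes back to emitting HLoadElements only once, the first time an element store is generated, instead of reloading the literal's backing store on every iteration; the per-iteration reload in the code being reverted was presumably needed because elements-kind transitions (removed elsewhere in this revert) could swap out the backing store mid-literal. A rough standalone sketch of that load-once shape, in plain C++ with illustrative names (BackingStore, FillLiteral) that are not V8 classes:

    #include <memory>
    #include <vector>

    struct BackingStore {
      std::vector<int> slots;
    };

    // Fill a lazily created backing store: it is allocated the first time a
    // value is actually written, mirroring the "if (elements == NULL)" guard
    // restored in the hunk above.
    std::unique_ptr<BackingStore> FillLiteral(const std::vector<int>& values) {
      std::unique_ptr<BackingStore> elements;  // not loaded yet
      for (int value : values) {
        if (elements == nullptr) {
          elements = std::make_unique<BackingStore>();  // load once, before the first store
        }
        elements->slots.push_back(value);
      }
      return elements;  // stays null when there was nothing to store
    }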
@@ -3586,7 +3578,7 @@ void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) { ASSERT(!name.is_null()); SmallMapList* types = expr->GetReceiverTypes(); - LookupResult lookup(isolate()); + LookupResult lookup; if (expr->IsMonomorphic()) { instr = BuildStoreNamed(object, value, expr); @@ -3631,7 +3623,7 @@ void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var, HValue* value, int position, int ast_id) { - LookupResult lookup(isolate()); + LookupResult lookup; GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true); if (type == kUseCell) { Handle<GlobalObject> global(info()->global_object()); @@ -3650,7 +3642,7 @@ void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var, global_object, var->name(), value, - function_strict_mode_flag()); + function_strict_mode()); instr->set_position(position); AddInstruction(instr); ASSERT(instr->HasSideEffects()); @@ -3946,7 +3938,7 @@ HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj, Property* expr, Handle<Map> map, Handle<String> name) { - LookupResult lookup(isolate()); + LookupResult lookup; map->LookupInDescriptors(NULL, *name, &lookup); if (lookup.IsProperty() && lookup.type() == FIELD) { return BuildLoadNamedField(obj, @@ -4045,8 +4037,11 @@ HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements, HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object, HValue* key, HValue* val, - Handle<Map> map, + Expression* expr, bool is_store) { + ASSERT(expr->IsMonomorphic()); + Handle<Map> map = expr->GetMonomorphicReceiverType(); + AddInstruction(new(zone()) HCheckNonSmi(object)); HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map)); bool fast_smi_only_elements = map->has_fast_smi_only_elements(); bool fast_elements = map->has_fast_elements(); @@ -4096,6 +4091,7 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object, bool* has_side_effects) { *has_side_effects = false; AddInstruction(new(zone()) HCheckNonSmi(object)); + AddInstruction(HCheckInstanceType::NewIsSpecObject(object)); SmallMapList* maps = prop->GetReceiverTypes(); bool todo_external_array = false; @@ -4105,55 +4101,15 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object, type_todo[i] = false; } - // Elements_kind transition support. - MapHandleList transition_target(maps->length()); - // Collect possible transition targets. - MapHandleList possible_transitioned_maps(maps->length()); for (int i = 0; i < maps->length(); ++i) { - Handle<Map> map = maps->at(i); - ElementsKind elements_kind = map->elements_kind(); - if (elements_kind == FAST_DOUBLE_ELEMENTS || - elements_kind == FAST_ELEMENTS) { - possible_transitioned_maps.Add(map); + ASSERT(maps->at(i)->IsMap()); + type_todo[maps->at(i)->elements_kind()] = true; + if (maps->at(i)->elements_kind() + >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) { + todo_external_array = true; } } - // Get transition target for each map (NULL == no transition). 
- for (int i = 0; i < maps->length(); ++i) { - Handle<Map> map = maps->at(i); - Handle<Map> transitioned_map = - map->FindTransitionedMap(&possible_transitioned_maps); - transition_target.Add(transitioned_map); - } - int num_untransitionable_maps = 0; - Handle<Map> untransitionable_map; - for (int i = 0; i < maps->length(); ++i) { - Handle<Map> map = maps->at(i); - ASSERT(map->IsMap()); - if (!transition_target.at(i).is_null()) { - object = AddInstruction(new(zone()) HTransitionElementsKind( - object, map, transition_target.at(i))); - } else { - type_todo[map->elements_kind()] = true; - if (map->elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) { - todo_external_array = true; - } - num_untransitionable_maps++; - untransitionable_map = map; - } - } - - // If only one map is left after transitioning, handle this case - // monomorphically. - if (num_untransitionable_maps == 1) { - HInstruction* instr = AddInstruction(BuildMonomorphicElementAccess( - object, key, val, untransitionable_map, is_store)); - *has_side_effects |= instr->HasSideEffects(); - instr->set_position(position); - return is_store ? NULL : instr; - } - - AddInstruction(HCheckInstanceType::NewIsSpecObject(object)); HBasicBlock* join = graph()->CreateBasicBlock(); HInstruction* elements_kind_instr = @@ -4285,9 +4241,7 @@ HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj, ASSERT(!expr->IsPropertyName()); HInstruction* instr = NULL; if (expr->IsMonomorphic()) { - Handle<Map> map = expr->GetMonomorphicReceiverType(); - AddInstruction(new(zone()) HCheckNonSmi(obj)); - instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store); + instr = BuildMonomorphicElementAccess(obj, key, val, expr, is_store); } else if (expr->GetReceiverTypes() != NULL && !expr->GetReceiverTypes()->is_empty()) { return HandlePolymorphicElementAccess( @@ -4315,7 +4269,7 @@ HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object, object, key, value, - function_strict_mode_flag()); + function_strict_mode()); } bool HGraphBuilder::TryArgumentsAccess(Property* expr) { @@ -4557,7 +4511,7 @@ void HGraphBuilder::TraceInline(Handle<JSFunction> target, } -bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { +bool HGraphBuilder::TryInline(Call* expr) { if (!FLAG_use_inlining) return false; // The function call we are inlining is a method call if the call @@ -4585,9 +4539,9 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { return false; } + CompilationInfo* outer_info = info(); #if !defined(V8_TARGET_ARCH_IA32) // Target must be able to use caller's context. - CompilationInfo* outer_info = info(); if (target->context() != outer_info->closure()->context() || outer_info->scope()->contains_with() || outer_info->scope()->num_heap_slots() > 0) { @@ -4601,7 +4555,9 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { HEnvironment* env = environment(); int current_level = 1; while (env->outer() != NULL) { - if (current_level == Compiler::kMaxInliningLevels) { + if (current_level == (FLAG_limit_inlining + ? Compiler::kMaxInliningLevels + : 2 * Compiler::kMaxInliningLevels)) { TraceInline(target, caller, "inline depth limit reached"); return false; } @@ -4610,13 +4566,9 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { } // Don't inline recursive functions. 
- for (FunctionState* state = function_state(); - state != NULL; - state = state->outer()) { - if (state->compilation_info()->closure()->shared() == *target_shared) { - TraceInline(target, caller, "target is recursive"); - return false; - } + if (*target_shared == outer_info->closure()->shared()) { + TraceInline(target, caller, "target is recursive"); + return false; } // We don't want to add more than a certain number of nodes from inlining. @@ -4713,10 +4665,7 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { Handle<Code>(target_shared->code()), Handle<Context>(target->context()->global_context()), isolate()); - // The function state is new-allocated because we need to delete it - // in two different places. - FunctionState* target_state = - new FunctionState(this, &target_info, &target_oracle, drop_extra); + FunctionState target_state(this, &target_info, &target_oracle); HConstant* undefined = graph()->GetConstantUndefined(); HEnvironment* inner_env = @@ -4750,7 +4699,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { TraceInline(target, caller, "inline graph construction failed"); target_shared->DisableOptimization(*target); inline_bailout_ = true; - delete target_state; return true; } @@ -4766,11 +4714,9 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { ASSERT(function_return() != NULL); ASSERT(call_context()->IsEffect() || call_context()->IsValue()); if (call_context()->IsEffect()) { - current_block()->Goto(function_return(), drop_extra); + current_block()->Goto(function_return()); } else { - current_block()->AddLeaveInlined(undefined, - function_return(), - drop_extra); + current_block()->AddLeaveInlined(undefined, function_return()); } } else { // The graph builder assumes control can reach both branches of a @@ -4778,14 +4724,13 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { // simply jumping to the false target. // // TODO(3168478): refactor to avoid this. - ASSERT(call_context()->IsTest()); HBasicBlock* empty_true = graph()->CreateBasicBlock(); HBasicBlock* empty_false = graph()->CreateBasicBlock(); HBranch* test = new(zone()) HBranch(undefined, empty_true, empty_false); current_block()->Finish(test); - empty_true->Goto(inlined_test_context()->if_true(), drop_extra); - empty_false->Goto(inlined_test_context()->if_false(), drop_extra); + empty_true->Goto(inlined_test_context()->if_true()); + empty_false->Goto(inlined_test_context()->if_false()); } } @@ -4797,21 +4742,19 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { // Pop the return test context from the expression context stack. ASSERT(ast_context() == inlined_test_context()); ClearInlinedTestContext(); - delete target_state; // Forward to the real test context. 
if (if_true->HasPredecessor()) { if_true->SetJoinId(expr->id()); HBasicBlock* true_target = TestContext::cast(ast_context())->if_true(); - if_true->Goto(true_target, function_state()->drop_extra()); + if_true->Goto(true_target); } if (if_false->HasPredecessor()) { if_false->SetJoinId(expr->id()); HBasicBlock* false_target = TestContext::cast(ast_context())->if_false(); - if_false->Goto(false_target, function_state()->drop_extra()); + if_false->Goto(false_target); } set_current_block(NULL); - return true; } else if (function_return()->HasPredecessor()) { function_return()->SetJoinId(expr->id()); @@ -4819,7 +4762,7 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { } else { set_current_block(NULL); } - delete target_state; + return true; } @@ -5071,7 +5014,7 @@ void HGraphBuilder::VisitCall(Call* expr) { // If there is a global property cell for the name at compile time and // access check is not enabled we assume that the function will not change // and generate optimized code for calling the function. - LookupResult lookup(isolate()); + LookupResult lookup; GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false); if (type == kUseCell && !info()->global_object()->IsAccessCheckNeeded()) { @@ -5126,17 +5069,32 @@ void HGraphBuilder::VisitCall(Call* expr) { PushAndAdd(receiver); CHECK_ALIVE(VisitExpressions(expr->arguments())); AddInstruction(new(zone()) HCheckFunction(function, expr->target())); - if (TryInline(expr, true)) { // Drop function from environment. + if (TryInline(expr)) { + // The function is lingering in the deoptimization environment. + // Handle it by case analysis on the AST context. + if (ast_context()->IsEffect()) { + Drop(1); + } else if (ast_context()->IsValue()) { + HValue* result = Pop(); + Drop(1); + Push(result); + } else if (ast_context()->IsTest()) { + TestContext* context = TestContext::cast(ast_context()); + if (context->if_true()->HasPredecessor()) { + context->if_true()->last_environment()->Drop(1); + } + if (context->if_false()->HasPredecessor()) { + context->if_true()->last_environment()->Drop(1); + } + } else { + UNREACHABLE(); + } return; } else { call = PreProcessCall(new(zone()) HInvokeFunction(context, function, argument_count)); - call->set_position(expr->position()); - AddInstruction(call); - AddSimulate(expr->id()); Drop(1); // The function. - return ast_context()->ReturnValue(call); } } else { @@ -5346,6 +5304,7 @@ void HGraphBuilder::VisitBitNot(UnaryOperation* expr) { void HGraphBuilder::VisitNot(UnaryOperation* expr) { + // TODO(svenpanne) Perhaps a switch/virtual function is nicer here. if (ast_context()->IsTest()) { TestContext* context = TestContext::cast(ast_context()); VisitForControl(expr->expression(), @@ -5832,65 +5791,35 @@ Representation HGraphBuilder::ToRepresentation(TypeInfo info) { void HGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr, - HTypeof* typeof_expr, + Expression* sub_expr, Handle<String> check) { - // Note: The HTypeof itself is removed during canonicalization, if possible. 
- HValue* value = typeof_expr->value(); + CHECK_ALIVE(VisitForTypeOf(sub_expr)); + HValue* value = Pop(); HTypeofIsAndBranch* instr = new(zone()) HTypeofIsAndBranch(value, check); instr->set_position(expr->position()); return ast_context()->ReturnControl(instr, expr->id()); } -static bool MatchLiteralCompareNil(HValue* left, - Token::Value op, - HValue* right, - Handle<Object> nil, - HValue** expr) { - if (left->IsConstant() && - HConstant::cast(left)->handle().is_identical_to(nil) && - Token::IsEqualityOp(op)) { - *expr = right; +bool HGraphBuilder::TryLiteralCompare(CompareOperation* expr) { + Expression *sub_expr; + Handle<String> check; + if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) { + HandleLiteralCompareTypeof(expr, sub_expr, check); return true; } - return false; -} - -static bool MatchLiteralCompareTypeof(HValue* left, - Token::Value op, - HValue* right, - HTypeof** typeof_expr, - Handle<String>* check) { - if (left->IsTypeof() && - Token::IsEqualityOp(op) && - right->IsConstant() && - HConstant::cast(right)->HasStringValue()) { - *typeof_expr = HTypeof::cast(left); - *check = Handle<String>::cast(HConstant::cast(right)->handle()); + if (expr->IsLiteralCompareUndefined(&sub_expr)) { + HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue); return true; } - return false; -} - - -static bool IsLiteralCompareTypeof(HValue* left, - Token::Value op, - HValue* right, - HTypeof** typeof_expr, - Handle<String>* check) { - return MatchLiteralCompareTypeof(left, op, right, typeof_expr, check) || - MatchLiteralCompareTypeof(right, op, left, typeof_expr, check); -} + if (expr->IsLiteralCompareNull(&sub_expr)) { + HandleLiteralCompareNil(expr, sub_expr, kNullValue); + return true; + } -static bool IsLiteralCompareNil(HValue* left, - Token::Value op, - HValue* right, - Handle<Object> nil, - HValue** expr) { - return MatchLiteralCompareNil(left, op, right, nil, expr) || - MatchLiteralCompareNil(right, op, left, nil, expr); + return false; } @@ -5911,9 +5840,11 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) { return ast_context()->ReturnControl(instr, expr->id()); } + // Check for special cases that compare against literals. + if (TryLiteralCompare(expr)) return; + TypeInfo type_info = oracle()->CompareType(expr); // Check if this expression was ever executed according to type feedback. - // Note that for the special typeof/null/undefined cases we get unknown here. if (type_info.IsUninitialized()) { AddInstruction(new(zone()) HSoftDeoptimize); current_block()->MarkAsDeoptimizing(); @@ -5928,20 +5859,6 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) { HValue* left = Pop(); Token::Value op = expr->op(); - HTypeof* typeof_expr = NULL; - Handle<String> check; - if (IsLiteralCompareTypeof(left, op, right, &typeof_expr, &check)) { - return HandleLiteralCompareTypeof(expr, typeof_expr, check); - } - HValue* sub_expr = NULL; - Factory* f = graph()->isolate()->factory(); - if (IsLiteralCompareNil(left, op, right, f->undefined_value(), &sub_expr)) { - return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue); - } - if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) { - return HandleLiteralCompareNil(expr, sub_expr, kNullValue); - } - if (op == Token::INSTANCEOF) { // Check to see if the rhs of the instanceof is a global function not // residing in new space. 
If it is we assume that the function will stay the @@ -5954,7 +5871,7 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) { !info()->global_object()->IsAccessCheckNeeded()) { Handle<String> name = proxy->name(); Handle<GlobalObject> global(info()->global_object()); - LookupResult lookup(isolate()); + LookupResult lookup; global->Lookup(*name, &lookup); if (lookup.IsProperty() && lookup.type() == NORMAL && @@ -6030,11 +5947,13 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) { void HGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr, - HValue* value, + Expression* sub_expr, NilValue nil) { ASSERT(!HasStackOverflow()); ASSERT(current_block() != NULL); ASSERT(current_block()->HasPredecessor()); + CHECK_ALIVE(VisitForValue(sub_expr)); + HValue* value = Pop(); EqualityKind kind = expr->op() == Token::EQ_STRICT ? kStrictEquality : kNonStrictEquality; HIsNilAndBranch* instr = new(zone()) HIsNilAndBranch(value, kind, nil); @@ -6047,8 +5966,7 @@ void HGraphBuilder::VisitThisFunction(ThisFunction* expr) { ASSERT(!HasStackOverflow()); ASSERT(current_block() != NULL); ASSERT(current_block()->HasPredecessor()); - HThisFunction* self = new(zone()) HThisFunction( - function_state()->compilation_info()->closure()); + HThisFunction* self = new(zone()) HThisFunction; return ast_context()->ReturnInstruction(self, expr->id()); } @@ -6061,9 +5979,7 @@ void HGraphBuilder::VisitDeclaration(Declaration* decl) { void HGraphBuilder::HandleDeclaration(VariableProxy* proxy, VariableMode mode, FunctionLiteral* function) { - if (mode == LET || mode == CONST_HARMONY) { - return Bailout("unsupported harmony declaration"); - } + if (mode == LET) return Bailout("unsupported let declaration"); Variable* var = proxy->var(); switch (var->location()) { case Variable::UNALLOCATED: diff --git a/deps/v8/src/hydrogen.h b/deps/v8/src/hydrogen.h index 2d08dc8e7..b66042c2c 100644 --- a/deps/v8/src/hydrogen.h +++ b/deps/v8/src/hydrogen.h @@ -121,7 +121,7 @@ class HBasicBlock: public ZoneObject { void Finish(HControlInstruction* last); void FinishExit(HControlInstruction* instruction); - void Goto(HBasicBlock* block, bool drop_extra = false); + void Goto(HBasicBlock* block); int PredecessorIndexOf(HBasicBlock* predecessor) const; void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); } @@ -133,9 +133,7 @@ class HBasicBlock: public ZoneObject { // Add the inlined function exit sequence, adding an HLeaveInlined // instruction and updating the bailout environment. - void AddLeaveInlined(HValue* return_value, - HBasicBlock* target, - bool drop_extra = false); + void AddLeaveInlined(HValue* return_value, HBasicBlock* target); // If a target block is tagged as an inline function return, all // predecessors should contain the inlined exit sequence: @@ -605,18 +603,16 @@ class TestContext: public AstContext { }; -class FunctionState { +class FunctionState BASE_EMBEDDED { public: FunctionState(HGraphBuilder* owner, CompilationInfo* info, - TypeFeedbackOracle* oracle, - bool drop_extra); + TypeFeedbackOracle* oracle); ~FunctionState(); CompilationInfo* compilation_info() { return compilation_info_; } TypeFeedbackOracle* oracle() { return oracle_; } AstContext* call_context() { return call_context_; } - bool drop_extra() { return drop_extra_; } HBasicBlock* function_return() { return function_return_; } TestContext* test_context() { return test_context_; } void ClearInlinedTestContext() { @@ -636,10 +632,6 @@ class FunctionState { // inlined. NULL when not inlining. 
AstContext* call_context_; - // Indicate if we have to drop an extra value from the environment on - // return from inlined functions. - bool drop_extra_; - // When inlining in an effect of value context, this is the return block. // It is NULL otherwise. When inlining in a test context, there are a // pair of return blocks in the context. When not inlining, there is no @@ -736,8 +728,6 @@ class HGraphBuilder: public AstVisitor { TypeFeedbackOracle* oracle() const { return function_state()->oracle(); } - FunctionState* function_state() const { return function_state_; } - private: // Type of a member function that generates inline code for a native function. typedef void (HGraphBuilder::*InlineFunctionGenerator)(CallRuntime* call); @@ -756,6 +746,7 @@ class HGraphBuilder: public AstVisitor { static const int kMaxSourceSize = 600; // Simple accessors. + FunctionState* function_state() const { return function_state_; } void set_function_state(FunctionState* state) { function_state_ = state; } AstContext* ast_context() const { return ast_context_; } @@ -778,8 +769,8 @@ class HGraphBuilder: public AstVisitor { void ClearInlinedTestContext() { function_state()->ClearInlinedTestContext(); } - StrictModeFlag function_strict_mode_flag() { - return function_state()->compilation_info()->strict_mode_flag(); + bool function_strict_mode() { + return function_state()->compilation_info()->is_strict_mode(); } // Generators for inline runtime functions. @@ -892,7 +883,7 @@ class HGraphBuilder: public AstVisitor { // Try to optimize fun.apply(receiver, arguments) pattern. bool TryCallApply(Call* expr); - bool TryInline(Call* expr, bool drop_extra = false); + bool TryInline(Call* expr); bool TryInlineBuiltinFunction(Call* expr, HValue* receiver, Handle<Map> receiver_map, @@ -921,11 +912,12 @@ class HGraphBuilder: public AstVisitor { HValue* receiver, SmallMapList* types, Handle<String> name); + bool TryLiteralCompare(CompareOperation* expr); void HandleLiteralCompareTypeof(CompareOperation* expr, - HTypeof* typeof_expr, + Expression* sub_expr, Handle<String> check); void HandleLiteralCompareNil(CompareOperation* expr, - HValue* value, + Expression* sub_expr, NilValue nil); HStringCharCodeAt* BuildStringCharCodeAt(HValue* context, @@ -959,7 +951,7 @@ class HGraphBuilder: public AstVisitor { HInstruction* BuildMonomorphicElementAccess(HValue* object, HValue* key, HValue* val, - Handle<Map> map, + Expression* expr, bool is_store); HValue* HandlePolymorphicElementAccess(HValue* object, HValue* key, diff --git a/deps/v8/src/ia32/assembler-ia32-inl.h b/deps/v8/src/ia32/assembler-ia32-inl.h index 2e9fcb6c5..446aa3e2d 100644 --- a/deps/v8/src/ia32/assembler-ia32-inl.h +++ b/deps/v8/src/ia32/assembler-ia32-inl.h @@ -88,10 +88,10 @@ int RelocInfo::target_address_size() { } -void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) { +void RelocInfo::set_target_address(Address target) { Assembler::set_target_address_at(pc_, target); ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY); - if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) { + if (host() != NULL && IsCodeTarget(rmode_)) { Object* target_code = Code::GetCodeFromTargetAddress(target); host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( host(), this, HeapObject::cast(target_code)); @@ -117,13 +117,11 @@ Object** RelocInfo::target_object_address() { } -void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) { +void RelocInfo::set_target_object(Object* target) { ASSERT(IsCodeTarget(rmode_) 
|| rmode_ == EMBEDDED_OBJECT); Memory::Object_at(pc_) = target; CPU::FlushICache(pc_, sizeof(Address)); - if (mode == UPDATE_WRITE_BARRIER && - host() != NULL && - target->IsHeapObject()) { + if (host() != NULL && target->IsHeapObject()) { host()->GetHeap()->incremental_marking()->RecordWrite( host(), &Memory::Object_at(pc_), HeapObject::cast(target)); } @@ -153,13 +151,12 @@ JSGlobalPropertyCell* RelocInfo::target_cell() { } -void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell, - WriteBarrierMode mode) { +void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) { ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL); Address address = cell->address() + JSGlobalPropertyCell::kValueOffset; Memory::Address_at(pc_) = address; CPU::FlushICache(pc_, sizeof(Address)); - if (mode == UPDATE_WRITE_BARRIER && host() != NULL) { + if (host() != NULL) { // TODO(1550) We are passing NULL as a slot because cell can never be on // evacuation candidate. host()->GetHeap()->incremental_marking()->RecordWrite( diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc index 70e342d3d..53ade3a6c 100644 --- a/deps/v8/src/ia32/builtins-ia32.cc +++ b/deps/v8/src/ia32/builtins-ia32.cc @@ -915,6 +915,10 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { } +// Number of empty elements to allocate for an empty array. +static const int kPreallocatedArrayElements = 4; + + // Allocate an empty JSArray. The allocated array is put into the result // register. If the parameter initial_capacity is larger than zero an elements // backing store is allocated with this size and filled with the hole values. @@ -925,9 +929,10 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, Register scratch1, Register scratch2, Register scratch3, + int initial_capacity, Label* gc_required) { - const int initial_capacity = JSArray::kPreallocatedArrayElements; - STATIC_ASSERT(initial_capacity >= 0); + ASSERT(initial_capacity >= 0); + // Load the initial map from the array function. __ mov(scratch1, FieldOperand(array_function, JSFunction::kPrototypeOrInitialMapOffset)); @@ -985,6 +990,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, // Fill the FixedArray with the hole value. Inline the code if short. // Reconsider loop unfolding if kPreallocatedArrayElements gets changed. static const int kLoopUnfoldLimit = 4; + STATIC_ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit); if (initial_capacity <= kLoopUnfoldLimit) { // Use a scratch register here to have only one reloc info when unfolding // the loop. @@ -1147,6 +1153,7 @@ static void ArrayNativeCode(MacroAssembler* masm, ebx, ecx, edi, + kPreallocatedArrayElements, &prepare_generic_code_call); __ IncrementCounter(masm->isolate()->counters()->array_function_native(), 1); __ pop(ebx); @@ -1175,7 +1182,7 @@ static void ArrayNativeCode(MacroAssembler* masm, __ mov(eax, Operand(esp, i * kPointerSize)); __ mov(Operand(esp, (i + 1) * kPointerSize), eax); } - __ Drop(2); // Drop two stack slots. + __ add(esp, Immediate(2 * kPointerSize)); // Drop two stack slots. __ push(Immediate(0)); // Treat this as a call with argc of zero. 
__ jmp(&empty_array); diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc index d7d1d9c3c..1e886e202 100644 --- a/deps/v8/src/ia32/code-stubs-ia32.cc +++ b/deps/v8/src/ia32/code-stubs-ia32.cc @@ -34,7 +34,6 @@ #include "isolate.h" #include "jsregexp.h" #include "regexp-macro-assembler.h" -#include "stub-cache.h" namespace v8 { namespace internal { @@ -239,12 +238,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { // [esp + (3 * kPointerSize)]: literals array. // All sizes here are multiples of kPointerSize. - int elements_size = 0; - if (length_ > 0) { - elements_size = mode_ == CLONE_DOUBLE_ELEMENTS - ? FixedDoubleArray::SizeFor(length_) - : FixedArray::SizeFor(length_); - } + int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; int size = JSArray::kSize + elements_size; // Load boilerplate object into ecx and check if we need to create a @@ -267,9 +261,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { if (mode_ == CLONE_ELEMENTS) { message = "Expected (writable) fixed array"; expected_map = factory->fixed_array_map(); - } else if (mode_ == CLONE_DOUBLE_ELEMENTS) { - message = "Expected (writable) fixed double array"; - expected_map = factory->fixed_double_array_map(); } else { ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); message = "Expected copy-on-write fixed array"; @@ -302,24 +293,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx); // Copy the elements array. - if (mode_ == CLONE_ELEMENTS) { - for (int i = 0; i < elements_size; i += kPointerSize) { - __ mov(ebx, FieldOperand(ecx, i)); - __ mov(FieldOperand(edx, i), ebx); - } - } else { - ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS); - int i; - for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) { - __ mov(ebx, FieldOperand(ecx, i)); - __ mov(FieldOperand(edx, i), ebx); - } - while (i < elements_size) { - __ fld_d(FieldOperand(ecx, i)); - __ fstp_d(FieldOperand(edx, i)); - i += kDoubleSize; - } - ASSERT(i == elements_size); + for (int i = 0; i < elements_size; i += kPointerSize) { + __ mov(ebx, FieldOperand(ecx, i)); + __ mov(FieldOperand(edx, i), ebx); } } @@ -3882,11 +3858,11 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, Register scratch = scratch2; // Load the number string cache. - ExternalReference roots_array_start = - ExternalReference::roots_array_start(masm->isolate()); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); __ mov(number_string_cache, - Operand::StaticArray(scratch, times_pointer_size, roots_array_start)); + Operand::StaticArray(scratch, times_pointer_size, roots_address)); // Make the hash mask from the length of the number string cache. It // contains two elements (number and string) for each cache entry. 
__ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); @@ -4854,8 +4830,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) { static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff); static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); - ExternalReference roots_array_start = - ExternalReference::roots_array_start(masm->isolate()); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); ASSERT_EQ(object.code(), InstanceofStub::left().code()); ASSERT_EQ(function.code(), InstanceofStub::right().code()); @@ -4877,23 +4853,22 @@ void InstanceofStub::Generate(MacroAssembler* masm) { // Look up the function and the map in the instanceof cache. Label miss; __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); - __ cmp(function, Operand::StaticArray(scratch, - times_pointer_size, - roots_array_start)); + __ cmp(function, + Operand::StaticArray(scratch, times_pointer_size, roots_address)); __ j(not_equal, &miss, Label::kNear); __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); __ cmp(map, Operand::StaticArray( - scratch, times_pointer_size, roots_array_start)); + scratch, times_pointer_size, roots_address)); __ j(not_equal, &miss, Label::kNear); __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); __ mov(eax, Operand::StaticArray( - scratch, times_pointer_size, roots_array_start)); + scratch, times_pointer_size, roots_address)); __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); __ bind(&miss); } // Get the prototype of the function. - __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); + __ TryGetFunctionPrototype(function, prototype, scratch, &slow); // Check that the function prototype is a JS object. __ JumpIfSmi(prototype, &slow); @@ -4903,10 +4878,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) { // map and function. The cached answer will be set when it is known below. if (!HasCallSiteInlineCheck()) { __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); - __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start), - map); + __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); - __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start), + __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), function); } else { // The constants for the code patching are based on no push instructions @@ -4943,7 +4917,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ Set(eax, Immediate(0)); __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); __ mov(Operand::StaticArray(scratch, - times_pointer_size, roots_array_start), eax); + times_pointer_size, roots_address), eax); } else { // Get return address and delta to inlined map check. __ mov(eax, factory->true_value()); @@ -4965,7 +4939,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ Set(eax, Immediate(Smi::FromInt(1))); __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); __ mov(Operand::StaticArray( - scratch, times_pointer_size, roots_array_start), eax); + scratch, times_pointer_size, roots_address), eax); } else { // Get return address and delta to inlined map check. __ mov(eax, factory->false_value()); @@ -5754,11 +5728,11 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm, // Load the symbol table. 
Register symbol_table = c2; - ExternalReference roots_array_start = - ExternalReference::roots_array_start(masm->isolate()); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex)); __ mov(symbol_table, - Operand::StaticArray(scratch, times_pointer_size, roots_array_start)); + Operand::StaticArray(scratch, times_pointer_size, roots_address)); // Calculate capacity mask from the symbol table capacity. Register mask = scratch2; @@ -6541,67 +6515,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { // must always call a backup property check that is complete. // This function is safe to call if the receiver has fast properties. // Name must be a symbol and receiver must be a heap object. -void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, - Label* miss, - Label* done, - Register properties, - Handle<String> name, - Register r0) { - ASSERT(name->IsSymbol()); - - // If names of slots in range from 1 to kProbes - 1 for the hash value are - // not equal to the name and kProbes-th slot is not used (its name is the - // undefined value), it guarantees the hash table doesn't contain the - // property. It's true even if some slots represent deleted properties - // (their names are the null value). - for (int i = 0; i < kInlinedProbes; i++) { - // Compute the masked index: (hash + i + i * i) & mask. - Register index = r0; - // Capacity is smi 2^n. - __ mov(index, FieldOperand(properties, kCapacityOffset)); - __ dec(index); - __ and_(index, - Immediate(Smi::FromInt(name->Hash() + - StringDictionary::GetProbeOffset(i)))); - - // Scale the index by multiplying by the entry size. - ASSERT(StringDictionary::kEntrySize == 3); - __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. - Register entity_name = r0; - // Having undefined at this place means the name is not contained. - ASSERT_EQ(kSmiTagSize, 1); - __ mov(entity_name, Operand(properties, index, times_half_pointer_size, - kElementsStartOffset - kHeapObjectTag)); - __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); - __ j(equal, done); - - // Stop if found the property. - __ cmp(entity_name, Handle<String>(name)); - __ j(equal, miss); - - // Check if the entry name is not a symbol. - __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); - __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset), - kIsSymbolMask); - __ j(zero, miss); - } - - StringDictionaryLookupStub stub(properties, - r0, - r0, - StringDictionaryLookupStub::NEGATIVE_LOOKUP); - __ push(Immediate(Handle<Object>(name))); - __ push(Immediate(name->Hash())); - __ CallStub(&stub); - __ test(r0, r0); - __ j(not_zero, miss); - __ jmp(done); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup( +MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup( MacroAssembler* masm, Label* miss, Label* done, @@ -6835,13 +6749,6 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { { ebx, edx, ecx, EMIT_REMEMBERED_SET}, // KeyedStoreStubCompiler::GenerateStoreFastElement. 
{ edi, edx, ecx, EMIT_REMEMBERED_SET}, - // ElementsTransitionGenerator::GenerateSmiOnlyToObject - // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble - // and ElementsTransitionGenerator::GenerateDoubleToObject - { edx, ebx, edi, EMIT_REMEMBERED_SET}, - // ElementsTransitionGenerator::GenerateDoubleToObject - { eax, edx, esi, EMIT_REMEMBERED_SET}, - { edx, eax, edi, EMIT_REMEMBERED_SET}, // Null termination. { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} }; @@ -7084,6 +6991,7 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker( // Fall through when we need to inform the incremental marker. } + #undef __ } } // namespace v8::internal diff --git a/deps/v8/src/ia32/code-stubs-ia32.h b/deps/v8/src/ia32/code-stubs-ia32.h index 8775344a5..2a7d316f4 100644 --- a/deps/v8/src/ia32/code-stubs-ia32.h +++ b/deps/v8/src/ia32/code-stubs-ia32.h @@ -421,16 +421,7 @@ class StringDictionaryLookupStub: public CodeStub { void Generate(MacroAssembler* masm); - static void GenerateNegativeLookup(MacroAssembler* masm, - Label* miss, - Label* done, - Register properties, - Handle<String> name, - Register r0); - - // TODO(kmillikin): Eliminate this function when the stub cache is fully - // handlified. - MUST_USE_RESULT static MaybeObject* TryGenerateNegativeLookup( + MUST_USE_RESULT static MaybeObject* GenerateNegativeLookup( MacroAssembler* masm, Label* miss, Label* done, diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc index 7dc302bf6..f901b6f88 100644 --- a/deps/v8/src/ia32/codegen-ia32.cc +++ b/deps/v8/src/ia32/codegen-ia32.cc @@ -30,7 +30,6 @@ #if defined(V8_TARGET_ARCH_IA32) #include "codegen.h" -#include "macro-assembler.h" namespace v8 { namespace internal { @@ -266,263 +265,6 @@ OS::MemCopyFunction CreateMemCopyFunction() { #undef __ -// ------------------------------------------------------------------------- -// Code generators - -#define __ ACCESS_MASM(masm) - -void ElementsTransitionGenerator::GenerateSmiOnlyToObject( - MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- eax : value - // -- ebx : target map - // -- ecx : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - // Set transitioned map. - __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx); - __ RecordWriteField(edx, - HeapObject::kMapOffset, - ebx, - edi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); -} - - -void ElementsTransitionGenerator::GenerateSmiOnlyToDouble( - MacroAssembler* masm, Label* fail) { - // ----------- S t a t e ------------- - // -- eax : value - // -- ebx : target map - // -- ecx : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - Label loop, entry, convert_hole, gc_required; - __ push(eax); - __ push(ebx); - - __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); - __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset)); - - // Allocate new FixedDoubleArray. 
- // edx: receiver - // edi: length of source FixedArray (smi-tagged) - __ lea(esi, Operand(edi, times_4, FixedDoubleArray::kHeaderSize)); - __ AllocateInNewSpace(esi, eax, ebx, no_reg, &gc_required, TAG_OBJECT); - - // eax: destination FixedDoubleArray - // edi: number of elements - // edx: receiver - __ mov(FieldOperand(eax, HeapObject::kMapOffset), - Immediate(masm->isolate()->factory()->fixed_double_array_map())); - __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi); - __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset)); - // Replace receiver's backing store with newly created FixedDoubleArray. - __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax); - __ mov(ebx, eax); - __ RecordWriteField(edx, - JSObject::kElementsOffset, - ebx, - edi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - - __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset)); - - // Prepare for conversion loop. - ExternalReference canonical_the_hole_nan_reference = - ExternalReference::address_of_the_hole_nan(); - XMMRegister the_hole_nan = xmm1; - if (CpuFeatures::IsSupported(SSE2)) { - CpuFeatures::Scope use_sse2(SSE2); - __ movdbl(the_hole_nan, - Operand::StaticVariable(canonical_the_hole_nan_reference)); - } - __ jmp(&entry); - - // Call into runtime if GC is required. - __ bind(&gc_required); - // Restore registers before jumping into runtime. - __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); - __ pop(ebx); - __ pop(eax); - __ jmp(fail); - - // Convert and copy elements - // esi: source FixedArray - // edi: number of elements to convert/copy - __ bind(&loop); - __ sub(edi, Immediate(Smi::FromInt(1))); - __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize)); - // ebx: current element from source - // edi: index of current element - __ JumpIfNotSmi(ebx, &convert_hole); - - // Normal smi, convert it to double and store. - __ SmiUntag(ebx); - if (CpuFeatures::IsSupported(SSE2)) { - CpuFeatures::Scope fscope(SSE2); - __ cvtsi2sd(xmm0, ebx); - __ movdbl(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize), - xmm0); - } else { - __ push(ebx); - __ fild_s(Operand(esp, 0)); - __ pop(ebx); - __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize)); - } - __ jmp(&entry); - - // Found hole, store hole_nan_as_double instead. - __ bind(&convert_hole); - if (CpuFeatures::IsSupported(SSE2)) { - CpuFeatures::Scope use_sse2(SSE2); - __ movdbl(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize), - the_hole_nan); - } else { - __ fld_d(Operand::StaticVariable(canonical_the_hole_nan_reference)); - __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize)); - } - - __ bind(&entry); - __ test(edi, edi); - __ j(not_zero, &loop); - - __ pop(ebx); - __ pop(eax); - // eax: value - // ebx: target map - // Set transitioned map. - __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx); - __ RecordWriteField(edx, - HeapObject::kMapOffset, - ebx, - edi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - // Restore esi. 
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); -} - - -void ElementsTransitionGenerator::GenerateDoubleToObject( - MacroAssembler* masm, Label* fail) { - // ----------- S t a t e ------------- - // -- eax : value - // -- ebx : target map - // -- ecx : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - Label loop, entry, convert_hole, gc_required; - __ push(eax); - __ push(edx); - __ push(ebx); - - __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); - __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset)); - - // Allocate new FixedArray. - // ebx: length of source FixedDoubleArray (smi-tagged) - __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize)); - __ AllocateInNewSpace(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT); - - // eax: destination FixedArray - // ebx: number of elements - __ mov(FieldOperand(eax, HeapObject::kMapOffset), - Immediate(masm->isolate()->factory()->fixed_array_map())); - __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx); - __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); - - __ jmp(&entry); - - // Call into runtime if GC is required. - __ bind(&gc_required); - __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); - __ pop(ebx); - __ pop(edx); - __ pop(eax); - __ jmp(fail); - - // Box doubles into heap numbers. - // edi: source FixedDoubleArray - // eax: destination FixedArray - __ bind(&loop); - __ sub(ebx, Immediate(Smi::FromInt(1))); - // ebx: index of current element (smi-tagged) - uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); - __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32)); - __ j(equal, &convert_hole); - - // Non-hole double, copy value into a heap number. - __ AllocateHeapNumber(edx, esi, no_reg, &gc_required); - // edx: new heap number - if (CpuFeatures::IsSupported(SSE2)) { - CpuFeatures::Scope fscope(SSE2); - __ movdbl(xmm0, - FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize)); - __ movdbl(FieldOperand(edx, HeapNumber::kValueOffset), xmm0); - } else { - __ mov(esi, FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize)); - __ mov(FieldOperand(edx, HeapNumber::kValueOffset), esi); - __ mov(esi, FieldOperand(edi, ebx, times_4, offset)); - __ mov(FieldOperand(edx, HeapNumber::kValueOffset + kPointerSize), esi); - } - __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx); - __ mov(esi, ebx); - __ RecordWriteArray(eax, - edx, - esi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ jmp(&entry, Label::kNear); - - // Replace the-hole NaN with the-hole pointer. - __ bind(&convert_hole); - __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), - masm->isolate()->factory()->the_hole_value()); - - __ bind(&entry); - __ test(ebx, ebx); - __ j(not_zero, &loop); - - __ pop(ebx); - __ pop(edx); - // ebx: target map - // edx: receiver - // Set transitioned map. - __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx); - __ RecordWriteField(edx, - HeapObject::kMapOffset, - ebx, - edi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - // Replace receiver's backing store with newly created and filled FixedArray. - __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax); - __ RecordWriteField(edx, - JSObject::kElementsOffset, - eax, - edi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - - // Restore registers. 
- __ pop(eax); - __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); -} - -#undef __ - } } // namespace v8::internal #endif // V8_TARGET_ARCH_IA32 diff --git a/deps/v8/src/ia32/deoptimizer-ia32.cc b/deps/v8/src/ia32/deoptimizer-ia32.cc index db6c16b2d..02cc4ebd3 100644 --- a/deps/v8/src/ia32/deoptimizer-ia32.cc +++ b/deps/v8/src/ia32/deoptimizer-ia32.cc @@ -258,13 +258,16 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, Assembler::set_target_address_at(call_target_address, replacement_code->entry()); - unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( - unoptimized_code, call_target_address, replacement_code); + RelocInfo rinfo(call_target_address, + RelocInfo::CODE_TARGET, + 0, + unoptimized_code); + unoptimized_code->GetHeap()->incremental_marking()->RecordWriteIntoCode( + unoptimized_code, &rinfo, replacement_code); } -void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, - Address pc_after, +void Deoptimizer::RevertStackCheckCodeAt(Address pc_after, Code* check_code, Code* replacement_code) { Address call_target_address = pc_after - kIntSize; @@ -280,8 +283,8 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, Assembler::set_target_address_at(call_target_address, check_code->entry()); - check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( - unoptimized_code, call_target_address, check_code); + check_code->GetHeap()->incremental_marking()-> + RecordCodeTargetPatch(call_target_address, check_code); } diff --git a/deps/v8/src/ia32/disasm-ia32.cc b/deps/v8/src/ia32/disasm-ia32.cc index da2239011..04edc5f42 100644 --- a/deps/v8/src/ia32/disasm-ia32.cc +++ b/deps/v8/src/ia32/disasm-ia32.cc @@ -179,10 +179,6 @@ class InstructionTable { public: InstructionTable(); const InstructionDesc& Get(byte x) const { return instructions_[x]; } - static InstructionTable* get_instance() { - static InstructionTable table; - return &table; - } private: InstructionDesc instructions_[256]; @@ -263,13 +259,15 @@ void InstructionTable::AddJumpConditionalShort() { } +static InstructionTable instruction_table; + + // The IA32 disassembler implementation. class DisassemblerIA32 { public: DisassemblerIA32(const NameConverter& converter, bool abort_on_unimplemented = true) : converter_(converter), - instruction_table_(InstructionTable::get_instance()), tmp_buffer_pos_(0), abort_on_unimplemented_(abort_on_unimplemented) { tmp_buffer_[0] = '\0'; @@ -283,11 +281,11 @@ class DisassemblerIA32 { private: const NameConverter& converter_; - InstructionTable* instruction_table_; v8::internal::EmbeddedVector<char, 128> tmp_buffer_; unsigned int tmp_buffer_pos_; bool abort_on_unimplemented_; + enum { eax = 0, ecx = 1, @@ -886,7 +884,7 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer, } bool processed = true; // Will be set to false if the current instruction // is not in 'instructions' table. - const InstructionDesc& idesc = instruction_table_->Get(*data); + const InstructionDesc& idesc = instruction_table.Get(*data); switch (idesc.type) { case ZERO_OPERANDS_INSTR: AppendToBuffer(idesc.mnem); diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc index de5dc06eb..33d5cabad 100644 --- a/deps/v8/src/ia32/full-codegen-ia32.cc +++ b/deps/v8/src/ia32/full-codegen-ia32.cc @@ -266,10 +266,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) { // constant. 
if (scope()->is_function_scope() && scope()->function() != NULL) { int ignored = 0; - VariableProxy* proxy = scope()->function(); - ASSERT(proxy->var()->mode() == CONST || - proxy->var()->mode() == CONST_HARMONY); - EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored); + EmitDeclaration(scope()->function(), CONST, NULL, &ignored); } VisitDeclarations(scope()->declarations()); } @@ -714,8 +711,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, // need to "declare" it at runtime to make sure it actually exists in the // local context. Variable* variable = proxy->var(); - bool binding_needs_init = - mode == CONST || mode == CONST_HARMONY || mode == LET; switch (variable->location()) { case Variable::UNALLOCATED: ++(*global_count); @@ -727,7 +722,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, Comment cmnt(masm_, "[ Declaration"); VisitForAccumulatorValue(function); __ mov(StackOperand(variable), result_register()); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { Comment cmnt(masm_, "[ Declaration"); __ mov(StackOperand(variable), Immediate(isolate()->factory()->the_hole_value())); @@ -759,7 +754,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); PrepareForBailoutForId(proxy->id(), NO_REGISTERS); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { Comment cmnt(masm_, "[ Declaration"); __ mov(ContextOperand(esi, variable->index()), Immediate(isolate()->factory()->the_hole_value())); @@ -772,13 +767,9 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, Comment cmnt(masm_, "[ Declaration"); __ push(esi); __ push(Immediate(variable->name())); - // Declaration nodes are always introduced in one of four modes. - ASSERT(mode == VAR || - mode == CONST || - mode == CONST_HARMONY || - mode == LET); - PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY) - ? READ_ONLY : NONE; + // Declaration nodes are always introduced in one of three modes. + ASSERT(mode == VAR || mode == CONST || mode == LET); + PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE; __ push(Immediate(Smi::FromInt(attr))); // Push initial value, if any. // Note: For variables we must not push an initial value (such as @@ -787,7 +778,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, increment_stack_height(3); if (function != NULL) { VisitForStackValue(function); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { __ push(Immediate(isolate()->factory()->the_hole_value())); increment_stack_height(); } else { @@ -929,17 +920,11 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ push(eax); increment_stack_height(); - // Check for proxies. - Label call_runtime; - STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); - __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx); - __ j(below_equal, &call_runtime); - // Check cache validity in generated code. This is a fast case for // the JSObject::IsSimpleEnum cache validity checks. If we cannot // guarantee cache validity, call the runtime system to check cache // validity or get the property names in a fixed array. - Label next; + Label next, call_runtime; __ mov(ecx, eax); __ bind(&next); @@ -1010,17 +995,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ jmp(&loop); // We got a fixed array in register eax. Iterate through that. 
- Label non_proxy; __ bind(&fixed_array); - __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check - __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object - STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); - __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx); - __ j(above, &non_proxy); - __ mov(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy - __ bind(&non_proxy); - __ push(ebx); // Smi - __ push(eax); // Array + __ push(Immediate(Smi::FromInt(0))); // Map (0) - force slow check. + __ push(eax); __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset)); __ push(eax); // Fixed array length (as smi). __ push(Immediate(Smi::FromInt(0))); // Initial index. @@ -1037,23 +1014,17 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ mov(ebx, Operand(esp, 2 * kPointerSize)); __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize)); - // Get the expected map from the stack or a smi in the + // Get the expected map from the stack or a zero map in the // permanent slow case into register edx. __ mov(edx, Operand(esp, 3 * kPointerSize)); // Check if the expected map still matches that of the enumerable. - // If not, we may have to filter the key. + // If not, we have to filter the key. Label update_each; __ mov(ecx, Operand(esp, 4 * kPointerSize)); __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset)); __ j(equal, &update_each, Label::kNear); - // For proxies, no filtering is done. - // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. - ASSERT(Smi::FromInt(0) == 0); - __ test(edx, edx); - __ j(zero, &update_each); - // Convert the entry to a string or null if it isn't a property // anymore. If the property has been removed while iterating, we // just skip it. @@ -1108,7 +1079,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode_flag()); + FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode); __ push(Immediate(info)); __ CallStub(&stub); } else { @@ -1138,7 +1109,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, Scope* s = scope(); while (s != NULL) { if (s->num_heap_slots() > 0) { - if (s->calls_non_strict_eval()) { + if (s->calls_eval()) { // Check that extension is NULL. __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); @@ -1152,7 +1123,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, // If no outer scope calls eval, we do not need to check more // context extensions. If we have reached an eval scope, we check // all extensions from this point. - if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; + if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; s = s->outer_scope(); } @@ -1197,7 +1168,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { if (s->num_heap_slots() > 0) { - if (s->calls_non_strict_eval()) { + if (s->calls_eval()) { // Check that extension is NULL. 
__ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); @@ -1235,13 +1206,12 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, Variable* local = var->local_if_not_shadowed(); __ mov(eax, ContextSlotOperandCheckExtensions(local, slow)); if (local->mode() == CONST || - local->mode() == CONST_HARMONY || local->mode() == LET) { __ cmp(eax, isolate()->factory()->the_hole_value()); __ j(not_equal, done); if (local->mode() == CONST) { __ mov(eax, isolate()->factory()->undefined_value()); - } else { // LET || CONST_HARMONY + } else { // LET __ push(Immediate(var->name())); __ CallRuntime(Runtime::kThrowReferenceError, 1); } @@ -1277,7 +1247,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { Comment cmnt(masm_, var->IsContextSlot() ? "Context variable" : "Stack variable"); - if (!var->binding_needs_init()) { + if (var->mode() != LET && var->mode() != CONST) { context()->Plug(var); } else { // Let and const need a read barrier. @@ -1285,14 +1255,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { GetVar(eax, var); __ cmp(eax, isolate()->factory()->the_hole_value()); __ j(not_equal, &done, Label::kNear); - if (var->mode() == LET || var->mode() == CONST_HARMONY) { - // Throw a reference error when using an uninitialized let/const - // binding in harmony mode. + if (var->mode() == LET) { __ push(Immediate(var->name())); __ CallRuntime(Runtime::kThrowReferenceError, 1); - } else { - // Uninitalized const bindings outside of harmony mode are unholed. - ASSERT(var->mode() == CONST); + } else { // CONST __ mov(eax, isolate()->factory()->undefined_value()); } __ bind(&done); @@ -1482,18 +1448,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ZoneList<Expression*>* subexprs = expr->values(); int length = subexprs->length(); - Handle<FixedArray> constant_elements = expr->constant_elements(); - ASSERT_EQ(2, constant_elements->length()); - ElementsKind constant_elements_kind = - static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); - Handle<FixedArrayBase> constant_elements_values( - FixedArrayBase::cast(constant_elements->get(1))); __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset)); __ push(Immediate(Smi::FromInt(expr->literal_index()))); - __ push(Immediate(constant_elements)); - if (constant_elements_values->map() == + __ push(Immediate(expr->constant_elements())); + if (expr->constant_elements()->map() == isolate()->heap()->fixed_cow_array_map()) { ASSERT(expr->depth() == 1); FastCloneShallowArrayStub stub( @@ -1505,14 +1465,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); } else { - ASSERT(constant_elements_kind == FAST_ELEMENTS || - constant_elements_kind == FAST_SMI_ONLY_ELEMENTS || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - constant_elements_kind == FAST_DOUBLE_ELEMENTS - ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS - : FastCloneShallowArrayStub::CLONE_ELEMENTS; - FastCloneShallowArrayStub stub(mode, length); + FastCloneShallowArrayStub stub( + FastCloneShallowArrayStub::CLONE_ELEMENTS, length); __ CallStub(&stub); } @@ -1538,61 +1492,22 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { // Store the subexpression value in the array's elements. __ mov(ebx, Operand(esp, 0)); // Copy of array literal. 
- __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset)); __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset)); int offset = FixedArray::kHeaderSize + (i * kPointerSize); - - Label element_done; - Label double_elements; - Label smi_element; - Label slow_elements; - Label fast_elements; - __ CheckFastElements(edi, &double_elements); - - // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS - __ JumpIfSmi(result_register(), &smi_element); - __ CheckFastSmiOnlyElements(edi, &fast_elements, Label::kNear); - - // Store into the array literal requires a elements transition. Call into - // the runtime. - __ bind(&slow_elements); - __ push(Operand(esp, 0)); // Copy of array literal. - __ push(Immediate(Smi::FromInt(i))); - __ push(result_register()); - __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes - __ push(Immediate(Smi::FromInt(strict_mode_flag()))); // Strict mode. - __ CallRuntime(Runtime::kSetProperty, 5); - __ jmp(&element_done); - - // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. - __ bind(&double_elements); - __ mov(ecx, Immediate(Smi::FromInt(i))); - __ StoreNumberToDoubleElements(result_register(), - ebx, - ecx, - edx, - xmm0, - &slow_elements, - false); - __ jmp(&element_done); - - // Array literal has ElementsKind of FAST_ELEMENTS and value is an object. - __ bind(&fast_elements); __ mov(FieldOperand(ebx, offset), result_register()); + + Label no_map_change; + __ JumpIfSmi(result_register(), &no_map_change); // Update the write barrier for the array store. __ RecordWriteField(ebx, offset, result_register(), ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); - __ jmp(&element_done); - - // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or - // FAST_ELEMENTS, and value is Smi. - __ bind(&smi_element); - __ mov(FieldOperand(ebx, offset), result_register()); - // Fall through - - __ bind(&element_done); + __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset)); + __ CheckFastSmiOnlyElements(edi, &no_map_change, Label::kNear); + __ push(Operand(esp, 0)); + __ CallRuntime(Runtime::kNonSmiElementStored, 1); + __ bind(&no_map_change); PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); } @@ -1975,9 +1890,8 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, } } - } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { - // Assignment to var or initializing assignment to let/const - // in harmony mode. + } else if (var->mode() != CONST) { + // Assignment to var or initializing assignment to let. if (var->IsStackAllocated() || var->IsContextSlot()) { MemOperand location = VarOperand(var, ecx); if (FLAG_debug_code && op == Token::INIT_LET) { @@ -2190,7 +2104,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET); } CallFunctionStub stub(arg_count, flags); - __ CallStub(&stub, expr->id()); + __ CallStub(&stub); if (record_call_target) { // There is a one element cache in the instruction stream. 
#ifdef DEBUG @@ -2867,10 +2781,9 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) { __ bind(&heapnumber_allocated); __ PrepareCallCFunction(1, ebx); - __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_INDEX)); - __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset)); - __ mov(Operand(esp, 0), eax); - __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); + __ mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address())); + __ CallCFunction(ExternalReference::random_uint32_function(isolate()), + 1); // Convert 32 random bits in eax to 0.(32 random bits) in a double // by computing: @@ -4234,25 +4147,33 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::EQ_STRICT: case Token::EQ: cc = equal; + __ pop(edx); break; case Token::LT: cc = less; + __ pop(edx); break; case Token::GT: - cc = greater; + // Reverse left and right sizes to obtain ECMA-262 conversion order. + cc = less; + __ mov(edx, result_register()); + __ pop(eax); break; case Token::LTE: - cc = less_equal; + // Reverse left and right sizes to obtain ECMA-262 conversion order. + cc = greater_equal; + __ mov(edx, result_register()); + __ pop(eax); break; case Token::GTE: cc = greater_equal; + __ pop(edx); break; case Token::IN: case Token::INSTANCEOF: default: UNREACHABLE(); } - __ pop(edx); decrement_stack_height(); bool inline_smi_code = ShouldInlineSmiCase(op); diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc index 116893205..8a98b179d 100644 --- a/deps/v8/src/ia32/ic-ia32.cc +++ b/deps/v8/src/ia32/ic-ia32.cc @@ -860,10 +860,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, // The generated code does not accept smi keys. // The generated code falls through if both probes miss. -void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, - int argc, - Code::Kind kind, - Code::ExtraICState extra_state) { +static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, + int argc, + Code::Kind kind, + Code::ExtraICState extra_ic_state) { // ----------- S t a t e ------------- // -- ecx : name // -- edx : receiver @@ -873,11 +873,11 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, // Probe the stub cache. Code::Flags flags = Code::ComputeFlags(kind, MONOMORPHIC, - extra_state, + extra_ic_state, NORMAL, argc); - Isolate* isolate = masm->isolate(); - isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, eax); + Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, + eax); // If the stub cache probing failed, the receiver might be a value. // For value objects, we use the map of the prototype objects for @@ -903,9 +903,9 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, // Check for boolean. __ bind(&non_string); - __ cmp(edx, isolate->factory()->true_value()); + __ cmp(edx, FACTORY->true_value()); __ j(equal, &boolean); - __ cmp(edx, isolate->factory()->false_value()); + __ cmp(edx, FACTORY->false_value()); __ j(not_equal, &miss); __ bind(&boolean); StubCompiler::GenerateLoadGlobalFunctionPrototype( @@ -913,7 +913,8 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, // Probe the stub cache for the value object. 
__ bind(&probe); - isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, no_reg); + Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, + no_reg); __ bind(&miss); } @@ -943,9 +944,8 @@ static void GenerateFunctionTailCall(MacroAssembler* masm, NullCallWrapper(), CALL_AS_METHOD); } - // The generated code falls through if the call should be handled by runtime. -void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { +static void GenerateCallNormal(MacroAssembler* masm, int argc) { // ----------- S t a t e ------------- // -- ecx : name // -- esp[0] : return address @@ -969,10 +969,10 @@ void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { } -void CallICBase::GenerateMiss(MacroAssembler* masm, - int argc, - IC::UtilityId id, - Code::ExtraICState extra_state) { +static void GenerateCallMiss(MacroAssembler* masm, + int argc, + IC::UtilityId id, + Code::ExtraICState extra_ic_state) { // ----------- S t a t e ------------- // -- ecx : name // -- esp[0] : return address @@ -1029,7 +1029,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, } // Invoke the function. - CallKind call_kind = CallICBase::Contextual::decode(extra_state) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state) ? CALL_AS_FUNCTION : CALL_AS_METHOD; ParameterCount actual(argc); @@ -1043,7 +1043,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc, - Code::ExtraICState extra_state) { + Code::ExtraICState extra_ic_state) { // ----------- S t a t e ------------- // -- ecx : name // -- esp[0] : return address @@ -1054,10 +1054,38 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, // Get the receiver of the function from the stack; 1 ~ return address. __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); - CallICBase::GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, - extra_state); + GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state); + + GenerateMiss(masm, argc, extra_ic_state); +} + + +void CallIC::GenerateNormal(MacroAssembler* masm, int argc) { + // ----------- S t a t e ------------- + // -- ecx : name + // -- esp[0] : return address + // -- esp[(argc - n) * 4] : arg[n] (zero-based) + // -- ... + // -- esp[(argc + 1) * 4] : receiver + // ----------------------------------- - GenerateMiss(masm, argc, extra_state); + GenerateCallNormal(masm, argc); + GenerateMiss(masm, argc, Code::kNoExtraICState); +} + + +void CallIC::GenerateMiss(MacroAssembler* masm, + int argc, + Code::ExtraICState extra_ic_state) { + // ----------- S t a t e ------------- + // -- ecx : name + // -- esp[0] : return address + // -- esp[(argc - n) * 4] : arg[n] (zero-based) + // -- ... + // -- esp[(argc + 1) * 4] : receiver + // ----------------------------------- + + GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state); } @@ -1159,8 +1187,10 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { __ bind(&lookup_monomorphic_cache); __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1); - CallICBase::GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC, - Code::kNoExtraICState); + GenerateMonomorphicCacheProbe(masm, + argc, + Code::KEYED_CALL_IC, + Code::kNoExtraICState); // Fall through on miss. 
__ bind(&slow_call); @@ -1223,12 +1253,25 @@ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) { __ JumpIfSmi(ecx, &miss); Condition cond = masm->IsObjectStringType(ecx, eax, eax); __ j(NegateCondition(cond), &miss); - CallICBase::GenerateNormal(masm, argc); + GenerateCallNormal(masm, argc); __ bind(&miss); GenerateMiss(masm, argc); } +void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) { + // ----------- S t a t e ------------- + // -- ecx : name + // -- esp[0] : return address + // -- esp[(argc - n) * 4] : arg[n] (zero-based) + // -- ... + // -- esp[(argc + 1) * 4] : receiver + // ----------------------------------- + + GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState); +} + + void LoadIC::GenerateMegamorphic(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- eax : receiver @@ -1537,51 +1580,6 @@ void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { } -void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- ebx : target map - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - // Must return the modified receiver in eax. - if (!FLAG_trace_elements_transitions) { - Label fail; - ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail); - __ mov(eax, edx); - __ Ret(); - __ bind(&fail); - } - - __ pop(ebx); - __ push(edx); - __ push(ebx); // return address - __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1); -} - - -void KeyedStoreIC::GenerateTransitionElementsDoubleToObject( - MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- ebx : target map - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - // Must return the modified receiver in eax. - if (!FLAG_trace_elements_transitions) { - Label fail; - ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail); - __ mov(eax, edx); - __ Ret(); - __ bind(&fail); - } - - __ pop(ebx); - __ push(edx); - __ push(ebx); // return address - __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1); -} - - #undef __ @@ -1593,9 +1591,11 @@ Condition CompareIC::ComputeCondition(Token::Value op) { case Token::LT: return less; case Token::GT: - return greater; + // Reverse left and right operands to obtain ECMA-262 conversion order. + return less; case Token::LTE: - return less_equal; + // Reverse left and right operands to obtain ECMA-262 conversion order. 
+ return greater_equal; case Token::GTE: return greater_equal; default: diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc index d4cbbcec8..9e1fd34af 100644 --- a/deps/v8/src/ia32/lithium-codegen-ia32.cc +++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc @@ -355,12 +355,6 @@ int LCodeGen::ToInteger32(LConstantOperand* op) const { } -double LCodeGen::ToDouble(LConstantOperand* op) const { - Handle<Object> value = chunk_->LookupLiteral(op); - return value->Number(); -} - - Immediate LCodeGen::ToImmediate(LOperand* op) { LConstantOperand* const_op = LConstantOperand::cast(op); Handle<Object> literal = chunk_->LookupLiteral(const_op); @@ -1580,40 +1574,32 @@ Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { } +void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) { + if (right->IsConstantOperand()) { + __ cmp(ToOperand(left), ToImmediate(right)); + } else { + __ cmp(ToRegister(left), ToOperand(right)); + } +} + + void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { LOperand* left = instr->InputAt(0); LOperand* right = instr->InputAt(1); int false_block = chunk_->LookupDestination(instr->false_block_id()); int true_block = chunk_->LookupDestination(instr->true_block_id()); - Condition cc = TokenToCondition(instr->op(), instr->is_double()); - if (left->IsConstantOperand() && right->IsConstantOperand()) { - // We can statically evaluate the comparison. - double left_val = ToDouble(LConstantOperand::cast(left)); - double right_val = ToDouble(LConstantOperand::cast(right)); - int next_block = - EvalComparison(instr->op(), left_val, right_val) ? true_block - : false_block; - EmitGoto(next_block); + if (instr->is_double()) { + // Don't base result on EFLAGS when a NaN is involved. Instead + // jump to the false block. + __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); + __ j(parity_even, chunk_->GetAssemblyLabel(false_block)); } else { - if (instr->is_double()) { - // Don't base result on EFLAGS when a NaN is involved. Instead - // jump to the false block. - __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); - __ j(parity_even, chunk_->GetAssemblyLabel(false_block)); - } else { - if (right->IsConstantOperand()) { - __ cmp(ToRegister(left), ToImmediate(right)); - } else if (left->IsConstantOperand()) { - __ cmp(ToOperand(right), ToImmediate(left)); - // We transposed the operands. Reverse the condition. - cc = ReverseCondition(cc); - } else { - __ cmp(ToRegister(left), ToOperand(right)); - } - } - EmitBranch(true_block, false_block, cc); + EmitCmpI(left, right); } + + Condition cc = TokenToCondition(instr->op(), instr->is_double()); + EmitBranch(true_block, false_block, cc); } @@ -2043,6 +2029,9 @@ void LCodeGen::DoCmpT(LCmpT* instr) { CallCode(ic, RelocInfo::CODE_TARGET, instr); Condition condition = ComputeCompareCondition(op); + if (op == Token::GT || op == Token::LTE) { + condition = ReverseCondition(condition); + } Label true_value, done; __ test(eax, Operand(eax)); __ j(condition, &true_value, Label::kNear); @@ -2127,18 +2116,12 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { __ mov(FieldOperand(object, offset), value); // Cells are always in the remembered set. - if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? 
OMIT_SMI_CHECK : INLINE_SMI_CHECK; - __ RecordWriteField(object, - offset, - value, - address, - kSaveFPRegs, - OMIT_REMEMBERED_SET, - check_needed); - } + __ RecordWriteField(object, + offset, + value, + address, + kSaveFPRegs, + OMIT_REMEMBERED_SET); } @@ -2166,19 +2149,10 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { Register context = ToRegister(instr->context()); Register value = ToRegister(instr->value()); __ mov(ContextOperand(context, instr->slot_index()), value); - if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; + if (instr->needs_write_barrier()) { Register temp = ToRegister(instr->TempAt(0)); int offset = Context::SlotOffset(instr->slot_index()); - __ RecordWriteContextSlot(context, - offset, - value, - temp, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteContextSlot(context, offset, value, temp, kSaveFPRegs); } } @@ -2199,7 +2173,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result, Register object, Handle<Map> type, Handle<String> name) { - LookupResult lookup(isolate()); + LookupResult lookup; type->LookupInDescriptors(NULL, *name, &lookup); ASSERT(lookup.IsProperty() && (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION)); @@ -2640,7 +2614,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) { void LCodeGen::DoThisFunction(LThisFunction* instr) { Register result = ToRegister(instr->result()); - LoadHeapObject(result, instr->hydrogen()->closure()); + __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); } @@ -3172,36 +3146,21 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } // Do the store. - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; if (instr->is_in_object()) { __ mov(FieldOperand(object, offset), value); - if (instr->hydrogen()->NeedsWriteBarrier()) { + if (instr->needs_write_barrier()) { Register temp = ToRegister(instr->TempAt(0)); // Update the write barrier for the object for in-object properties. - __ RecordWriteField(object, - offset, - value, - temp, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteField(object, offset, value, temp, kSaveFPRegs); } } else { Register temp = ToRegister(instr->TempAt(0)); __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset)); __ mov(FieldOperand(temp, offset), value); - if (instr->hydrogen()->NeedsWriteBarrier()) { + if (instr->needs_write_barrier()) { // Update the write barrier for the properties array. // object is used as a scratch register. - __ RecordWriteField(temp, - offset, - value, - object, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteField(temp, offset, value, object, kSaveFPRegs); } } } @@ -3300,21 +3259,13 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { } if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; // Compute address of modified element and store it into key register. 
__ lea(key, FieldOperand(elements, key, times_pointer_size, FixedArray::kHeaderSize)); - __ RecordWrite(elements, - key, - value, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWrite(elements, key, value, kSaveFPRegs); } } @@ -3352,48 +3303,6 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { } -void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { - Register object_reg = ToRegister(instr->object()); - Register new_map_reg = ToRegister(instr->new_map_reg()); - - Handle<Map> from_map = instr->original_map(); - Handle<Map> to_map = instr->transitioned_map(); - ElementsKind from_kind = from_map->elements_kind(); - ElementsKind to_kind = to_map->elements_kind(); - - Label not_applicable; - __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); - __ j(not_equal, ¬_applicable); - __ mov(new_map_reg, to_map); - if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) { - Register object_reg = ToRegister(instr->object()); - __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); - // Write barrier. - ASSERT_NE(instr->temp_reg(), NULL); - __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, - ToRegister(instr->temp_reg()), kDontSaveFPRegs); - } else if (from_kind == FAST_SMI_ONLY_ELEMENTS && - to_kind == FAST_DOUBLE_ELEMENTS) { - Register fixed_object_reg = ToRegister(instr->temp_reg()); - ASSERT(fixed_object_reg.is(edx)); - ASSERT(new_map_reg.is(ebx)); - __ mov(fixed_object_reg, object_reg); - CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(), - RelocInfo::CODE_TARGET, instr); - } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) { - Register fixed_object_reg = ToRegister(instr->temp_reg()); - ASSERT(fixed_object_reg.is(edx)); - ASSERT(new_map_reg.is(ebx)); - __ mov(fixed_object_reg, object_reg); - CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(), - RelocInfo::CODE_TARGET, instr); - } else { - UNREACHABLE(); - } - __ bind(¬_applicable); -} - - void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { class DeferredStringCharCodeAt: public LDeferredCode { public: @@ -4186,17 +4095,11 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { ASSERT(ToRegister(instr->context()).is(esi)); - - Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements(); - ASSERT_EQ(2, constant_elements->length()); - ElementsKind constant_elements_kind = - static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); - // Setup the parameters to the stub/runtime call. __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); - __ push(Immediate(constant_elements)); + __ push(Immediate(instr->hydrogen()->constant_elements())); // Pick the right runtime function or stub to call. int length = instr->hydrogen()->length(); @@ -4212,9 +4115,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); } else { FastCloneShallowArrayStub::Mode mode = - constant_elements_kind == FAST_DOUBLE_ELEMENTS - ? 
FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS - : FastCloneShallowArrayStub::CLONE_ELEMENTS; + FastCloneShallowArrayStub::CLONE_ELEMENTS; FastCloneShallowArrayStub stub(mode, length); CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } @@ -4313,7 +4214,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { Handle<SharedFunctionInfo> shared_info = instr->shared_info(); bool pretenure = instr->hydrogen()->pretenure(); if (!pretenure && shared_info->num_literals() == 0) { - FastNewClosureStub stub(shared_info->strict_mode_flag()); + FastNewClosureStub stub( + shared_info->strict_mode() ? kStrictMode : kNonStrictMode); __ push(Immediate(shared_info)); CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } else { @@ -4345,11 +4247,12 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { Label* true_label = chunk_->GetAssemblyLabel(true_block); Label* false_label = chunk_->GetAssemblyLabel(false_block); - Condition final_branch_condition = - EmitTypeofIs(true_label, false_label, input, instr->type_literal()); - if (final_branch_condition != no_condition) { - EmitBranch(true_block, false_block, final_branch_condition); - } + Condition final_branch_condition = EmitTypeofIs(true_label, + false_label, + input, + instr->type_literal()); + + EmitBranch(true_block, false_block, final_branch_condition); } @@ -4416,8 +4319,11 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, final_branch_condition = zero; } else { + final_branch_condition = not_equal; __ jmp(false_label); + // A dead branch instruction will be generated after this point. } + return final_branch_condition; } diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.h b/deps/v8/src/ia32/lithium-codegen-ia32.h index 412e418f2..6037c0868 100644 --- a/deps/v8/src/ia32/lithium-codegen-ia32.h +++ b/deps/v8/src/ia32/lithium-codegen-ia32.h @@ -131,8 +131,8 @@ class LCodeGen BASE_EMBEDDED { bool is_done() const { return status_ == DONE; } bool is_aborted() const { return status_ == ABORTED; } - StrictModeFlag strict_mode_flag() const { - return info()->strict_mode_flag(); + int strict_mode_flag() const { + return info()->is_strict_mode() ? kStrictMode : kNonStrictMode; } bool dynamic_frame_alignment() const { return dynamic_frame_alignment_; } void set_dynamic_frame_alignment(bool value) { @@ -227,7 +227,6 @@ class LCodeGen BASE_EMBEDDED { Register ToRegister(int index) const; XMMRegister ToDoubleRegister(int index) const; int ToInteger32(LConstantOperand* op) const; - double ToDouble(LConstantOperand* op) const; Operand BuildFastArrayOperand(LOperand* elements_pointer, LOperand* key, ElementsKind elements_kind, @@ -262,6 +261,7 @@ class LCodeGen BASE_EMBEDDED { static Condition TokenToCondition(Token::Value op, bool is_unsigned); void EmitGoto(int block); void EmitBranch(int left_block, int right_block, Condition cc); + void EmitCmpI(LOperand* left, LOperand* right); void EmitNumberUntagD(Register input, XMMRegister result, bool deoptimize_on_undefined, @@ -270,10 +270,8 @@ class LCodeGen BASE_EMBEDDED { // Emits optimized code for typeof x == "y". Modifies input register. // Returns the condition on which a final split to // true and false label should be made, to optimize fallthrough. - Condition EmitTypeofIs(Label* true_label, - Label* false_label, - Register input, - Handle<String> type_name); + Condition EmitTypeofIs(Label* true_label, Label* false_label, + Register input, Handle<String> type_name); // Emits optimized code for %_IsObject(x). Preserves input register. 
// Returns the condition on which a final split to diff --git a/deps/v8/src/ia32/lithium-ia32.cc b/deps/v8/src/ia32/lithium-ia32.cc index 626f899bf..856106c79 100644 --- a/deps/v8/src/ia32/lithium-ia32.cc +++ b/deps/v8/src/ia32/lithium-ia32.cc @@ -452,12 +452,6 @@ void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) { } -void LTransitionElementsKind::PrintDataTo(StringStream* stream) { - object()->PrintTo(stream); - stream->Add(" %p -> %p", *original_map(), *transitioned_map()); -} - - void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) { LInstructionGap* gap = new LInstructionGap(block); int index = -1; @@ -1440,11 +1434,13 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) { LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { + Token::Value op = instr->token(); ASSERT(instr->left()->representation().IsTagged()); ASSERT(instr->right()->representation().IsTagged()); + bool reversed = (op == Token::GT || op == Token::LTE); LOperand* context = UseFixed(instr->context(), esi); - LOperand* left = UseFixed(instr->left(), edx); - LOperand* right = UseFixed(instr->right(), eax); + LOperand* left = UseFixed(instr->left(), reversed ? eax : edx); + LOperand* right = UseFixed(instr->right(), reversed ? edx : eax); LCmpT* result = new LCmpT(context, left, right); return MarkAsCall(DefineFixed(result, eax), instr); } @@ -1456,22 +1452,15 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch( if (r.IsInteger32()) { ASSERT(instr->left()->representation().IsInteger32()); ASSERT(instr->right()->representation().IsInteger32()); - LOperand* left = UseRegisterOrConstantAtStart(instr->left()); + LOperand* left = UseRegisterAtStart(instr->left()); LOperand* right = UseOrConstantAtStart(instr->right()); return new LCmpIDAndBranch(left, right); } else { ASSERT(r.IsDouble()); ASSERT(instr->left()->representation().IsDouble()); ASSERT(instr->right()->representation().IsDouble()); - LOperand* left; - LOperand* right; - if (instr->left()->IsConstant() && instr->right()->IsConstant()) { - left = UseRegisterOrConstantAtStart(instr->left()); - right = UseRegisterOrConstantAtStart(instr->right()); - } else { - left = UseRegisterAtStart(instr->left()); - right = UseRegisterAtStart(instr->right()); - } + LOperand* left = UseRegisterAtStart(instr->left()); + LOperand* right = UseRegisterAtStart(instr->right()); return new LCmpIDAndBranch(left, right); } } @@ -2044,27 +2033,6 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) { } -LInstruction* LChunkBuilder::DoTransitionElementsKind( - HTransitionElementsKind* instr) { - if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS && - instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) { - LOperand* object = UseRegister(instr->object()); - LOperand* new_map_reg = TempRegister(); - LOperand* temp_reg = TempRegister(); - LTransitionElementsKind* result = - new LTransitionElementsKind(object, new_map_reg, temp_reg); - return DefineSameAsFirst(result); - } else { - LOperand* object = UseFixed(instr->object(), eax); - LOperand* fixed_object_reg = FixedTemp(edx); - LOperand* new_map_reg = FixedTemp(ebx); - LTransitionElementsKind* result = - new LTransitionElementsKind(object, new_map_reg, fixed_object_reg); - return MarkAsCall(DefineFixed(result, eax), instr); - } -} - - LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { bool needs_write_barrier = instr->NeedsWriteBarrier(); diff --git a/deps/v8/src/ia32/lithium-ia32.h b/deps/v8/src/ia32/lithium-ia32.h index 
5f23afab0..3a06ac358 100644 --- a/deps/v8/src/ia32/lithium-ia32.h +++ b/deps/v8/src/ia32/lithium-ia32.h @@ -156,7 +156,6 @@ class LCodeGen; V(ThisFunction) \ V(Throw) \ V(ToFastProperties) \ - V(TransitionElementsKind) \ V(Typeof) \ V(TypeofIsAndBranch) \ V(UnaryMathOperation) \ @@ -1296,6 +1295,7 @@ class LStoreContextSlot: public LTemplateInstruction<0, 2, 1> { LOperand* context() { return InputAt(0); } LOperand* value() { return InputAt(1); } int slot_index() { return hydrogen()->slot_index(); } + int needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); } virtual void PrintDataTo(StringStream* stream); }; @@ -1312,9 +1312,7 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> { class LThisFunction: public LTemplateInstruction<1, 0, 0> { - public: DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function") - DECLARE_HYDROGEN_ACCESSOR(ThisFunction) }; @@ -1619,6 +1617,7 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> { Handle<Object> name() const { return hydrogen()->name(); } bool is_in_object() { return hydrogen()->is_in_object(); } int offset() { return hydrogen()->offset(); } + bool needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); } Handle<Map> transition() const { return hydrogen()->transition(); } }; @@ -1640,8 +1639,7 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 3, 0> { LOperand* object() { return inputs_[1]; } LOperand* value() { return inputs_[2]; } Handle<Object> name() const { return hydrogen()->name(); } - StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); } - bool strict_mode() { return strict_mode_flag() == kStrictMode; } + bool strict_mode() { return hydrogen()->strict_mode(); } }; @@ -1735,30 +1733,6 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 4, 0> { }; -class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> { - public: - LTransitionElementsKind(LOperand* object, - LOperand* new_map_temp, - LOperand* temp_reg) { - inputs_[0] = object; - temps_[0] = new_map_temp; - temps_[1] = temp_reg; - } - - DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind, - "transition-elements-kind") - DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind) - - virtual void PrintDataTo(StringStream* stream); - - LOperand* object() { return inputs_[0]; } - LOperand* new_map_reg() { return temps_[0]; } - LOperand* temp_reg() { return temps_[1]; } - Handle<Map> original_map() { return hydrogen()->original_map(); } - Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); } -}; - - class LStringAdd: public LTemplateInstruction<1, 3, 0> { public: LStringAdd(LOperand* context, LOperand* left, LOperand* right) { diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc index dd1ace91a..3aaa22acc 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/macro-assembler-ia32.cc @@ -352,7 +352,7 @@ void MacroAssembler::SafePush(const Immediate& x) { void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) { // see ROOT_ACCESSOR macro in factory.h - Handle<Object> value(&isolate()->heap()->roots_array_start()[index]); + Handle<Object> value(&isolate()->heap()->roots_address()[index]); cmp(with, value); } @@ -1492,19 +1492,6 @@ void MacroAssembler::InitializeFieldsWithFiller(Register start_offset, } -void MacroAssembler::BooleanBitTest(Register object, - int field_offset, - int bit_index) { - bit_index += kSmiTagSize + kSmiShiftSize; - ASSERT(IsPowerOf2(kBitsPerByte)); - int byte_index = bit_index / 
kBitsPerByte; - int byte_bit_index = bit_index & (kBitsPerByte - 1); - test_b(FieldOperand(object, field_offset + byte_index), - static_cast<byte>(1 << byte_bit_index)); -} - - - void MacroAssembler::NegativeZeroTest(Register result, Register op, Label* then_label) { @@ -1535,8 +1522,7 @@ void MacroAssembler::NegativeZeroTest(Register result, void MacroAssembler::TryGetFunctionPrototype(Register function, Register result, Register scratch, - Label* miss, - bool miss_on_bound_function) { + Label* miss) { // Check that the receiver isn't a smi. JumpIfSmi(function, miss); @@ -1544,15 +1530,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, CmpObjectType(function, JS_FUNCTION_TYPE, result); j(not_equal, miss); - if (miss_on_bound_function) { - // If a bound function, go to miss label. - mov(scratch, - FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); - BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset, - SharedFunctionInfo::kBoundFunction); - j(not_zero, miss); - } - // Make sure that the function has an instance prototype. Label non_instance; movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset)); @@ -2087,16 +2064,23 @@ void MacroAssembler::InvokeFunction(JSFunction* function, // You can't call a function without a valid frame. ASSERT(flag == JUMP_FUNCTION || has_frame()); + ASSERT(function->is_compiled()); // Get the function and setup the context. mov(edi, Immediate(Handle<JSFunction>(function))); mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); ParameterCount expected(function->shared()->formal_parameter_count()); - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. - InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), - expected, actual, flag, call_wrapper, call_kind); + if (V8::UseCrankshaft()) { + // TODO(kasperl): For now, we always call indirectly through the + // code field in the function to allow recompilation to take effect + // without changing any of the call sites. + InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), + expected, actual, flag, call_wrapper, call_kind); + } else { + Handle<Code> code(function->code()); + InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, + flag, call_wrapper, call_kind); + } } diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h index 8528c555a..a1b42c280 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.h +++ b/deps/v8/src/ia32/macro-assembler-ia32.h @@ -594,9 +594,6 @@ class MacroAssembler: public Assembler { // --------------------------------------------------------------------------- // Support functions. - // Check a boolean-bit of a Smi field. - void BooleanBitTest(Register object, int field_offset, int bit_index); - // Check if result is zero and op is negative. void NegativeZeroTest(Register result, Register op, Label* then_label); @@ -613,8 +610,7 @@ class MacroAssembler: public Assembler { void TryGetFunctionPrototype(Register function, Register result, Register scratch, - Label* miss, - bool miss_on_bound_function = false); + Label* miss); // Generates code for reporting that an illegal operation has // occurred. 
diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc index dbf01abff..8b0b9ab91 100644 --- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc @@ -1141,11 +1141,6 @@ int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address, frame_entry<const String*>(re_frame, kInputString) = *subject; frame_entry<const byte*>(re_frame, kInputStart) = new_address; frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length; - } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) { - // Subject string might have been a ConsString that underwent - // short-circuiting during GC. That will not change start_address but - // will change pointer inside the subject handle. - frame_entry<const String*>(re_frame, kInputString) = *subject; } return 0; diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc index af53acd17..07cb14d02 100644 --- a/deps/v8/src/ia32/stub-cache-ia32.cc +++ b/deps/v8/src/ia32/stub-cache-ia32.cc @@ -107,60 +107,12 @@ static void ProbeTable(Isolate* isolate, // must always call a backup property check that is complete. // This function is safe to call if the receiver has fast properties. // Name must be a symbol and receiver must be a heap object. -static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, - Label* miss_label, - Register receiver, - Handle<String> name, - Register r0, - Register r1) { - ASSERT(name->IsSymbol()); - Counters* counters = masm->isolate()->counters(); - __ IncrementCounter(counters->negative_lookups(), 1); - __ IncrementCounter(counters->negative_lookups_miss(), 1); - - __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset)); - - const int kInterceptorOrAccessCheckNeededMask = - (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); - - // Bail out if the receiver has a named interceptor or requires access checks. - __ test_b(FieldOperand(r0, Map::kBitFieldOffset), - kInterceptorOrAccessCheckNeededMask); - __ j(not_zero, miss_label); - - // Check that receiver is a JSObject. - __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE); - __ j(below, miss_label); - - // Load properties array. - Register properties = r0; - __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset)); - - // Check that the properties array is a dictionary. - __ cmp(FieldOperand(properties, HeapObject::kMapOffset), - Immediate(masm->isolate()->factory()->hash_table_map())); - __ j(not_equal, miss_label); - - Label done; - StringDictionaryLookupStub::GenerateNegativeLookup(masm, - miss_label, - &done, - properties, - name, - r1); - __ bind(&done); - __ DecrementCounter(counters->negative_lookups_miss(), 1); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. 
-static MaybeObject* TryGenerateDictionaryNegativeLookup(MacroAssembler* masm, - Label* miss_label, - Register receiver, - String* name, - Register r0, - Register r1) { +static MaybeObject* GenerateDictionaryNegativeLookup(MacroAssembler* masm, + Label* miss_label, + Register receiver, + String* name, + Register r0, + Register r1) { ASSERT(name->IsSymbol()); Counters* counters = masm->isolate()->counters(); __ IncrementCounter(counters->negative_lookups(), 1); @@ -191,12 +143,12 @@ static MaybeObject* TryGenerateDictionaryNegativeLookup(MacroAssembler* masm, Label done; MaybeObject* result = - StringDictionaryLookupStub::TryGenerateNegativeLookup(masm, - miss_label, - &done, - properties, - name, - r1); + StringDictionaryLookupStub::GenerateNegativeLookup(masm, + miss_label, + &done, + properties, + name, + r1); if (result->IsFailure()) return result; __ bind(&done); @@ -213,23 +165,25 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register scratch, Register extra, Register extra2) { + Isolate* isolate = Isolate::Current(); Label miss; + USE(extra2); // The register extra2 is not used on the ia32 platform. - // Assert that code is valid. The shifting code relies on the entry size - // being 8. + // Make sure that code is valid. The shifting code relies on the + // entry size being 8. ASSERT(sizeof(Entry) == 8); - // Assert the flags do not name a specific type. + // Make sure the flags does not name a specific type. ASSERT(Code::ExtractTypeFromFlags(flags) == 0); - // Assert that there are no register conflicts. + // Make sure that there are no register conflicts. ASSERT(!scratch.is(receiver)); ASSERT(!scratch.is(name)); ASSERT(!extra.is(receiver)); ASSERT(!extra.is(name)); ASSERT(!extra.is(scratch)); - // Assert scratch and extra registers are valid, and extra2 is unused. + // Check scratch and extra registers are valid, and extra2 is unused. ASSERT(!scratch.is(no_reg)); ASSERT(extra2.is(no_reg)); @@ -243,7 +197,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm, __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize); // Probe the primary table. - ProbeTable(isolate(), masm, flags, kPrimary, name, scratch, extra); + ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra); // Primary miss: Compute hash for secondary probe. __ mov(scratch, FieldOperand(name, String::kHashFieldOffset)); @@ -255,7 +209,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm, __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize); // Probe the secondary table. - ProbeTable(isolate(), masm, flags, kSecondary, name, scratch, extra); + ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra); // Cache miss: Fall-through and let caller handle the miss by // entering the runtime system. @@ -373,10 +327,8 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, // are loaded directly otherwise the property is loaded from the properties // fixed array. void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - Handle<JSObject> holder, - int index) { + Register dst, Register src, + JSObject* holder, int index) { // Adjust for the number of properties stored in the holder. 
index -= holder->map()->inobject_properties(); if (index < 0) { @@ -534,11 +486,11 @@ class CallInterceptorCompiler BASE_EMBEDDED { CallInterceptorCompiler(StubCompiler* stub_compiler, const ParameterCount& arguments, Register name, - Code::ExtraICState extra_state) + Code::ExtraICState extra_ic_state) : stub_compiler_(stub_compiler), arguments_(arguments), name_(name), - extra_state_(extra_state) {} + extra_ic_state_(extra_ic_state) {} MaybeObject* Compile(MacroAssembler* masm, JSObject* object, @@ -662,7 +614,7 @@ class CallInterceptorCompiler BASE_EMBEDDED { GenerateFastApiCall(masm, optimization, arguments_.immediate()); if (result->IsFailure()) return result; } else { - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(optimization.constant_function(), arguments_, @@ -748,16 +700,21 @@ class CallInterceptorCompiler BASE_EMBEDDED { StubCompiler* stub_compiler_; const ParameterCount& arguments_; Register name_; - Code::ExtraICState extra_state_; + Code::ExtraICState extra_ic_state_; }; void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); - Handle<Code> code = (kind == Code::LOAD_IC) - ? masm->isolate()->builtins()->LoadIC_Miss() - : masm->isolate()->builtins()->KeyedLoadIC_Miss(); - __ jmp(code, RelocInfo::CODE_TARGET); + Code* code = NULL; + if (kind == Code::LOAD_IC) { + code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss); + } else { + code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss); + } + + Handle<Code> ic(code); + __ jmp(ic, RelocInfo::CODE_TARGET); } @@ -772,9 +729,9 @@ void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) { // Both name_reg and receiver_reg are preserved on jumps to miss_label, // but may be destroyed if store is successful. void StubCompiler::GenerateStoreField(MacroAssembler* masm, - Handle<JSObject> object, + JSObject* object, int index, - Handle<Map> transition, + Map* transition, Register receiver_reg, Register name_reg, Register scratch, @@ -797,12 +754,12 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); // Perform map transition for the receiver if necessary. - if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) { + if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) { // The properties must be extended before we can store the value. // We jump to a runtime call that extends the properties array. __ pop(scratch); // Return address. __ push(receiver_reg); - __ push(Immediate(transition)); + __ push(Immediate(Handle<Map>(transition))); __ push(eax); __ push(scratch); __ TailCallExternalReference( @@ -813,11 +770,11 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, return; } - if (!transition.is_null()) { + if (transition != NULL) { // Update the map of the object; no write barrier updating is // needed because the map is never in new space. __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), - Immediate(transition)); + Immediate(Handle<Map>(transition))); } // Adjust for the number of properties stored in the object. Even in the @@ -863,29 +820,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, // Generate code to check that a global property cell is empty. 
Create // the property cell at compilation time if no cell exists for the // property. -static void GenerateCheckPropertyCell(MacroAssembler* masm, - Handle<GlobalObject> global, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSGlobalPropertyCell> cell = - GlobalObject::EnsurePropertyCell(global, name); - ASSERT(cell->value()->IsTheHole()); - Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value(); - if (Serializer::enabled()) { - __ mov(scratch, Immediate(cell)); - __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), - Immediate(the_hole)); - } else { - __ cmp(Operand::Cell(cell), Immediate(the_hole)); - } - __ j(not_equal, miss); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( +MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell( MacroAssembler* masm, GlobalObject* global, String* name, @@ -912,29 +847,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( // Calls GenerateCheckPropertyCell for each global object in the prototype chain // from object to (but not including) holder. -static void GenerateCheckPropertyCells(MacroAssembler* masm, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSObject> current = object; - while (!current.is_identical_to(holder)) { - if (current->IsGlobalObject()) { - GenerateCheckPropertyCell(masm, - Handle<GlobalObject>::cast(current), - name, - scratch, - miss); - } - current = Handle<JSObject>(JSObject::cast(current->GetPrototype())); - } -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( +MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells( MacroAssembler* masm, JSObject* object, JSObject* holder, @@ -945,7 +858,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( while (current != holder) { if (current->IsGlobalObject()) { // Returns a cell or a failure. - MaybeObject* result = TryGenerateCheckPropertyCell( + MaybeObject* result = GenerateCheckPropertyCell( masm, GlobalObject::cast(current), name, @@ -964,120 +877,6 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( #define __ ACCESS_MASM(masm()) -Register StubCompiler::CheckPrototypes(Handle<JSObject> object, - Register object_reg, - Handle<JSObject> holder, - Register holder_reg, - Register scratch1, - Register scratch2, - Handle<String> name, - int save_at_depth, - Label* miss) { - // Make sure there's no overlap between holder and object registers. - ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); - ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) - && !scratch2.is(scratch1)); - - // Keep track of the current object in register reg. - Register reg = object_reg; - Handle<JSObject> current = object; - int depth = 0; - - if (save_at_depth == depth) { - __ mov(Operand(esp, kPointerSize), reg); - } - - // Traverse the prototype chain and check the maps in the prototype chain for - // fast and global objects or do negative lookup for normal objects. - while (!current.is_identical_to(holder)) { - ++depth; - - // Only global objects and objects that do not require access - // checks are allowed in stubs. 
- ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); - - Handle<JSObject> prototype(JSObject::cast(current->GetPrototype())); - if (!current->HasFastProperties() && - !current->IsJSGlobalObject() && - !current->IsJSGlobalProxy()) { - if (!name->IsSymbol()) { - name = factory()->LookupSymbol(name); - } - ASSERT(current->property_dictionary()->FindEntry(*name) == - StringDictionary::kNotFound); - - GenerateDictionaryNegativeLookup(masm(), miss, reg, name, - scratch1, scratch2); - - __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); - reg = holder_reg; // From now on the object will be in holder_reg. - __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); - } else { - bool in_new_space = heap()->InNewSpace(*prototype); - Handle<Map> current_map(current->map()); - if (in_new_space) { - // Save the map in scratch1 for later. - __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); - __ cmp(scratch1, Immediate(current_map)); - } else { - __ cmp(FieldOperand(reg, HeapObject::kMapOffset), - Immediate(current_map)); - } - // Branch on the result of the map check. - __ j(not_equal, miss); - // Check access rights to the global object. This has to happen after - // the map check so that we know that the object is actually a global - // object. - if (current->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch2, miss); - } - reg = holder_reg; // From now on the object will be in holder_reg. - - if (in_new_space) { - // The prototype is in new space; we cannot store a reference to it - // in the code. Load it from the map. - __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); - } else { - // The prototype is in old space; load it directly. - __ mov(reg, prototype); - } - } - - if (save_at_depth == depth) { - __ mov(Operand(esp, kPointerSize), reg); - } - - // Go to the next object in the prototype chain. - current = prototype; - } - ASSERT(current.is_identical_to(holder)); - - // Log the check depth. - LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); - - // Check the holder map. - __ cmp(FieldOperand(reg, HeapObject::kMapOffset), - Immediate(Handle<Map>(holder->map()))); - __ j(not_equal, miss); - - // Perform security check for access to the global object. - ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); - if (holder->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch1, miss); - } - - // If we've skipped any global objects, it's not enough to verify that - // their maps haven't changed. We also need to check that the property - // cell for the property is still empty. - GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss); - - // Return the register containing the holder. - return reg; -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. 
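// Editorial sketch (standalone C++, illustrative names only -- not V8's real
// types or API): the idea behind the CheckPrototypes variants above and below.
// A monomorphic stub records the map ("shape") of every fast-properties object
// from the receiver up to the holder at compile time; at run time it walks the
// same chain and bails out to the miss label as soon as any map differs,
// because a changed map may mean the property moved or the chain changed.
// (The real code additionally does negative dictionary lookups for slow-mode
// objects and access checks for global proxies; this models only the map path.)
#include <cstdio>
#include <vector>

struct Shape { int id; };                 // stands in for a v8 Map
struct Obj {
  const Shape* shape;
  const Obj* prototype;                   // nullptr terminates the chain
};

// Shapes recorded when the stub was compiled, receiver first, holder last.
struct PrototypeCheck {
  std::vector<const Shape*> expected_shapes;

  // Returns the holder if every shape still matches, nullptr on a "miss".
  const Obj* Check(const Obj* receiver) const {
    const Obj* current = receiver;
    for (size_t i = 0; i < expected_shapes.size(); ++i) {
      if (current == nullptr || current->shape != expected_shapes[i]) {
        return nullptr;                   // "jump to miss": a map changed
      }
      if (i + 1 < expected_shapes.size()) current = current->prototype;
    }
    return current;                       // corresponds to the holder register
  }
};

int main() {
  Shape receiver_map{1}, holder_map{2};
  Obj holder{&holder_map, nullptr};
  Obj receiver{&receiver_map, &holder};
  PrototypeCheck check{{&receiver_map, &holder_map}};
  std::printf("fast path: %s\n", check.Check(&receiver) ? "hit" : "miss");
  Obj reshaped{&holder_map, &holder};     // simulate the receiver's map changing
  std::printf("after map change: %s\n", check.Check(&reshaped) ? "hit" : "miss");
}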
Register StubCompiler::CheckPrototypes(JSObject* object, Register object_reg, JSObject* holder, @@ -1127,9 +926,12 @@ Register StubCompiler::CheckPrototypes(JSObject* object, ASSERT(current->property_dictionary()->FindEntry(name) == StringDictionary::kNotFound); - MaybeObject* negative_lookup = - TryGenerateDictionaryNegativeLookup(masm(), miss, reg, name, - scratch1, scratch2); + MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(), + miss, + reg, + name, + scratch1, + scratch2); if (negative_lookup->IsFailure()) { set_failure(Failure::cast(negative_lookup)); return reg; @@ -1196,17 +998,17 @@ Register StubCompiler::CheckPrototypes(JSObject* object, ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); if (holder->IsJSGlobalProxy()) { __ CheckAccessGlobalProxy(reg, scratch1, miss); - } + }; // If we've skipped any global objects, it's not enough to verify // that their maps haven't changed. We also need to check that the // property cell for the property is still empty. - MaybeObject* result = TryGenerateCheckPropertyCells(masm(), - object, - holder, - name, - scratch1, - miss); + MaybeObject* result = GenerateCheckPropertyCells(masm(), + object, + holder, + name, + scratch1, + miss); if (result->IsFailure()) set_failure(Failure::cast(result)); // Return the register containing the holder. @@ -1214,21 +1016,22 @@ Register StubCompiler::CheckPrototypes(JSObject* object, } -void StubCompiler::GenerateLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadField(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, int index, - Handle<String> name, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ JumpIfSmi(receiver, miss); // Check the prototype chain. - Register reg = CheckPrototypes( - object, receiver, holder, scratch1, scratch2, scratch3, name, miss); + Register reg = + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, scratch3, name, miss); // Get the value from the properties. GenerateFastPropertyLoad(masm(), eax, reg, holder, index); @@ -1303,24 +1106,24 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object, } -void StubCompiler::GenerateLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadConstant(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, - Handle<Object> value, - Handle<String> name, + Object* value, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ JumpIfSmi(receiver, miss); // Check that the maps haven't changed. - CheckPrototypes( - object, receiver, holder, scratch1, scratch2, scratch3, name, miss); + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, scratch3, name, miss); // Return the constant value. - __ mov(eax, value); + __ mov(eax, Handle<Object>(value)); __ ret(0); } @@ -1420,8 +1223,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, // We found FIELD property in prototype chain of interceptor's holder. // Retrieve a field from field's holder. 
GenerateFastPropertyLoad(masm(), eax, holder_reg, - Handle<JSObject>(lookup->holder()), - lookup->GetFieldIndex()); + lookup->holder(), lookup->GetFieldIndex()); __ ret(0); } else { // We found CALLBACKS property in prototype chain of interceptor's @@ -1468,9 +1270,9 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, } -void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) { +void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { if (kind_ == Code::KEYED_CALL_IC) { - __ cmp(ecx, Immediate(name)); + __ cmp(ecx, Immediate(Handle<String>(name))); __ j(not_equal, miss); } } @@ -1533,22 +1335,11 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, } -void CallStubCompiler::GenerateMissBranch() { - Handle<Code> code = +MaybeObject* CallStubCompiler::GenerateMissBranch() { + MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), kind_, - extra_state_); - __ jmp(code, RelocInfo::CODE_TARGET); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* CallStubCompiler::TryGenerateMissBranch() { - MaybeObject* maybe_obj = - isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(), - kind_, - extra_state_); + extra_ic_state_); Object* obj; if (!maybe_obj->ToObject(&obj)) return maybe_obj; __ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); @@ -1556,10 +1347,11 @@ MaybeObject* CallStubCompiler::TryGenerateMissBranch() { } -Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, - Handle<JSObject> holder, - int index, - Handle<String> name) { +MUST_USE_RESULT MaybeObject* CallStubCompiler::CompileCallField( + JSObject* object, + JSObject* holder, + int index, + String* name) { // ----------- S t a t e ------------- // -- ecx : name // -- esp[0] : return address @@ -1597,7 +1389,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, } // Invoke the function. - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(edi, arguments(), JUMP_FUNCTION, @@ -1605,7 +1397,8 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, // Handle call cache miss. __ bind(&miss); - GenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); + if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. return GetCode(FIELD, name); @@ -1632,7 +1425,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -1720,8 +1513,8 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, // the new element is non-Smi. For now, delegate to the builtin. 
Label no_fast_elements_check; __ JumpIfSmi(edi, &no_fast_elements_check); - __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); - __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar); + __ mov(esi, FieldOperand(edx, HeapObject::kMapOffset)); + __ CheckFastObjectElements(esi, &call_builtin, Label::kFar); __ bind(&no_fast_elements_check); // We could be lucky and the elements array could be at the top of @@ -1789,11 +1582,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, } __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -1817,7 +1610,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, Label miss, return_undefined, call_builtin; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -1872,11 +1665,11 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, 1); __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -1907,12 +1700,12 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -1958,11 +1751,11 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( // Restore function name in ecx. __ Set(ecx, Immediate(Handle<String>(name))); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -1993,12 +1786,12 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -2046,11 +1839,11 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( // Restore function name in ecx. __ Set(ecx, Immediate(Handle<String>(name))); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. 
- return TryGetCode(function); + return GetCode(function); } @@ -2077,7 +1870,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( } Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ mov(edx, Operand(esp, 2 * kPointerSize)); @@ -2115,7 +1908,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( // Tail call the full function. We do not have to patch the receiver // because the function makes no use of it. __ bind(&slow); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(function, arguments(), JUMP_FUNCTION, @@ -2123,11 +1916,11 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( __ bind(&miss); // ecx: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2159,7 +1952,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, } Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ mov(edx, Operand(esp, 2 * kPointerSize)); @@ -2252,11 +2045,11 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, __ bind(&miss); // ecx: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2282,7 +2075,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, } Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ mov(edx, Operand(esp, 2 * kPointerSize)); @@ -2356,11 +2149,11 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, __ bind(&miss); // ecx: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2383,7 +2176,7 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( Label miss, miss_before_stack_reserved; - GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved); + GenerateNameCheck(name, &miss_before_stack_reserved); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2417,11 +2210,11 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( __ add(esp, Immediate(kFastApiCallArguments * kPointerSize)); __ bind(&miss_before_stack_reserved); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. 
- return TryGetCode(function); + return GetCode(function); } @@ -2450,7 +2243,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant( Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2546,7 +2339,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant( UNREACHABLE(); } - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(function, arguments(), JUMP_FUNCTION, @@ -2554,11 +2347,11 @@ MaybeObject* CallStubCompiler::CompileCallConstant( // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2574,18 +2367,18 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // ----------------------------------- Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. const int argc = arguments().immediate(); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); // Get the receiver from the stack. __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); - CallInterceptorCompiler compiler(this, arguments(), ecx, extra_state_); + CallInterceptorCompiler compiler(this, arguments(), ecx, extra_ic_state_); MaybeObject* result = compiler.Compile(masm(), object, holder, @@ -2615,7 +2408,7 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // Invoke the function. __ mov(edi, eax); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(edi, arguments(), JUMP_FUNCTION, @@ -2623,11 +2416,11 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // Handle load cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } @@ -2656,7 +2449,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal( Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. const int argc = arguments().immediate(); @@ -2677,32 +2470,40 @@ MaybeObject* CallStubCompiler::CompileCallGlobal( // Jump to the cached code (tail call). Counters* counters = isolate()->counters(); __ IncrementCounter(counters->call_global_inline(), 1); + ASSERT(function->is_compiled()); ParameterCount expected(function->shared()->formal_parameter_count()); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. 
- __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), - expected, arguments(), JUMP_FUNCTION, - NullCallWrapper(), call_kind); + if (V8::UseCrankshaft()) { + // TODO(kasperl): For now, we always call indirectly through the + // code field in the function to allow recompilation to take effect + // without changing any of the call sites. + __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), + expected, arguments(), JUMP_FUNCTION, + NullCallWrapper(), call_kind); + } else { + Handle<Code> code(function->code()); + __ InvokeCode(code, expected, arguments(), + RelocInfo::CODE_TARGET, JUMP_FUNCTION, + NullCallWrapper(), call_kind); + } // Handle call cache miss. __ bind(&miss); __ IncrementCounter(counters->call_global_inline_miss(), 1); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(NORMAL, name); + return GetCode(NORMAL, name); } -Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, +MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, int index, - Handle<Map> transition, - Handle<String> name) { + Map* transition, + String* name) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : name @@ -2712,23 +2513,27 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, Label miss; // Generate store field code. Trashes the name register. - GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss); + GenerateStoreField(masm(), + object, + index, + transition, + edx, ecx, ebx, + &miss); // Handle store cache miss. __ bind(&miss); - __ mov(ecx, Immediate(name)); // restore name + __ mov(ecx, Immediate(Handle<String>(name))); // restore name Handle<Code> ic = isolate()->builtins()->StoreIC_Miss(); __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name); + return GetCode(transition == NULL ? 
FIELD : MAP_TRANSITION, name); } -Handle<Code> StoreStubCompiler::CompileStoreCallback( - Handle<JSObject> object, - Handle<AccessorInfo> callback, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, + AccessorInfo* callback, + String* name) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : name @@ -2756,7 +2561,7 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback( __ pop(ebx); // remove the return address __ push(edx); // receiver - __ push(Immediate(callback)); // callback info + __ push(Immediate(Handle<AccessorInfo>(callback))); // callback info __ push(ecx); // name __ push(eax); // value __ push(ebx); // restore return address @@ -2776,9 +2581,8 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback( } -Handle<Code> StoreStubCompiler::CompileStoreInterceptor( - Handle<JSObject> receiver, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, + String* name) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : name @@ -2826,10 +2630,9 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor( } -Handle<Code> StoreStubCompiler::CompileStoreGlobal( - Handle<GlobalObject> object, - Handle<JSGlobalPropertyCell> cell, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, + JSGlobalPropertyCell* cell, + String* name) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : name @@ -2844,7 +2647,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal( __ j(not_equal, &miss); // Compute the cell operand to use. - __ mov(ebx, Immediate(cell)); + __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell))); Operand cell_operand = FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset); // Check that the value in the cell is not the hole. If it is, this @@ -2888,10 +2691,10 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal( } -Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, +MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, int index, - Handle<Map> transition, - Handle<String> name) { + Map* transition, + String* name) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : key @@ -2904,11 +2707,16 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, __ IncrementCounter(counters->keyed_store_field(), 1); // Check that the name has not changed. - __ cmp(ecx, Immediate(name)); + __ cmp(ecx, Immediate(Handle<String>(name))); __ j(not_equal, &miss); // Generate store field code. Trashes the name register. - GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss); + GenerateStoreField(masm(), + object, + index, + transition, + edx, ecx, ebx, + &miss); // Handle store cache miss. __ bind(&miss); @@ -2917,37 +2725,40 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name); + return GetCode(transition == NULL ? 
FIELD : MAP_TRANSITION, name); } -Handle<Code> KeyedStoreStubCompiler::CompileStoreElement( - Handle<Map> receiver_map) { +MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : key // -- edx : receiver // -- esp[0] : return address // ----------------------------------- + Code* stub; ElementsKind elements_kind = receiver_map->elements_kind(); bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE; - Handle<Code> stub = - KeyedStoreElementStub(is_jsarray, elements_kind).GetCode(); - - __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK); + MaybeObject* maybe_stub = + KeyedStoreElementStub(is_jsarray, elements_kind).TryGetCode(); + if (!maybe_stub->To(&stub)) return maybe_stub; + __ DispatchMap(edx, + Handle<Map>(receiver_map), + Handle<Code>(stub), + DO_SMI_CHECK); Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss(); __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string()); + return GetCode(NORMAL, NULL); } -Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( - MapHandleList* receiver_maps, - CodeHandleList* handler_stubs, - MapHandleList* transitioned_maps) { +MaybeObject* KeyedStoreStubCompiler::CompileStorePolymorphic( + MapList* receiver_maps, + CodeList* handler_stubs, + MapList* transitioned_maps) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : key @@ -2959,14 +2770,15 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); // ebx: receiver->map(). for (int i = 0; i < receiver_maps->length(); ++i) { - __ cmp(edi, receiver_maps->at(i)); - if (transitioned_maps->at(i).is_null()) { - __ j(equal, handler_stubs->at(i)); + Handle<Map> map(receiver_maps->at(i)); + __ cmp(edi, map); + if (transitioned_maps->at(i) == NULL) { + __ j(equal, Handle<Code>(handler_stubs->at(i))); } else { Label next_map; __ j(not_equal, &next_map, Label::kNear); - __ mov(ebx, Immediate(transitioned_maps->at(i))); - __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET); + __ mov(ebx, Immediate(Handle<Map>(transitioned_maps->at(i)))); + __ jmp(Handle<Code>(handler_stubs->at(i)), RelocInfo::CODE_TARGET); __ bind(&next_map); } } @@ -2975,13 +2787,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( __ jmp(miss_ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC); + return GetCode(NORMAL, NULL, MEGAMORPHIC); } -Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> last) { +MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, + JSObject* object, + JSObject* last) { // ----------- S t a t e ------------- // -- eax : receiver // -- ecx : name @@ -3002,8 +2814,15 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, // If the last object in the prototype chain is a global object, // check that the global property cell is empty. 
if (last->IsGlobalObject()) { - GenerateCheckPropertyCell( - masm(), Handle<GlobalObject>::cast(last), name, edx, &miss); + MaybeObject* cell = GenerateCheckPropertyCell(masm(), + GlobalObject::cast(last), + name, + edx, + &miss); + if (cell->IsFailure()) { + miss.Unuse(); + return cell; + } } // Return undefined if maps of the full prototype chain are still the @@ -3015,14 +2834,14 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return GetCode(NONEXISTENT, factory()->empty_string()); + return GetCode(NONEXISTENT, isolate()->heap()->empty_string()); } -Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, + JSObject* holder, int index, - Handle<String> name) { + String* name) { // ----------- S t a t e ------------- // -- eax : receiver // -- ecx : name @@ -3061,14 +2880,14 @@ MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<Object> value, - Handle<String> name) { +MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, + JSObject* holder, + Object* value, + String* name) { // ----------- S t a t e ------------- // -- eax : receiver // -- ecx : name @@ -3095,7 +2914,7 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, // ----------------------------------- Label miss; - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); // TODO(368): Compile in the whole chain: all the interceptors in @@ -3115,16 +2934,15 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } -Handle<Code> LoadStubCompiler::CompileLoadGlobal( - Handle<JSObject> object, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<String> name, - bool is_dont_delete) { +MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + String* name, + bool is_dont_delete) { // ----------- S t a t e ------------- // -- eax : receiver // -- ecx : name @@ -3135,7 +2953,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( // If the object is the holder then we know that it's a global // object which can only happen for contextual loads. In this case, // the receiver cannot be a smi. - if (!object.is_identical_to(holder)) { + if (object != holder) { __ JumpIfSmi(eax, &miss); } @@ -3144,10 +2962,10 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( // Get the value from the cell. if (Serializer::enabled()) { - __ mov(ebx, Immediate(cell)); + __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell))); __ mov(ebx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset)); } else { - __ mov(ebx, Operand::Cell(cell)); + __ mov(ebx, Operand::Cell(Handle<JSGlobalPropertyCell>(cell))); } // Check for deleted property if property can actually be deleted. 
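// Editorial sketch (standalone C++, illustrative names only -- not V8's real
// types): the property-cell pattern used by CompileLoadGlobal and
// GenerateCheckPropertyCell above. A global property lives in a cell; a
// distinguished "hole" value means the property is absent or was deleted. A
// monomorphic global-load stub just reads the cell and goes to the miss path
// when it sees the hole, and a "load nonexistent" stub is only valid while the
// cell still holds the hole.
#include <cstdio>

struct Value { int payload; };
static const Value kTheHole{-1};          // sentinel, compared by address

struct PropertyCell { const Value* value; };

// Roughly what the generated code for a global load does.
const Value* LoadGlobal(const PropertyCell& cell, bool is_dont_delete,
                        bool* miss) {
  *miss = false;
  if (!is_dont_delete && cell.value == &kTheHole) {
    *miss = true;                         // deleted: fall back to the runtime
    return nullptr;
  }
  return cell.value;
}

// What GenerateCheckPropertyCell verifies for "load nonexistent" stubs.
bool CellStillEmpty(const PropertyCell& cell) {
  return cell.value == &kTheHole;
}

int main() {
  Value forty_two{42};
  PropertyCell cell{&forty_two};
  bool miss = false;
  const Value* v = LoadGlobal(cell, /*is_dont_delete=*/false, &miss);
  std::printf("load: %d (miss=%d)\n", v ? v->payload : 0, miss);
  cell.value = &kTheHole;                 // property deleted
  LoadGlobal(cell, false, &miss);
  std::printf("after delete: miss=%d, cell still empty=%d\n", miss,
              CellStillEmpty(cell));
}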
@@ -3173,9 +2991,9 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, +MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, + JSObject* receiver, + JSObject* holder, int index) { // ----------- S t a t e ------------- // -- eax : key @@ -3188,7 +3006,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, __ IncrementCounter(counters->keyed_load_field(), 1); // Check that the name has not changed. - __ cmp(eax, Immediate(name)); + __ cmp(eax, Immediate(Handle<String>(name))); __ j(not_equal, &miss); GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss); @@ -3234,15 +3052,14 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. - return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( - Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, + JSObject* receiver, + JSObject* holder, + Object* value) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver @@ -3254,11 +3071,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( __ IncrementCounter(counters->keyed_load_constant_function(), 1); // Check that the name has not changed. - __ cmp(eax, Immediate(name)); + __ cmp(eax, Immediate(Handle<String>(name))); __ j(not_equal, &miss); - GenerateLoadConstant( - receiver, holder, edx, ebx, ecx, edi, value, name, &miss); + GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi, + value, name, &miss); __ bind(&miss); __ DecrementCounter(counters->keyed_load_constant_function(), 1); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); @@ -3285,7 +3102,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ cmp(eax, Immediate(Handle<String>(name))); __ j(not_equal, &miss); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); GenerateLoadInterceptor(receiver, holder, @@ -3302,12 +3119,11 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver @@ -3319,7 +3135,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( __ IncrementCounter(counters->keyed_load_array_length(), 1); // Check that the name has not changed. 
- __ cmp(eax, Immediate(name)); + __ cmp(eax, Immediate(Handle<String>(name))); __ j(not_equal, &miss); GenerateLoadArrayLength(masm(), edx, ecx, &miss); @@ -3332,8 +3148,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver @@ -3345,7 +3160,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( __ IncrementCounter(counters->keyed_load_string_length(), 1); // Check that the name has not changed. - __ cmp(eax, Immediate(name)); + __ cmp(eax, Immediate(Handle<String>(name))); __ j(not_equal, &miss); GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true); @@ -3358,8 +3173,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver @@ -3371,7 +3185,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( __ IncrementCounter(counters->keyed_load_function_prototype(), 1); // Check that the name has not changed. - __ cmp(eax, Immediate(name)); + __ cmp(eax, Immediate(Handle<String>(name))); __ j(not_equal, &miss); GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss); @@ -3384,29 +3198,31 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadElement( - Handle<Map> receiver_map) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver // -- esp[0] : return address // ----------------------------------- - + Code* stub; ElementsKind elements_kind = receiver_map->elements_kind(); - Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode(); - - __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK); + MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode(); + if (!maybe_stub->To(&stub)) return maybe_stub; + __ DispatchMap(edx, + Handle<Map>(receiver_map), + Handle<Code>(stub), + DO_SMI_CHECK); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string()); + return GetCode(NORMAL, NULL); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic( - MapHandleList* receiver_maps, - CodeHandleList* handler_ics) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadPolymorphic( + MapList* receiver_maps, + CodeList* handler_ics) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver @@ -3419,15 +3235,16 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic( __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset)); int receiver_count = receiver_maps->length(); for (int current = 0; current < receiver_count; ++current) { - __ cmp(map_reg, receiver_maps->at(current)); - __ j(equal, handler_ics->at(current)); + Handle<Map> map(receiver_maps->at(current)); + __ cmp(map_reg, map); + __ j(equal, Handle<Code>(handler_ics->at(current))); } __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. 
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC); + return GetCode(NORMAL, NULL, MEGAMORPHIC); } diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc index fbe77b09d..d5056a9ce 100644 --- a/deps/v8/src/ic.cc +++ b/deps/v8/src/ic.cc @@ -100,11 +100,7 @@ void IC::TraceIC(const char* type, PrintF("]\n"); } } -#endif // DEBUG - - -#define TRACE_IC(type, name, old_state, new_target) \ - ASSERT((TraceIC(type, name, old_state, new_target), true)) +#endif IC::IC(FrameDepth depth, Isolate* isolate) : isolate_(isolate) { @@ -372,13 +368,15 @@ static bool HasInterceptorGetter(JSObject* object) { } -static void LookupForRead(Handle<Object> object, - Handle<String> name, +static void LookupForRead(Object* object, + String* name, LookupResult* lookup) { + AssertNoAllocation no_gc; // pointers must stay valid + // Skip all the objects with named interceptors, but // without actual getter. while (true) { - object->Lookup(*name, lookup); + object->Lookup(name, lookup); // Besides normal conditions (property not found or it's not // an interceptor), bail out if lookup is not cacheable: we won't // be able to IC it anyway and regular lookup should work fine. @@ -388,18 +386,18 @@ static void LookupForRead(Handle<Object> object, return; } - Handle<JSObject> holder(lookup->holder()); - if (HasInterceptorGetter(*holder)) { + JSObject* holder = lookup->holder(); + if (HasInterceptorGetter(holder)) { return; } - holder->LocalLookupRealNamedProperty(*name, lookup); + holder->LocalLookupRealNamedProperty(name, lookup); if (lookup->IsProperty()) { ASSERT(lookup->type() != INTERCEPTOR); return; } - Handle<Object> proto(holder->GetPrototype()); + Object* proto = holder->GetPrototype(); if (proto->IsNull()) { lookup->NotFound(); return; @@ -410,29 +408,28 @@ static void LookupForRead(Handle<Object> object, } -Handle<Object> CallICBase::TryCallAsFunction(Handle<Object> object) { - Handle<Object> delegate = Execution::GetFunctionDelegate(object); +Object* CallICBase::TryCallAsFunction(Object* object) { + HandleScope scope(isolate()); + Handle<Object> target(object, isolate()); + Handle<Object> delegate = Execution::GetFunctionDelegate(target); - if (delegate->IsJSFunction() && !object->IsJSFunctionProxy()) { + if (delegate->IsJSFunction()) { // Patch the receiver and use the delegate as the function to - // invoke. This is used for invoking objects as if they were functions. - const int argc = target()->arguments_count(); + // invoke. This is used for invoking objects as if they were + // functions. + const int argc = this->target()->arguments_count(); StackFrameLocator locator; JavaScriptFrame* frame = locator.FindJavaScriptFrame(0); int index = frame->ComputeExpressionsCount() - (argc + 1); - frame->SetExpression(index, *object); + frame->SetExpression(index, *target); } - return delegate; + return *delegate; } void CallICBase::ReceiverToObjectIfRequired(Handle<Object> callee, Handle<Object> object) { - while (callee->IsJSFunctionProxy()) { - callee = Handle<Object>(JSFunctionProxy::cast(*callee)->call_trap()); - } - if (callee->IsJSFunction()) { Handle<JSFunction> function = Handle<JSFunction>::cast(callee); if (function->shared()->strict_mode() || function->IsBuiltin()) { @@ -467,27 +464,31 @@ MaybeObject* CallICBase::LoadFunction(State state, // the element if so. 
uint32_t index; if (name->AsArrayIndex(&index)) { - Handle<Object> result = Object::GetElement(object, index); - RETURN_IF_EMPTY_HANDLE(isolate(), result); - if (result->IsJSFunction()) return *result; + Object* result; + { MaybeObject* maybe_result = object->GetElement(index); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + + if (result->IsJSFunction()) return result; // Try to find a suitable function delegate for the object at hand. result = TryCallAsFunction(result); - if (result->IsJSFunction()) return *result; + if (result->IsJSFunction()) return result; // Otherwise, it will fail in the lookup step. } // Lookup the property in the object. - LookupResult lookup(isolate()); - LookupForRead(object, name, &lookup); + LookupResult lookup; + LookupForRead(*object, *name, &lookup); if (!lookup.IsProperty()) { // If the object does not have the requested property, check which // exception we need to throw. - return IsContextual(object) - ? ReferenceError("not_defined", name) - : TypeError("undefined_method", object, name); + if (IsContextual(object)) { + return ReferenceError("not_defined", name); + } + return TypeError("undefined_method", object, name); } // Lookup is valid: Update inline cache and stub cache. @@ -497,42 +498,53 @@ MaybeObject* CallICBase::LoadFunction(State state, // Get the property. PropertyAttributes attr; - Handle<Object> result = - Object::GetProperty(object, object, &lookup, name, &attr); - RETURN_IF_EMPTY_HANDLE(isolate(), result); + Object* result; + { MaybeObject* maybe_result = + object->GetProperty(*object, &lookup, *name, &attr); + if (!maybe_result->ToObject(&result)) return maybe_result; + } - if (lookup.type() == INTERCEPTOR && attr == ABSENT) { + if (lookup.type() == INTERCEPTOR) { // If the object does not have the requested property, check which // exception we need to throw. - return IsContextual(object) - ? ReferenceError("not_defined", name) - : TypeError("undefined_method", object, name); + if (attr == ABSENT) { + if (IsContextual(object)) { + return ReferenceError("not_defined", name); + } + return TypeError("undefined_method", object, name); + } } ASSERT(!result->IsTheHole()); + HandleScope scope(isolate()); + // Wrap result in a handle because ReceiverToObjectIfRequired may allocate + // new object and cause GC. + Handle<Object> result_handle(result); // Make receiver an object if the callee requires it. Strict mode or builtin // functions do not wrap the receiver, non-strict functions and objects // called as functions do. - ReceiverToObjectIfRequired(result, object); + ReceiverToObjectIfRequired(result_handle, object); - if (result->IsJSFunction()) { - Handle<JSFunction> function = Handle<JSFunction>::cast(result); + if (result_handle->IsJSFunction()) { #ifdef ENABLE_DEBUGGER_SUPPORT // Handle stepping into a function if step into is active. Debug* debug = isolate()->debug(); if (debug->StepInActive()) { // Protect the result in a handle as the debugger can allocate and might // cause GC. + Handle<JSFunction> function(JSFunction::cast(*result_handle), isolate()); debug->HandleStepIn(function, object, fp(), false); + return *function; } #endif - return *function; + + return *result_handle; } // Try to find a suitable function delegate for the object at hand. 
- result = TryCallAsFunction(result); - if (result->IsJSFunction()) return *result; + result_handle = Handle<Object>(TryCallAsFunction(*result_handle)); + if (result_handle->IsJSFunction()) return *result_handle; return TypeError("property_not_function", object, name); } @@ -582,57 +594,89 @@ bool CallICBase::TryUpdateExtraICState(LookupResult* lookup, } -Handle<Code> CallICBase::ComputeMonomorphicStub(LookupResult* lookup, - State state, - Code::ExtraICState extra_state, - Handle<Object> object, - Handle<String> name) { +MaybeObject* CallICBase::ComputeMonomorphicStub( + LookupResult* lookup, + State state, + Code::ExtraICState extra_ic_state, + Handle<Object> object, + Handle<String> name) { int argc = target()->arguments_count(); - Handle<JSObject> holder(lookup->holder()); + MaybeObject* maybe_code = NULL; switch (lookup->type()) { case FIELD: { int index = lookup->GetFieldIndex(); - return isolate()->stub_cache()->ComputeCallField( - argc, kind_, extra_state, name, object, holder, index); + maybe_code = isolate()->stub_cache()->ComputeCallField(argc, + kind_, + extra_ic_state, + *name, + *object, + lookup->holder(), + index); + break; } case CONSTANT_FUNCTION: { // Get the constant function and compute the code stub for this // call; used for rewriting to monomorphic state and making sure // that the code stub is in the stub cache. - Handle<JSFunction> function(lookup->GetConstantFunction()); - return isolate()->stub_cache()->ComputeCallConstant( - argc, kind_, extra_state, name, object, holder, function); + JSFunction* function = lookup->GetConstantFunction(); + maybe_code = + isolate()->stub_cache()->ComputeCallConstant(argc, + kind_, + extra_ic_state, + *name, + *object, + lookup->holder(), + function); + break; } case NORMAL: { - // If we return a null handle, the IC will not be patched. - if (!object->IsJSObject()) return Handle<Code>::null(); + if (!object->IsJSObject()) return NULL; Handle<JSObject> receiver = Handle<JSObject>::cast(object); - if (holder->IsGlobalObject()) { - Handle<GlobalObject> global = Handle<GlobalObject>::cast(holder); - Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup)); - if (!cell->value()->IsJSFunction()) return Handle<Code>::null(); - Handle<JSFunction> function(JSFunction::cast(cell->value())); - return isolate()->stub_cache()->ComputeCallGlobal( - argc, kind_, extra_state, name, receiver, global, cell, function); + if (lookup->holder()->IsGlobalObject()) { + GlobalObject* global = GlobalObject::cast(lookup->holder()); + JSGlobalPropertyCell* cell = + JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup)); + if (!cell->value()->IsJSFunction()) return NULL; + JSFunction* function = JSFunction::cast(cell->value()); + maybe_code = isolate()->stub_cache()->ComputeCallGlobal(argc, + kind_, + extra_ic_state, + *name, + *receiver, + global, + cell, + function); } else { // There is only one shared stub for calling normalized // properties. It does not traverse the prototype chain, so the // property must be found in the receiver for the stub to be // applicable. 
- if (!holder.is_identical_to(receiver)) return Handle<Code>::null(); - return isolate()->stub_cache()->ComputeCallNormal( - argc, kind_, extra_state); + if (lookup->holder() != *receiver) return NULL; + maybe_code = isolate()->stub_cache()->ComputeCallNormal(argc, + kind_, + extra_ic_state, + *name, + *receiver); } break; } - case INTERCEPTOR: - ASSERT(HasInterceptorGetter(*holder)); - return isolate()->stub_cache()->ComputeCallInterceptor( - argc, kind_, extra_state, name, object, holder); + case INTERCEPTOR: { + ASSERT(HasInterceptorGetter(lookup->holder())); + maybe_code = isolate()->stub_cache()->ComputeCallInterceptor( + argc, + kind_, + extra_ic_state, + *name, + *object, + lookup->holder()); + break; + } default: - return Handle<Code>::null(); + maybe_code = NULL; + break; } + return maybe_code; } @@ -654,57 +698,75 @@ void CallICBase::UpdateCaches(LookupResult* lookup, // Compute the number of arguments. int argc = target()->arguments_count(); + MaybeObject* maybe_code = NULL; bool had_proto_failure = false; - Handle<Code> code; if (state == UNINITIALIZED) { // This is the first time we execute this inline cache. // Set the target to the pre monomorphic stub to delay // setting the monomorphic state. - code = isolate()->stub_cache()->ComputeCallPreMonomorphic( - argc, kind_, extra_ic_state); + maybe_code = + isolate()->stub_cache()->ComputeCallPreMonomorphic(argc, + kind_, + extra_ic_state); } else if (state == MONOMORPHIC) { if (kind_ == Code::CALL_IC && TryUpdateExtraICState(lookup, object, &extra_ic_state)) { - code = ComputeMonomorphicStub(lookup, state, extra_ic_state, - object, name); + maybe_code = ComputeMonomorphicStub(lookup, + state, + extra_ic_state, + object, + name); } else if (kind_ == Code::CALL_IC && TryRemoveInvalidPrototypeDependentStub(target(), *object, *name)) { had_proto_failure = true; - code = ComputeMonomorphicStub(lookup, state, extra_ic_state, - object, name); + maybe_code = ComputeMonomorphicStub(lookup, + state, + extra_ic_state, + object, + name); } else { - code = isolate()->stub_cache()->ComputeCallMegamorphic( - argc, kind_, extra_ic_state); + maybe_code = + isolate()->stub_cache()->ComputeCallMegamorphic(argc, + kind_, + extra_ic_state); } } else { - code = ComputeMonomorphicStub(lookup, state, extra_ic_state, - object, name); + maybe_code = ComputeMonomorphicStub(lookup, + state, + extra_ic_state, + object, + name); } - // If there's no appropriate stub we simply avoid updating the caches. - if (code.is_null()) return; + // If we're unable to compute the stub (not enough memory left), we + // simply avoid updating the caches. + Object* code; + if (maybe_code == NULL || !maybe_code->ToObject(&code)) return; // Patch the call site depending on the state of the cache. if (state == UNINITIALIZED || state == PREMONOMORPHIC || state == MONOMORPHIC || state == MONOMORPHIC_PROTOTYPE_FAILURE) { - set_target(*code); + set_target(Code::cast(code)); } else if (state == MEGAMORPHIC) { // Cache code holding map should be consistent with // GenerateMonomorphicCacheProbe. It is not the map which holds the stub. - Handle<JSObject> cache_object = object->IsJSObject() - ? Handle<JSObject>::cast(object) - : Handle<JSObject>(JSObject::cast(object->GetPrototype())); + Map* map = JSObject::cast(object->IsJSObject() ? *object : + object->GetPrototype())->map(); + // Update the stub cache. 
- isolate()->stub_cache()->Set(*name, cache_object->map(), *code); + isolate()->stub_cache()->Set(*name, map, Code::cast(code)); } + USE(had_proto_failure); +#ifdef DEBUG if (had_proto_failure) state = MONOMORPHIC_PROTOTYPE_FAILURE; - TRACE_IC(kind_ == Code::CALL_IC ? "CallIC" : "KeyedCallIC", - name, state, target()); + TraceIC(kind_ == Code::CALL_IC ? "CallIC" : "KeyedCallIC", + name, state, target()); +#endif } @@ -724,22 +786,34 @@ MaybeObject* KeyedCallIC::LoadFunction(State state, if (FLAG_use_ic && state != MEGAMORPHIC && object->IsHeapObject()) { int argc = target()->arguments_count(); - Handle<Map> map = - isolate()->factory()->non_strict_arguments_elements_map(); + Heap* heap = Handle<HeapObject>::cast(object)->GetHeap(); + Map* map = heap->non_strict_arguments_elements_map(); if (object->IsJSObject() && - Handle<JSObject>::cast(object)->elements()->map() == *map) { - Handle<Code> code = isolate()->stub_cache()->ComputeCallArguments( + Handle<JSObject>::cast(object)->elements()->map() == map) { + MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallArguments( argc, Code::KEYED_CALL_IC); - set_target(*code); - TRACE_IC("KeyedCallIC", key, state, target()); - } else if (!object->IsAccessCheckNeeded()) { - Handle<Code> code = isolate()->stub_cache()->ComputeCallMegamorphic( + Object* code; + if (maybe_code->ToObject(&code)) { + set_target(Code::cast(code)); +#ifdef DEBUG + TraceIC("KeyedCallIC", key, state, target()); +#endif + } + } else if (FLAG_use_ic && state != MEGAMORPHIC && + !object->IsAccessCheckNeeded()) { + MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic( argc, Code::KEYED_CALL_IC, Code::kNoExtraICState); - set_target(*code); - TRACE_IC("KeyedCallIC", key, state, target()); + Object* code; + if (maybe_code->ToObject(&code)) { + set_target(Code::cast(code)); +#ifdef DEBUG + TraceIC("KeyedCallIC", key, state, target()); +#endif + } } } + HandleScope scope(isolate()); Handle<Object> result = GetProperty(object, key); RETURN_IF_EMPTY_HANDLE(isolate(), result); @@ -747,9 +821,9 @@ MaybeObject* KeyedCallIC::LoadFunction(State state, // functions do not wrap the receiver, non-strict functions and objects // called as functions do. ReceiverToObjectIfRequired(result, object); - if (result->IsJSFunction()) return *result; - result = TryCallAsFunction(result); + if (result->IsJSFunction()) return *result; + result = Handle<Object>(TryCallAsFunction(*result)); if (result->IsJSFunction()) return *result; return TypeError("property_not_function", object, key); @@ -772,44 +846,53 @@ MaybeObject* LoadIC::Load(State state, // the underlying string value. See ECMA-262 15.5.5.1. if ((object->IsString() || object->IsStringWrapper()) && name->Equals(isolate()->heap()->length_symbol())) { - Handle<Code> stub; + AssertNoAllocation no_allocation; + Code* stub = NULL; if (state == UNINITIALIZED) { stub = pre_monomorphic_stub(); } else if (state == PREMONOMORPHIC) { - stub = object->IsString() - ? 
isolate()->builtins()->LoadIC_StringLength() - : isolate()->builtins()->LoadIC_StringWrapperLength(); + if (object->IsString()) { + stub = isolate()->builtins()->builtin( + Builtins::kLoadIC_StringLength); + } else { + stub = isolate()->builtins()->builtin( + Builtins::kLoadIC_StringWrapperLength); + } } else if (state == MONOMORPHIC && object->IsStringWrapper()) { - stub = isolate()->builtins()->LoadIC_StringWrapperLength(); + stub = isolate()->builtins()->builtin( + Builtins::kLoadIC_StringWrapperLength); } else if (state != MEGAMORPHIC) { stub = megamorphic_stub(); } - if (!stub.is_null()) { - set_target(*stub); + if (stub != NULL) { + set_target(stub); #ifdef DEBUG if (FLAG_trace_ic) PrintF("[LoadIC : +#length /string]\n"); #endif } // Get the string if we have a string wrapper object. - Handle<Object> string = object->IsJSValue() - ? Handle<Object>(Handle<JSValue>::cast(object)->value()) - : object; - return Smi::FromInt(String::cast(*string)->length()); + if (object->IsJSValue()) { + return Smi::FromInt( + String::cast(Handle<JSValue>::cast(object)->value())->length()); + } + return Smi::FromInt(String::cast(*object)->length()); } // Use specialized code for getting the length of arrays. if (object->IsJSArray() && name->Equals(isolate()->heap()->length_symbol())) { - Handle<Code> stub; + AssertNoAllocation no_allocation; + Code* stub = NULL; if (state == UNINITIALIZED) { stub = pre_monomorphic_stub(); } else if (state == PREMONOMORPHIC) { - stub = isolate()->builtins()->LoadIC_ArrayLength(); + stub = isolate()->builtins()->builtin( + Builtins::kLoadIC_ArrayLength); } else if (state != MEGAMORPHIC) { stub = megamorphic_stub(); } - if (!stub.is_null()) { - set_target(*stub); + if (stub != NULL) { + set_target(stub); #ifdef DEBUG if (FLAG_trace_ic) PrintF("[LoadIC : +#length /array]\n"); #endif @@ -820,20 +903,23 @@ MaybeObject* LoadIC::Load(State state, // Use specialized code for getting prototype of functions. if (object->IsJSFunction() && name->Equals(isolate()->heap()->prototype_symbol()) && - Handle<JSFunction>::cast(object)->should_have_prototype()) { - Handle<Code> stub; - if (state == UNINITIALIZED) { - stub = pre_monomorphic_stub(); - } else if (state == PREMONOMORPHIC) { - stub = isolate()->builtins()->LoadIC_FunctionPrototype(); - } else if (state != MEGAMORPHIC) { - stub = megamorphic_stub(); - } - if (!stub.is_null()) { - set_target(*stub); + JSFunction::cast(*object)->should_have_prototype()) { + { AssertNoAllocation no_allocation; + Code* stub = NULL; + if (state == UNINITIALIZED) { + stub = pre_monomorphic_stub(); + } else if (state == PREMONOMORPHIC) { + stub = isolate()->builtins()->builtin( + Builtins::kLoadIC_FunctionPrototype); + } else if (state != MEGAMORPHIC) { + stub = megamorphic_stub(); + } + if (stub != NULL) { + set_target(stub); #ifdef DEBUG - if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n"); + if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n"); #endif + } } return Accessors::FunctionGetPrototype(*object, 0); } @@ -845,8 +931,8 @@ MaybeObject* LoadIC::Load(State state, if (name->AsArrayIndex(&index)) return object->GetElement(index); // Named lookup in the object. - LookupResult lookup(isolate()); - LookupForRead(object, name, &lookup); + LookupResult lookup; + LookupForRead(*object, *name, &lookup); // If we did not find a property, check if we need to throw an exception. 
if (!lookup.IsProperty()) { @@ -865,15 +951,17 @@ MaybeObject* LoadIC::Load(State state, if (lookup.IsProperty() && (lookup.type() == INTERCEPTOR || lookup.type() == HANDLER)) { // Get the property. - Handle<Object> result = - Object::GetProperty(object, object, &lookup, name, &attr); - RETURN_IF_EMPTY_HANDLE(isolate(), result); + Object* result; + { MaybeObject* maybe_result = + object->GetProperty(*object, &lookup, *name, &attr); + if (!maybe_result->ToObject(&result)) return maybe_result; + } // If the property is not present, check if we need to throw an // exception. if (attr == ABSENT && IsContextual(object)) { return ReferenceError("not_defined", name); } - return *result; + return result; } // Get the property. @@ -896,105 +984,128 @@ void LoadIC::UpdateCaches(LookupResult* lookup, if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return; // Compute the code stub for this load. - Handle<Code> code; + MaybeObject* maybe_code = NULL; + Object* code; if (state == UNINITIALIZED) { // This is the first time we execute this inline cache. // Set the target to the pre monomorphic stub to delay // setting the monomorphic state. - code = pre_monomorphic_stub(); + maybe_code = pre_monomorphic_stub(); } else if (!lookup->IsProperty()) { // Nonexistent property. The result is undefined. - code = isolate()->stub_cache()->ComputeLoadNonexistent(name, receiver); + maybe_code = isolate()->stub_cache()->ComputeLoadNonexistent(*name, + *receiver); } else { // Compute monomorphic stub. - Handle<JSObject> holder(lookup->holder()); switch (lookup->type()) { - case FIELD: - code = isolate()->stub_cache()->ComputeLoadField( - name, receiver, holder, lookup->GetFieldIndex()); + case FIELD: { + maybe_code = isolate()->stub_cache()->ComputeLoadField( + *name, + *receiver, + lookup->holder(), + lookup->GetFieldIndex()); break; + } case CONSTANT_FUNCTION: { - Handle<Object> constant(lookup->GetConstantFunction()); - code = isolate()->stub_cache()->ComputeLoadConstant( - name, receiver, holder, constant); + Object* constant = lookup->GetConstantFunction(); + maybe_code = isolate()->stub_cache()->ComputeLoadConstant( + *name, *receiver, lookup->holder(), constant); break; } - case NORMAL: - if (holder->IsGlobalObject()) { - Handle<GlobalObject> global = Handle<GlobalObject>::cast(holder); - Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup)); - code = isolate()->stub_cache()->ComputeLoadGlobal( - name, receiver, global, cell, lookup->IsDontDelete()); + case NORMAL: { + if (lookup->holder()->IsGlobalObject()) { + GlobalObject* global = GlobalObject::cast(lookup->holder()); + JSGlobalPropertyCell* cell = + JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup)); + maybe_code = isolate()->stub_cache()->ComputeLoadGlobal(*name, + *receiver, + global, + cell, + lookup->IsDontDelete()); } else { // There is only one shared stub for loading normalized // properties. It does not traverse the prototype chain, so the // property must be found in the receiver for the stub to be // applicable. 
- if (!holder.is_identical_to(receiver)) return; - code = isolate()->stub_cache()->ComputeLoadNormal(); + if (lookup->holder() != *receiver) return; + maybe_code = isolate()->stub_cache()->ComputeLoadNormal(); } break; + } case CALLBACKS: { - Handle<Object> callback_object(lookup->GetCallbackObject()); - if (!callback_object->IsAccessorInfo()) return; - Handle<AccessorInfo> callback = - Handle<AccessorInfo>::cast(callback_object); + if (!lookup->GetCallbackObject()->IsAccessorInfo()) return; + AccessorInfo* callback = + AccessorInfo::cast(lookup->GetCallbackObject()); if (v8::ToCData<Address>(callback->getter()) == 0) return; - code = isolate()->stub_cache()->ComputeLoadCallback( - name, receiver, holder, callback); + maybe_code = isolate()->stub_cache()->ComputeLoadCallback( + *name, *receiver, lookup->holder(), callback); break; } - case INTERCEPTOR: - ASSERT(HasInterceptorGetter(*holder)); - code = isolate()->stub_cache()->ComputeLoadInterceptor( - name, receiver, holder); + case INTERCEPTOR: { + ASSERT(HasInterceptorGetter(lookup->holder())); + maybe_code = isolate()->stub_cache()->ComputeLoadInterceptor( + *name, *receiver, lookup->holder()); break; + } default: return; } } + // If we're unable to compute the stub (not enough memory left), we + // simply avoid updating the caches. + if (maybe_code == NULL || !maybe_code->ToObject(&code)) return; + // Patch the call site depending on the state of the cache. - if (state == UNINITIALIZED || - state == PREMONOMORPHIC || + if (state == UNINITIALIZED || state == PREMONOMORPHIC || state == MONOMORPHIC_PROTOTYPE_FAILURE) { - set_target(*code); + set_target(Code::cast(code)); } else if (state == MONOMORPHIC) { - set_target(*megamorphic_stub()); + set_target(megamorphic_stub()); } else if (state == MEGAMORPHIC) { // Cache code holding map should be consistent with // GenerateMonomorphicCacheProbe. - isolate()->stub_cache()->Set(*name, receiver->map(), *code); + Map* map = JSObject::cast(object->IsJSObject() ? 
*object : + object->GetPrototype())->map(); + + isolate()->stub_cache()->Set(*name, map, Code::cast(code)); } - TRACE_IC("LoadIC", name, state, target()); +#ifdef DEBUG + TraceIC("LoadIC", name, state, target()); +#endif } -Handle<Code> KeyedLoadIC::GetElementStubWithoutMapCheck( +MaybeObject* KeyedLoadIC::GetElementStubWithoutMapCheck( bool is_js_array, ElementsKind elements_kind) { - return KeyedLoadElementStub(elements_kind).GetCode(); + return KeyedLoadElementStub(elements_kind).TryGetCode(); } -Handle<Code> KeyedLoadIC::ComputePolymorphicStub( - MapHandleList* receiver_maps, +MaybeObject* KeyedLoadIC::ComputePolymorphicStub( + MapList* receiver_maps, StrictModeFlag strict_mode) { - CodeHandleList handler_ics(receiver_maps->length()); + CodeList handler_ics(receiver_maps->length()); for (int i = 0; i < receiver_maps->length(); ++i) { - Handle<Map> receiver_map = receiver_maps->at(i); - Handle<Code> cached_stub = ComputeMonomorphicStubWithoutMapCheck( + Map* receiver_map(receiver_maps->at(i)); + MaybeObject* maybe_cached_stub = ComputeMonomorphicStubWithoutMapCheck( receiver_map, strict_mode); + Code* cached_stub; + if (!maybe_cached_stub->To(&cached_stub)) return maybe_cached_stub; handler_ics.Add(cached_stub); } - KeyedLoadStubCompiler compiler(isolate()); - Handle<Code> code = compiler.CompileLoadPolymorphic( - receiver_maps, &handler_ics); + Object* object; + KeyedLoadStubCompiler compiler; + MaybeObject* maybe_code = compiler.CompileLoadPolymorphic(receiver_maps, + &handler_ics); + if (!maybe_code->ToObject(&object)) return maybe_code; isolate()->counters()->keyed_load_polymorphic_stubs()->Increment(); - PROFILE(isolate(), - CodeCreateEvent(Logger::KEYED_LOAD_MEGAMORPHIC_IC_TAG, *code, 0)); - return code; + PROFILE(isolate(), CodeCreateEvent( + Logger::KEYED_LOAD_MEGAMORPHIC_IC_TAG, + Code::cast(object), 0)); + return object; } @@ -1004,8 +1115,9 @@ MaybeObject* KeyedLoadIC::Load(State state, bool force_generic_stub) { // Check for values that can be converted into a symbol. // TODO(1295): Remove this code. 
+ HandleScope scope(isolate()); if (key->IsHeapNumber() && - isnan(Handle<HeapNumber>::cast(key)->value())) { + isnan(HeapNumber::cast(*key)->value())) { key = isolate()->factory()->nan_symbol(); } else if (key->IsUndefined()) { key = isolate()->factory()->undefined_symbol(); @@ -1027,11 +1139,16 @@ MaybeObject* KeyedLoadIC::Load(State state, if (object->IsString() && name->Equals(isolate()->heap()->length_symbol())) { Handle<String> string = Handle<String>::cast(object); - Handle<Code> code = - isolate()->stub_cache()->ComputeKeyedLoadStringLength(name, string); - ASSERT(!code.is_null()); - set_target(*code); - TRACE_IC("KeyedLoadIC", name, state, target()); + Object* code = NULL; + { MaybeObject* maybe_code = + isolate()->stub_cache()->ComputeKeyedLoadStringLength(*name, + *string); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + set_target(Code::cast(code)); +#ifdef DEBUG + TraceIC("KeyedLoadIC", name, state, target()); +#endif // DEBUG return Smi::FromInt(string->length()); } @@ -1039,25 +1156,34 @@ MaybeObject* KeyedLoadIC::Load(State state, if (object->IsJSArray() && name->Equals(isolate()->heap()->length_symbol())) { Handle<JSArray> array = Handle<JSArray>::cast(object); - Handle<Code> code = - isolate()->stub_cache()->ComputeKeyedLoadArrayLength(name, array); - ASSERT(!code.is_null()); - set_target(*code); - TRACE_IC("KeyedLoadIC", name, state, target()); - return array->length(); + Object* code; + { MaybeObject* maybe_code = + isolate()->stub_cache()->ComputeKeyedLoadArrayLength(*name, + *array); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + set_target(Code::cast(code)); +#ifdef DEBUG + TraceIC("KeyedLoadIC", name, state, target()); +#endif // DEBUG + return JSArray::cast(*object)->length(); } // Use specialized code for getting prototype of functions. if (object->IsJSFunction() && name->Equals(isolate()->heap()->prototype_symbol()) && - Handle<JSFunction>::cast(object)->should_have_prototype()) { + JSFunction::cast(*object)->should_have_prototype()) { Handle<JSFunction> function = Handle<JSFunction>::cast(object); - Handle<Code> code = - isolate()->stub_cache()->ComputeKeyedLoadFunctionPrototype( - name, function); - ASSERT(!code.is_null()); - set_target(*code); - TRACE_IC("KeyedLoadIC", name, state, target()); + Object* code; + { MaybeObject* maybe_code = + isolate()->stub_cache()->ComputeKeyedLoadFunctionPrototype( + *name, *function); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + set_target(Code::cast(code)); +#ifdef DEBUG + TraceIC("KeyedLoadIC", name, state, target()); +#endif // DEBUG return Accessors::FunctionGetPrototype(*object, 0); } } @@ -1066,14 +1192,15 @@ MaybeObject* KeyedLoadIC::Load(State state, // the element or char if so. uint32_t index = 0; if (name->AsArrayIndex(&index)) { + HandleScope scope(isolate()); // Rewrite to the generic keyed load stub. - if (FLAG_use_ic) set_target(*generic_stub()); + if (FLAG_use_ic) set_target(generic_stub()); return Runtime::GetElementOrCharAt(isolate(), object, index); } // Named lookup. - LookupResult lookup(isolate()); - LookupForRead(object, name, &lookup); + LookupResult lookup; + LookupForRead(*object, *name, &lookup); // If we did not find a property, check if we need to throw an exception. if (!lookup.IsProperty() && IsContextual(object)) { @@ -1087,15 +1214,17 @@ MaybeObject* KeyedLoadIC::Load(State state, PropertyAttributes attr; if (lookup.IsProperty() && lookup.type() == INTERCEPTOR) { // Get the property. 
- Handle<Object> result = - Object::GetProperty(object, object, &lookup, name, &attr); - RETURN_IF_EMPTY_HANDLE(isolate(), result); + Object* result; + { MaybeObject* maybe_result = + object->GetProperty(*object, &lookup, *name, &attr); + if (!maybe_result->ToObject(&result)) return maybe_result; + } // If the property is not present, check if we need to throw an // exception. if (attr == ABSENT && IsContextual(object)) { return ReferenceError("not_defined", name); } - return *result; + return result; } return object->GetProperty(*object, &lookup, *name, &attr); @@ -1106,38 +1235,44 @@ MaybeObject* KeyedLoadIC::Load(State state, bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded(); if (use_ic) { - Handle<Code> stub = generic_stub(); + Code* stub = generic_stub(); if (!force_generic_stub) { if (object->IsString() && key->IsNumber()) { if (state == UNINITIALIZED) { stub = string_stub(); } } else if (object->IsJSObject()) { - Handle<JSObject> receiver = Handle<JSObject>::cast(object); - if (receiver->elements()->map() == - isolate()->heap()->non_strict_arguments_elements_map()) { + JSObject* receiver = JSObject::cast(*object); + Heap* heap = Handle<JSObject>::cast(object)->GetHeap(); + Map* elements_map = Handle<JSObject>::cast(object)->elements()->map(); + if (elements_map == heap->non_strict_arguments_elements_map()) { stub = non_strict_arguments_stub(); } else if (receiver->HasIndexedInterceptor()) { stub = indexed_interceptor_stub(); - } else if (key->IsSmi() && (target() != *non_strict_arguments_stub())) { - stub = ComputeStub(receiver, LOAD, kNonStrictMode, stub); + } else if (key->IsSmi() && (target() != non_strict_arguments_stub())) { + MaybeObject* maybe_stub = ComputeStub(receiver, + LOAD, + kNonStrictMode, + stub); + stub = maybe_stub->IsFailure() ? + NULL : Code::cast(maybe_stub->ToObjectUnchecked()); } } } - if (!stub.is_null()) set_target(*stub); + if (stub != NULL) set_target(stub); } - TRACE_IC("KeyedLoadIC", key, state, target()); +#ifdef DEBUG + TraceIC("KeyedLoadIC", key, state, target()); +#endif // DEBUG // Get the property. return Runtime::GetObjectProperty(isolate(), object, key); } -void KeyedLoadIC::UpdateCaches(LookupResult* lookup, - State state, - Handle<Object> object, - Handle<String> name) { +void KeyedLoadIC::UpdateCaches(LookupResult* lookup, State state, + Handle<Object> object, Handle<String> name) { // Bail out if we didn't find a result. if (!lookup->IsProperty() || !lookup->IsCacheable()) return; @@ -1147,60 +1282,68 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup, if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return; // Compute the code stub for this load. - Handle<Code> code; + MaybeObject* maybe_code = NULL; + Object* code; if (state == UNINITIALIZED) { // This is the first time we execute this inline cache. // Set the target to the pre monomorphic stub to delay // setting the monomorphic state. - code = pre_monomorphic_stub(); + maybe_code = pre_monomorphic_stub(); } else { // Compute a monomorphic stub. 
- Handle<JSObject> holder(lookup->holder()); switch (lookup->type()) { - case FIELD: - code = isolate()->stub_cache()->ComputeKeyedLoadField( - name, receiver, holder, lookup->GetFieldIndex()); + case FIELD: { + maybe_code = isolate()->stub_cache()->ComputeKeyedLoadField( + *name, *receiver, lookup->holder(), lookup->GetFieldIndex()); break; + } case CONSTANT_FUNCTION: { - Handle<Object> constant(lookup->GetConstantFunction()); - code = isolate()->stub_cache()->ComputeKeyedLoadConstant( - name, receiver, holder, constant); + Object* constant = lookup->GetConstantFunction(); + maybe_code = isolate()->stub_cache()->ComputeKeyedLoadConstant( + *name, *receiver, lookup->holder(), constant); break; } case CALLBACKS: { - Handle<Object> callback_object(lookup->GetCallbackObject()); - if (!callback_object->IsAccessorInfo()) return; - Handle<AccessorInfo> callback = - Handle<AccessorInfo>::cast(callback_object); + if (!lookup->GetCallbackObject()->IsAccessorInfo()) return; + AccessorInfo* callback = + AccessorInfo::cast(lookup->GetCallbackObject()); if (v8::ToCData<Address>(callback->getter()) == 0) return; - code = isolate()->stub_cache()->ComputeKeyedLoadCallback( - name, receiver, holder, callback); + maybe_code = isolate()->stub_cache()->ComputeKeyedLoadCallback( + *name, *receiver, lookup->holder(), callback); break; } - case INTERCEPTOR: + case INTERCEPTOR: { ASSERT(HasInterceptorGetter(lookup->holder())); - code = isolate()->stub_cache()->ComputeKeyedLoadInterceptor( - name, receiver, holder); + maybe_code = isolate()->stub_cache()->ComputeKeyedLoadInterceptor( + *name, *receiver, lookup->holder()); break; - default: + } + default: { // Always rewrite to the generic case so that we do not // repeatedly try to rewrite. - code = generic_stub(); + maybe_code = generic_stub(); break; + } } } + // If we're unable to compute the stub (not enough memory left), we + // simply avoid updating the caches. + if (maybe_code == NULL || !maybe_code->ToObject(&code)) return; + // Patch the call site depending on the state of the cache. Make // sure to always rewrite from monomorphic to megamorphic. ASSERT(state != MONOMORPHIC_PROTOTYPE_FAILURE); if (state == UNINITIALIZED || state == PREMONOMORPHIC) { - set_target(*code); + set_target(Code::cast(code)); } else if (state == MONOMORPHIC) { - set_target(*megamorphic_stub()); + set_target(megamorphic_stub()); } - TRACE_IC("KeyedLoadIC", name, state, target()); +#ifdef DEBUG + TraceIC("KeyedLoadIC", name, state, target()); +#endif } @@ -1216,17 +1359,17 @@ static bool StoreICableLookup(LookupResult* lookup) { } -static bool LookupForWrite(Handle<JSObject> receiver, - Handle<String> name, +static bool LookupForWrite(JSObject* receiver, + String* name, LookupResult* lookup) { - receiver->LocalLookup(*name, lookup); + receiver->LocalLookup(name, lookup); if (!StoreICableLookup(lookup)) { return false; } if (lookup->type() == INTERCEPTOR && receiver->GetNamedInterceptor()->setter()->IsUndefined()) { - receiver->LocalLookupRealNamedProperty(*name, lookup); + receiver->LocalLookupRealNamedProperty(name, lookup); return StoreICableLookup(lookup); } @@ -1258,7 +1401,6 @@ MaybeObject* StoreIC::Store(State state, return TypeError("strict_read_only_property", object, name); } // Ignore other stores where the receiver is not a JSObject. - // TODO(1475): Must check prototype chains of object wrappers. return *value; } @@ -1267,30 +1409,31 @@ MaybeObject* StoreIC::Store(State state, // Check if the given name is an array index. 
uint32_t index; if (name->AsArrayIndex(&index)) { + HandleScope scope(isolate()); Handle<Object> result = SetElement(receiver, index, value, strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate(), result); + if (result.is_null()) return Failure::Exception(); return *value; } // Use specialized code for setting the length of arrays. if (receiver->IsJSArray() && name->Equals(isolate()->heap()->length_symbol()) - && Handle<JSArray>::cast(receiver)->AllowsSetElementsLength()) { + && JSArray::cast(*receiver)->AllowsSetElementsLength()) { #ifdef DEBUG if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n"); #endif - Handle<Code> stub = (strict_mode == kStrictMode) - ? isolate()->builtins()->StoreIC_ArrayLength_Strict() - : isolate()->builtins()->StoreIC_ArrayLength(); - set_target(*stub); + Builtins::Name target = (strict_mode == kStrictMode) + ? Builtins::kStoreIC_ArrayLength_Strict + : Builtins::kStoreIC_ArrayLength; + set_target(isolate()->builtins()->builtin(target)); return receiver->SetProperty(*name, *value, NONE, strict_mode); } // Lookup the property locally in the receiver. if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) { - LookupResult lookup(isolate()); + LookupResult lookup; - if (LookupForWrite(receiver, name, &lookup)) { + if (LookupForWrite(*receiver, *name, &lookup)) { // Generate a stub for this store. UpdateCaches(&lookup, state, strict_mode, receiver, name, value); } else { @@ -1307,15 +1450,16 @@ MaybeObject* StoreIC::Store(State state, } if (receiver->IsJSGlobalProxy()) { - // TODO(ulan): find out why we patch this site even with --no-use-ic // Generate a generic stub that goes to the runtime when we see a global // proxy as receiver. - Handle<Code> stub = (strict_mode == kStrictMode) + Code* stub = (strict_mode == kStrictMode) ? global_proxy_stub_strict() : global_proxy_stub(); - if (target() != *stub) { - set_target(*stub); - TRACE_IC("StoreIC", name, state, target()); + if (target() != stub) { + set_target(stub); +#ifdef DEBUG + TraceIC("StoreIC", name, state, target()); +#endif } } @@ -1343,82 +1487,89 @@ void StoreIC::UpdateCaches(LookupResult* lookup, // Compute the code stub for this store; used for rewriting to // monomorphic state and making sure that the code stub is in the // stub cache. - Handle<Code> code; + MaybeObject* maybe_code = NULL; + Object* code = NULL; switch (type) { - case FIELD: - code = isolate()->stub_cache()->ComputeStoreField(name, - receiver, - lookup->GetFieldIndex(), - Handle<Map>::null(), - strict_mode); + case FIELD: { + maybe_code = isolate()->stub_cache()->ComputeStoreField( + *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode); break; + } case MAP_TRANSITION: { if (lookup->GetAttributes() != NONE) return; + HandleScope scope(isolate()); ASSERT(type == MAP_TRANSITION); Handle<Map> transition(lookup->GetTransitionMap()); int index = transition->PropertyIndexFor(*name); - code = isolate()->stub_cache()->ComputeStoreField( - name, receiver, index, transition, strict_mode); + maybe_code = isolate()->stub_cache()->ComputeStoreField( + *name, *receiver, index, *transition, strict_mode); break; } - case NORMAL: + case NORMAL: { if (receiver->IsGlobalObject()) { // The stub generated for the global object picks the value directly // from the property cell. So the property must be directly on the // global object. 
Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver); - Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup)); - code = isolate()->stub_cache()->ComputeStoreGlobal( - name, global, cell, strict_mode); + JSGlobalPropertyCell* cell = + JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup)); + maybe_code = isolate()->stub_cache()->ComputeStoreGlobal( + *name, *global, cell, strict_mode); } else { if (lookup->holder() != *receiver) return; - code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode); + maybe_code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode); } break; + } case CALLBACKS: { - Handle<Object> callback_object(lookup->GetCallbackObject()); - if (!callback_object->IsAccessorInfo()) return; - Handle<AccessorInfo> callback = - Handle<AccessorInfo>::cast(callback_object); + if (!lookup->GetCallbackObject()->IsAccessorInfo()) return; + AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject()); if (v8::ToCData<Address>(callback->setter()) == 0) return; - code = isolate()->stub_cache()->ComputeStoreCallback( - name, receiver, callback, strict_mode); + maybe_code = isolate()->stub_cache()->ComputeStoreCallback( + *name, *receiver, callback, strict_mode); break; } - case INTERCEPTOR: + case INTERCEPTOR: { ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined()); - code = isolate()->stub_cache()->ComputeStoreInterceptor( - name, receiver, strict_mode); + maybe_code = isolate()->stub_cache()->ComputeStoreInterceptor( + *name, *receiver, strict_mode); break; + } default: return; } + // If we're unable to compute the stub (not enough memory left), we + // simply avoid updating the caches. + if (maybe_code == NULL || !maybe_code->ToObject(&code)) return; + // Patch the call site depending on the state of the cache. if (state == UNINITIALIZED || state == MONOMORPHIC_PROTOTYPE_FAILURE) { - set_target(*code); + set_target(Code::cast(code)); } else if (state == MONOMORPHIC) { // Only move to megamorphic if the target changes. - if (target() != *code) { + if (target() != Code::cast(code)) { set_target((strict_mode == kStrictMode) ? megamorphic_stub_strict() : megamorphic_stub()); } } else if (state == MEGAMORPHIC) { // Update the stub cache. 
- isolate()->stub_cache()->Set(*name, receiver->map(), *code); + isolate()->stub_cache()->Set(*name, + receiver->map(), + Code::cast(code)); } - TRACE_IC("StoreIC", name, state, target()); +#ifdef DEBUG + TraceIC("StoreIC", name, state, target()); +#endif } -static bool AddOneReceiverMapIfMissing(MapHandleList* receiver_maps, - Handle<Map> new_receiver_map) { - ASSERT(!new_receiver_map.is_null()); +static bool AddOneReceiverMapIfMissing(MapList* receiver_maps, + Map* new_receiver_map) { for (int current = 0; current < receiver_maps->length(); ++current) { - if (!receiver_maps->at(current).is_null() && - receiver_maps->at(current).is_identical_to(new_receiver_map)) { + if (receiver_maps->at(current) == new_receiver_map) { return false; } } @@ -1427,40 +1578,45 @@ static bool AddOneReceiverMapIfMissing(MapHandleList* receiver_maps, } -void KeyedIC::GetReceiverMapsForStub(Handle<Code> stub, - MapHandleList* result) { +void KeyedIC::GetReceiverMapsForStub(Code* stub, MapList* result) { ASSERT(stub->is_inline_cache_stub()); - if (!string_stub().is_null() && stub.is_identical_to(string_stub())) { - return result->Add(isolate()->factory()->string_map()); + if (stub == string_stub()) { + return result->Add(isolate()->heap()->string_map()); } else if (stub->is_keyed_load_stub() || stub->is_keyed_store_stub()) { if (stub->ic_state() == MONOMORPHIC) { - result->Add(Handle<Map>(stub->FindFirstMap())); + result->Add(Map::cast(stub->FindFirstMap())); } else { ASSERT(stub->ic_state() == MEGAMORPHIC); AssertNoAllocation no_allocation; int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); - for (RelocIterator it(*stub, mask); !it.done(); it.next()) { + for (RelocIterator it(stub, mask); !it.done(); it.next()) { RelocInfo* info = it.rinfo(); - Handle<Object> object(info->target_object()); + Object* object = info->target_object(); ASSERT(object->IsMap()); - AddOneReceiverMapIfMissing(result, Handle<Map>::cast(object)); + result->Add(Map::cast(object)); } } } } -Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver, +MaybeObject* KeyedIC::ComputeStub(JSObject* receiver, StubKind stub_kind, StrictModeFlag strict_mode, - Handle<Code> generic_stub) { + Code* generic_stub) { State ic_state = target()->ic_state(); if ((ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) && !IsTransitionStubKind(stub_kind)) { - return ComputeMonomorphicStub( - receiver, stub_kind, strict_mode, generic_stub); + Code* monomorphic_stub; + MaybeObject* maybe_stub = ComputeMonomorphicStub(receiver, + stub_kind, + strict_mode, + generic_stub); + if (!maybe_stub->To(&monomorphic_stub)) return maybe_stub; + + return monomorphic_stub; } - ASSERT(target() != *generic_stub); + ASSERT(target() != generic_stub); // Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS // via megamorphic stubs, since they don't have a map in their relocation info @@ -1471,17 +1627,18 @@ Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver, // Determine the list of receiver maps that this call site has seen, // adding the map that was just encountered. 
- MapHandleList target_receiver_maps; - Handle<Map> receiver_map(receiver->map()); + MapList target_receiver_maps; if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) { - target_receiver_maps.Add(receiver_map); + target_receiver_maps.Add(receiver->map()); } else { - GetReceiverMapsForStub(Handle<Code>(target()), &target_receiver_maps); + GetReceiverMapsForStub(target(), &target_receiver_maps); } bool map_added = - AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map); + AddOneReceiverMapIfMissing(&target_receiver_maps, receiver->map()); if (IsTransitionStubKind(stub_kind)) { - Handle<Map> new_map = ComputeTransitionedMap(receiver, stub_kind); + MaybeObject* maybe_map = ComputeTransitionedMap(receiver, stub_kind); + Map* new_map = NULL; + if (!maybe_map->To(&new_map)) return maybe_map; map_added |= AddOneReceiverMapIfMissing(&target_receiver_maps, new_map); } if (!map_added) { @@ -1496,24 +1653,31 @@ Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver, return generic_stub; } - Handle<PolymorphicCodeCache> cache = - isolate()->factory()->polymorphic_code_cache(); - Code::Flags flags = Code::ComputeFlags(kind(), MEGAMORPHIC, strict_mode); - Handle<Object> probe = cache->Lookup(&target_receiver_maps, flags); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - Handle<Code> stub = + PolymorphicCodeCache* cache = isolate()->heap()->polymorphic_code_cache(); + Code::Flags flags = Code::ComputeFlags(this->kind(), + MEGAMORPHIC, + strict_mode); + Object* maybe_cached_stub = cache->Lookup(&target_receiver_maps, flags); + // If there is a cached stub, use it. + if (!maybe_cached_stub->IsUndefined()) { + ASSERT(maybe_cached_stub->IsCode()); + return Code::cast(maybe_cached_stub); + } + MaybeObject* maybe_stub = ComputePolymorphicStub(&target_receiver_maps, strict_mode); - PolymorphicCodeCache::Update(cache, &target_receiver_maps, flags, stub); + Code* stub; + if (!maybe_stub->To(&stub)) return maybe_stub; + MaybeObject* maybe_update = cache->Update(&target_receiver_maps, flags, stub); + if (maybe_update->IsFailure()) return maybe_update; return stub; } -Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck( - Handle<Map> receiver_map, +MaybeObject* KeyedIC::ComputeMonomorphicStubWithoutMapCheck( + Map* receiver_map, StrictModeFlag strict_mode) { if ((receiver_map->instance_type() & kNotStringTag) == 0) { - ASSERT(!string_stub().is_null()); + ASSERT(string_stub() != NULL); return string_stub(); } else { ASSERT(receiver_map->has_dictionary_elements() || @@ -1528,78 +1692,137 @@ Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck( } -Handle<Code> KeyedIC::ComputeMonomorphicStub(Handle<JSObject> receiver, +MaybeObject* KeyedIC::ComputeMonomorphicStub(JSObject* receiver, StubKind stub_kind, StrictModeFlag strict_mode, - Handle<Code> generic_stub) { + Code* generic_stub) { + Code* result = NULL; if (receiver->HasFastElements() || receiver->HasFastSmiOnlyElements() || receiver->HasExternalArrayElements() || receiver->HasFastDoubleElements() || receiver->HasDictionaryElements()) { - return isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement( - receiver, stub_kind, strict_mode); + MaybeObject* maybe_stub = + isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement( + receiver, stub_kind, strict_mode); + if (!maybe_stub->To(&result)) return maybe_stub; } else { - return generic_stub; + result = generic_stub; } + return result; } -Handle<Map> KeyedIC::ComputeTransitionedMap(Handle<JSObject> receiver, - StubKind stub_kind) { +MaybeObject* 
KeyedIC::ComputeTransitionedMap(JSObject* receiver, + StubKind stub_kind) { switch (stub_kind) { case KeyedIC::STORE_TRANSITION_SMI_TO_OBJECT: case KeyedIC::STORE_TRANSITION_DOUBLE_TO_OBJECT: - return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS); - break; + return receiver->GetElementsTransitionMap(FAST_ELEMENTS); case KeyedIC::STORE_TRANSITION_SMI_TO_DOUBLE: - return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS); - break; + return receiver->GetElementsTransitionMap(FAST_DOUBLE_ELEMENTS); default: UNREACHABLE(); - return Handle<Map>::null(); + return NULL; } } -Handle<Code> KeyedStoreIC::GetElementStubWithoutMapCheck( +MaybeObject* KeyedStoreIC::GetElementStubWithoutMapCheck( bool is_js_array, ElementsKind elements_kind) { - return KeyedStoreElementStub(is_js_array, elements_kind).GetCode(); + return KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode(); +} + + +// If |map| is contained in |maps_list|, returns |map|; otherwise returns NULL. +Map* GetMapIfPresent(Map* map, MapList* maps_list) { + for (int i = 0; i < maps_list->length(); ++i) { + if (maps_list->at(i) == map) return map; + } + return NULL; +} + + +// Returns the most generic transitioned map for |map| that's found in +// |maps_list|, or NULL if no transitioned map for |map| is found at all. +Map* GetTransitionedMap(Map* map, MapList* maps_list) { + ElementsKind elements_kind = map->elements_kind(); + if (elements_kind == FAST_ELEMENTS) { + return NULL; + } + if (elements_kind == FAST_DOUBLE_ELEMENTS) { + bool dummy = true; + Map* fast_map = map->LookupElementsTransitionMap(FAST_ELEMENTS, &dummy); + if (fast_map == NULL) return NULL; + return GetMapIfPresent(fast_map, maps_list); + } + if (elements_kind == FAST_SMI_ONLY_ELEMENTS) { + bool dummy = true; + Map* double_map = map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, + &dummy); + // In the current implementation, if the DOUBLE map doesn't exist, the + // FAST map can't exist either. + if (double_map == NULL) return NULL; + Map* fast_map = map->LookupElementsTransitionMap(FAST_ELEMENTS, &dummy); + if (fast_map == NULL) { + return GetMapIfPresent(double_map, maps_list); + } + // Both double_map and fast_map are non-NULL. Return fast_map if it's in + // maps_list, double_map otherwise. + Map* fast_map_present = GetMapIfPresent(fast_map, maps_list); + if (fast_map_present != NULL) return fast_map_present; + return GetMapIfPresent(double_map, maps_list); + } + return NULL; } -Handle<Code> KeyedStoreIC::ComputePolymorphicStub(MapHandleList* receiver_maps, - StrictModeFlag strict_mode) { +MaybeObject* KeyedStoreIC::ComputePolymorphicStub( + MapList* receiver_maps, + StrictModeFlag strict_mode) { + // TODO(yangguo): <remove> + Code* generic_stub = (strict_mode == kStrictMode) + ? isolate()->builtins()->builtin(Builtins::kKeyedStoreIC_Generic_Strict) + : isolate()->builtins()->builtin(Builtins::kKeyedStoreIC_Generic); + // </remove> + // Collect MONOMORPHIC stubs for all target_receiver_maps. 
- CodeHandleList handler_ics(receiver_maps->length()); - MapHandleList transitioned_maps(receiver_maps->length()); + CodeList handler_ics(receiver_maps->length()); + MapList transitioned_maps(receiver_maps->length()); for (int i = 0; i < receiver_maps->length(); ++i) { - Handle<Map> receiver_map(receiver_maps->at(i)); - Handle<Code> cached_stub; - Handle<Map> transitioned_map = - receiver_map->FindTransitionedMap(receiver_maps); - if (!transitioned_map.is_null()) { - cached_stub = ElementsTransitionAndStoreStub( - receiver_map->elements_kind(), // original elements_kind - transitioned_map->elements_kind(), - receiver_map->instance_type() == JS_ARRAY_TYPE, // is_js_array - strict_mode).GetCode(); + Map* receiver_map(receiver_maps->at(i)); + MaybeObject* maybe_cached_stub = NULL; + Map* transitioned_map = GetTransitionedMap(receiver_map, receiver_maps); + if (transitioned_map != NULL) { + // TODO(yangguo): Enable this code! + // maybe_cached_stub = FastElementsConversionStub( + // receiver_map->elements_kind(), // original elements_kind + // transitioned_map->elements_kind(), + // receiver_map->instance_type() == JS_ARRAY_TYPE, // is_js_array + // strict_mode_).TryGetCode(); + // TODO(yangguo): <remove> + maybe_cached_stub = generic_stub; + // </remove> } else { - cached_stub = ComputeMonomorphicStubWithoutMapCheck(receiver_map, - strict_mode); + maybe_cached_stub = ComputeMonomorphicStubWithoutMapCheck( + receiver_map, strict_mode); } - ASSERT(!cached_stub.is_null()); + Code* cached_stub; + if (!maybe_cached_stub->To(&cached_stub)) return maybe_cached_stub; handler_ics.Add(cached_stub); transitioned_maps.Add(transitioned_map); } - KeyedStoreStubCompiler compiler(isolate(), strict_mode); - Handle<Code> code = compiler.CompileStorePolymorphic( + Object* object; + KeyedStoreStubCompiler compiler(strict_mode); + MaybeObject* maybe_code = compiler.CompileStorePolymorphic( receiver_maps, &handler_ics, &transitioned_maps); + if (!maybe_code->ToObject(&object)) return maybe_code; isolate()->counters()->keyed_store_polymorphic_stubs()->Increment(); - PROFILE(isolate(), - CodeCreateEvent(Logger::KEYED_STORE_MEGAMORPHIC_IC_TAG, *code, 0)); - return code; + PROFILE(isolate(), CodeCreateEvent( + Logger::KEYED_STORE_MEGAMORPHIC_IC_TAG, + Code::cast(object), 0)); + return object; } @@ -1612,12 +1835,6 @@ MaybeObject* KeyedStoreIC::Store(State state, if (key->IsSymbol()) { Handle<String> name = Handle<String>::cast(key); - // Handle proxies. - if (object->IsJSProxy()) { - return JSProxy::cast(*object)->SetProperty( - *name, *value, NONE, strict_mode); - } - // If the object is undefined or null it's illegal to try to set any // properties on it; throw a TypeError in that case. if (object->IsUndefined() || object->IsNull()) { @@ -1631,13 +1848,14 @@ MaybeObject* KeyedStoreIC::Store(State state, // Check if the given name is an array index. uint32_t index; if (name->AsArrayIndex(&index)) { + HandleScope scope(isolate()); Handle<Object> result = SetElement(receiver, index, value, strict_mode); - RETURN_IF_EMPTY_HANDLE(isolate(), result); + if (result.is_null()) return Failure::Exception(); return *value; } // Lookup the property locally in the receiver. - LookupResult lookup(isolate()); + LookupResult lookup; receiver->LocalLookup(*name, &lookup); // Update inline cache and stub cache. 
@@ -1655,16 +1873,17 @@ MaybeObject* KeyedStoreIC::Store(State state, ASSERT(!(use_ic && object->IsJSGlobalProxy())); if (use_ic) { - Handle<Code> stub = (strict_mode == kStrictMode) + Code* stub = (strict_mode == kStrictMode) ? generic_stub_strict() : generic_stub(); if (object->IsJSObject()) { - Handle<JSObject> receiver = Handle<JSObject>::cast(object); - if (receiver->elements()->map() == - isolate()->heap()->non_strict_arguments_elements_map()) { + JSObject* receiver = JSObject::cast(*object); + Heap* heap = Handle<JSObject>::cast(object)->GetHeap(); + Map* elements_map = Handle<JSObject>::cast(object)->elements()->map(); + if (elements_map == heap->non_strict_arguments_elements_map()) { stub = non_strict_arguments_stub(); } else if (!force_generic) { - if (key->IsSmi() && (target() != *non_strict_arguments_stub())) { + if (key->IsSmi() && (target() != non_strict_arguments_stub())) { StubKind stub_kind = STORE_NO_TRANSITION; if (receiver->GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) { if (value->IsHeapNumber()) { @@ -1677,14 +1896,22 @@ MaybeObject* KeyedStoreIC::Store(State state, stub_kind = STORE_TRANSITION_DOUBLE_TO_OBJECT; } } - stub = ComputeStub(receiver, stub_kind, strict_mode, stub); + HandleScope scope(isolate()); + MaybeObject* maybe_stub = ComputeStub(receiver, + stub_kind, + strict_mode, + stub); + stub = maybe_stub->IsFailure() ? + NULL : Code::cast(maybe_stub->ToObjectUnchecked()); } } } - if (!stub.is_null()) set_target(*stub); + if (stub != NULL) set_target(stub); } - TRACE_IC("KeyedStoreIC", key, state, target()); +#ifdef DEBUG + TraceIC("KeyedStoreIC", key, state, target()); +#endif // Set the property. return Runtime::SetObjectProperty( @@ -1716,60 +1943,75 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup, // Compute the code stub for this store; used for rewriting to // monomorphic state and making sure that the code stub is in the // stub cache. - Handle<Code> code; + MaybeObject* maybe_code = NULL; + Object* code = NULL; switch (type) { - case FIELD: - code = isolate()->stub_cache()->ComputeKeyedStoreField( - name, receiver, lookup->GetFieldIndex(), - Handle<Map>::null(), strict_mode); + case FIELD: { + maybe_code = isolate()->stub_cache()->ComputeKeyedStoreField( + *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode); break; - case MAP_TRANSITION: + } + case MAP_TRANSITION: { if (lookup->GetAttributes() == NONE) { + HandleScope scope(isolate()); ASSERT(type == MAP_TRANSITION); Handle<Map> transition(lookup->GetTransitionMap()); int index = transition->PropertyIndexFor(*name); - code = isolate()->stub_cache()->ComputeKeyedStoreField( - name, receiver, index, transition, strict_mode); + maybe_code = isolate()->stub_cache()->ComputeKeyedStoreField( + *name, *receiver, index, *transition, strict_mode); break; } // fall through. - default: + } + default: { // Always rewrite to the generic case so that we do not // repeatedly try to rewrite. - code = (strict_mode == kStrictMode) + maybe_code = (strict_mode == kStrictMode) ? generic_stub_strict() : generic_stub(); break; + } } - ASSERT(!code.is_null()); + // If we're unable to compute the stub (not enough memory left), we + // simply avoid updating the caches. + if (maybe_code == NULL || !maybe_code->ToObject(&code)) return; // Patch the call site depending on the state of the cache. Make // sure to always rewrite from monomorphic to megamorphic. 
ASSERT(state != MONOMORPHIC_PROTOTYPE_FAILURE); if (state == UNINITIALIZED || state == PREMONOMORPHIC) { - set_target(*code); + set_target(Code::cast(code)); } else if (state == MONOMORPHIC) { set_target((strict_mode == kStrictMode) - ? *megamorphic_stub_strict() - : *megamorphic_stub()); + ? megamorphic_stub_strict() + : megamorphic_stub()); } - TRACE_IC("KeyedStoreIC", name, state, target()); +#ifdef DEBUG + TraceIC("KeyedStoreIC", name, state, target()); +#endif } -#undef TRACE_IC - - // ---------------------------------------------------------------------------- // Static IC stub generators. // +static JSFunction* CompileFunction(Isolate* isolate, + JSFunction* function) { + // Compile now with optimization. + HandleScope scope(isolate); + Handle<JSFunction> function_handle(function, isolate); + CompileLazy(function_handle, CLEAR_EXCEPTION); + return *function_handle; +} + + // Used from ic-<arch>.cc. RUNTIME_FUNCTION(MaybeObject*, CallIC_Miss) { - HandleScope scope(isolate); + NoHandleAllocation na; ASSERT(args.length() == 2); CallIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); @@ -1778,46 +2020,45 @@ RUNTIME_FUNCTION(MaybeObject*, CallIC_Miss) { extra_ic_state, args.at<Object>(0), args.at<String>(1)); - // Result could be a function or a failure. - JSFunction* raw_function = NULL; - if (!maybe_result->To(&raw_function)) return maybe_result; + Object* result; + if (!maybe_result->ToObject(&result)) return maybe_result; // The first time the inline cache is updated may be the first time the - // function it references gets called. If the function is lazily compiled + // function it references gets called. If the function was lazily compiled // then the first call will trigger a compilation. We check for this case // and we do the compilation immediately, instead of waiting for the stub - // currently attached to the JSFunction object to trigger compilation. - if (raw_function->is_compiled()) return raw_function; - - Handle<JSFunction> function(raw_function); - JSFunction::CompileLazy(function, CLEAR_EXCEPTION); - return *function; + // currently attached to the JSFunction object to trigger compilation. We + // do this in the case where we know that the inline cache is inside a loop, + // because then we know that we want to optimize the function. + if (!result->IsJSFunction() || JSFunction::cast(result)->is_compiled()) { + return result; + } + return CompileFunction(isolate, JSFunction::cast(result)); } // Used from ic-<arch>.cc. RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_Miss) { - HandleScope scope(isolate); + NoHandleAllocation na; ASSERT(args.length() == 2); KeyedCallIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); - MaybeObject* maybe_result = + Object* result; + { MaybeObject* maybe_result = ic.LoadFunction(state, args.at<Object>(0), args.at<Object>(1)); - // Result could be a function or a failure. - JSFunction* raw_function = NULL; - if (!maybe_result->To(&raw_function)) return maybe_result; - - if (raw_function->is_compiled()) return raw_function; + if (!maybe_result->ToObject(&result)) return maybe_result; + } - Handle<JSFunction> function(raw_function); - JSFunction::CompileLazy(function, CLEAR_EXCEPTION); - return *function; + if (!result->IsJSFunction() || JSFunction::cast(result)->is_compiled()) { + return result; + } + return CompileFunction(isolate, JSFunction::cast(result)); } // Used from ic-<arch>.cc. 
RUNTIME_FUNCTION(MaybeObject*, LoadIC_Miss) { - HandleScope scope(isolate); + NoHandleAllocation na; ASSERT(args.length() == 2); LoadIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); @@ -1827,7 +2068,7 @@ RUNTIME_FUNCTION(MaybeObject*, LoadIC_Miss) { // Used from ic-<arch>.cc RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) { - HandleScope scope(isolate); + NoHandleAllocation na; ASSERT(args.length() == 2); KeyedLoadIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); @@ -1836,7 +2077,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) { RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissForceGeneric) { - HandleScope scope(isolate); + NoHandleAllocation na; ASSERT(args.length() == 2); KeyedLoadIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); @@ -1846,7 +2087,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissForceGeneric) { // Used from ic-<arch>.cc. RUNTIME_FUNCTION(MaybeObject*, StoreIC_Miss) { - HandleScope scope; + NoHandleAllocation na; ASSERT(args.length() == 3); StoreIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); @@ -1915,7 +2156,7 @@ RUNTIME_FUNCTION(MaybeObject*, SharedStoreIC_ExtendStorage) { // Used from ic-<arch>.cc. RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) { - HandleScope scope(isolate); + NoHandleAllocation na; ASSERT(args.length() == 3); KeyedStoreIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); @@ -1949,7 +2190,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) { RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) { - HandleScope scope(isolate); + NoHandleAllocation na; ASSERT(args.length() == 3); KeyedStoreIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); diff --git a/deps/v8/src/ic.h b/deps/v8/src/ic.h index 81aa6b7c2..ca8447eb8 100644 --- a/deps/v8/src/ic.h +++ b/deps/v8/src/ic.h @@ -198,60 +198,47 @@ class CallICBase: public IC { class Contextual: public BitField<bool, 0, 1> {}; class StringStubState: public BitField<StringStubFeedback, 1, 1> {}; - // Returns a JSFunction or a Failure. + protected: + CallICBase(Code::Kind kind, Isolate* isolate) + : IC(EXTRA_CALL_FRAME, isolate), kind_(kind) {} + + public: MUST_USE_RESULT MaybeObject* LoadFunction(State state, Code::ExtraICState extra_ic_state, Handle<Object> object, Handle<String> name); protected: - CallICBase(Code::Kind kind, Isolate* isolate) - : IC(EXTRA_CALL_FRAME, isolate), kind_(kind) {} + Code::Kind kind_; bool TryUpdateExtraICState(LookupResult* lookup, Handle<Object> object, Code::ExtraICState* extra_ic_state); - // Compute a monomorphic stub if possible, otherwise return a null handle. - Handle<Code> ComputeMonomorphicStub(LookupResult* lookup, - State state, - Code::ExtraICState extra_state, - Handle<Object> object, - Handle<String> name); + MUST_USE_RESULT MaybeObject* ComputeMonomorphicStub( + LookupResult* lookup, + State state, + Code::ExtraICState extra_ic_state, + Handle<Object> object, + Handle<String> name); - // Update the inline cache and the global stub cache based on the lookup - // result. + // Update the inline cache and the global stub cache based on the + // lookup result. void UpdateCaches(LookupResult* lookup, State state, Code::ExtraICState extra_ic_state, Handle<Object> object, Handle<String> name); - // Returns a JSFunction if the object can be called as a function, and - // patches the stack to be ready for the call. Otherwise, it returns the - // undefined value. 
- Handle<Object> TryCallAsFunction(Handle<Object> object); + // Returns a JSFunction if the object can be called as a function, + // and patches the stack to be ready for the call. + // Otherwise, it returns the undefined value. + Object* TryCallAsFunction(Object* object); void ReceiverToObjectIfRequired(Handle<Object> callee, Handle<Object> object); static void Clear(Address address, Code* target); - // Platform-specific code generation functions used by both call and - // keyed call. - static void GenerateMiss(MacroAssembler* masm, - int argc, - IC::UtilityId id, - Code::ExtraICState extra_state); - - static void GenerateNormal(MacroAssembler* masm, int argc); - - static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, - int argc, - Code::Kind kind, - Code::ExtraICState extra_state); - - Code::Kind kind_; - friend class IC; }; @@ -265,24 +252,16 @@ class CallIC: public CallICBase { // Code generator routines. static void GenerateInitialize(MacroAssembler* masm, int argc, - Code::ExtraICState extra_state) { - GenerateMiss(masm, argc, extra_state); + Code::ExtraICState extra_ic_state) { + GenerateMiss(masm, argc, extra_ic_state); } - static void GenerateMiss(MacroAssembler* masm, int argc, - Code::ExtraICState extra_state) { - CallICBase::GenerateMiss(masm, argc, IC::kCallIC_Miss, extra_state); - } - + Code::ExtraICState extra_ic_state); static void GenerateMegamorphic(MacroAssembler* masm, int argc, Code::ExtraICState extra_ic_state); - - static void GenerateNormal(MacroAssembler* masm, int argc) { - CallICBase::GenerateNormal(masm, argc); - GenerateMiss(masm, argc, Code::kNoExtraICState); - } + static void GenerateNormal(MacroAssembler* masm, int argc); }; @@ -301,12 +280,7 @@ class KeyedCallIC: public CallICBase { static void GenerateInitialize(MacroAssembler* masm, int argc) { GenerateMiss(masm, argc); } - - static void GenerateMiss(MacroAssembler* masm, int argc) { - CallICBase::GenerateMiss(masm, argc, IC::kKeyedCallIC_Miss, - Code::kNoExtraICState); - } - + static void GenerateMiss(MacroAssembler* masm, int argc); static void GenerateMegamorphic(MacroAssembler* masm, int argc); static void GenerateNormal(MacroAssembler* masm, int argc); static void GenerateNonStrictArguments(MacroAssembler* masm, int argc); @@ -347,15 +321,17 @@ class LoadIC: public IC { Handle<String> name); // Stub accessors. 
- Handle<Code> megamorphic_stub() { - return isolate()->builtins()->LoadIC_Megamorphic(); + Code* megamorphic_stub() { + return isolate()->builtins()->builtin( + Builtins::kLoadIC_Megamorphic); } static Code* initialize_stub() { return Isolate::Current()->builtins()->builtin( Builtins::kLoadIC_Initialize); } - Handle<Code> pre_monomorphic_stub() { - return isolate()->builtins()->LoadIC_PreMonomorphic(); + Code* pre_monomorphic_stub() { + return isolate()->builtins()->builtin( + Builtins::kLoadIC_PreMonomorphic); } static void Clear(Address address, Code* target); @@ -376,39 +352,38 @@ class KeyedIC: public IC { explicit KeyedIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {} virtual ~KeyedIC() {} - virtual Handle<Code> GetElementStubWithoutMapCheck( + virtual MaybeObject* GetElementStubWithoutMapCheck( bool is_js_array, ElementsKind elements_kind) = 0; protected: - virtual Handle<Code> string_stub() { - return Handle<Code>::null(); + virtual Code* string_stub() { + return NULL; } virtual Code::Kind kind() const = 0; - Handle<Code> ComputeStub(Handle<JSObject> receiver, + MaybeObject* ComputeStub(JSObject* receiver, StubKind stub_kind, StrictModeFlag strict_mode, - Handle<Code> default_stub); + Code* default_stub); - virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps, + virtual MaybeObject* ComputePolymorphicStub(MapList* receiver_maps, StrictModeFlag strict_mode) = 0; - Handle<Code> ComputeMonomorphicStubWithoutMapCheck( - Handle<Map> receiver_map, + MaybeObject* ComputeMonomorphicStubWithoutMapCheck( + Map* receiver_map, StrictModeFlag strict_mode); private: - void GetReceiverMapsForStub(Handle<Code> stub, MapHandleList* result); + void GetReceiverMapsForStub(Code* stub, MapList* result); - Handle<Code> ComputeMonomorphicStub(Handle<JSObject> receiver, + MaybeObject* ComputeMonomorphicStub(JSObject* receiver, StubKind stub_kind, StrictModeFlag strict_mode, - Handle<Code> default_stub); + Code* default_stub); - Handle<Map> ComputeTransitionedMap(Handle<JSObject> receiver, - StubKind stub_kind); + MaybeObject* ComputeTransitionedMap(JSObject* receiver, StubKind stub_kind); static bool IsTransitionStubKind(StubKind stub_kind) { return stub_kind > STORE_NO_TRANSITION; @@ -448,18 +423,20 @@ class KeyedLoadIC: public KeyedIC { static const int kSlowCaseBitFieldMask = (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); - virtual Handle<Code> GetElementStubWithoutMapCheck( + virtual MaybeObject* GetElementStubWithoutMapCheck( bool is_js_array, ElementsKind elements_kind); protected: virtual Code::Kind kind() const { return Code::KEYED_LOAD_IC; } - virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps, - StrictModeFlag strict_mode); + virtual MaybeObject* ComputePolymorphicStub( + MapList* receiver_maps, + StrictModeFlag strict_mode); - virtual Handle<Code> string_stub() { - return isolate()->builtins()->KeyedLoadIC_String(); + virtual Code* string_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedLoadIC_String); } private: @@ -474,20 +451,25 @@ class KeyedLoadIC: public KeyedIC { return Isolate::Current()->builtins()->builtin( Builtins::kKeyedLoadIC_Initialize); } - Handle<Code> megamorphic_stub() { - return isolate()->builtins()->KeyedLoadIC_Generic(); + Code* megamorphic_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedLoadIC_Generic); } - Handle<Code> generic_stub() { - return isolate()->builtins()->KeyedLoadIC_Generic(); + Code* generic_stub() { + return isolate()->builtins()->builtin( + 
Builtins::kKeyedLoadIC_Generic); } - Handle<Code> pre_monomorphic_stub() { - return isolate()->builtins()->KeyedLoadIC_PreMonomorphic(); + Code* pre_monomorphic_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedLoadIC_PreMonomorphic); } - Handle<Code> indexed_interceptor_stub() { - return isolate()->builtins()->KeyedLoadIC_IndexedInterceptor(); + Code* indexed_interceptor_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedLoadIC_IndexedInterceptor); } - Handle<Code> non_strict_arguments_stub() { - return isolate()->builtins()->KeyedLoadIC_NonStrictArguments(); + Code* non_strict_arguments_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedLoadIC_NonStrictArguments); } static void Clear(Address address, Code* target); @@ -552,11 +534,13 @@ class StoreIC: public IC { return Isolate::Current()->builtins()->builtin( Builtins::kStoreIC_Initialize_Strict); } - Handle<Code> global_proxy_stub() { - return isolate()->builtins()->StoreIC_GlobalProxy(); + Code* global_proxy_stub() { + return isolate()->builtins()->builtin( + Builtins::kStoreIC_GlobalProxy); } - Handle<Code> global_proxy_stub_strict() { - return isolate()->builtins()->StoreIC_GlobalProxy_Strict(); + Code* global_proxy_stub_strict() { + return isolate()->builtins()->builtin( + Builtins::kStoreIC_GlobalProxy_Strict); } static void Clear(Address address, Code* target); @@ -588,18 +572,17 @@ class KeyedStoreIC: public KeyedIC { StrictModeFlag strict_mode); static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode); static void GenerateNonStrictArguments(MacroAssembler* masm); - static void GenerateTransitionElementsSmiToDouble(MacroAssembler* masm); - static void GenerateTransitionElementsDoubleToObject(MacroAssembler* masm); - virtual Handle<Code> GetElementStubWithoutMapCheck( + virtual MaybeObject* GetElementStubWithoutMapCheck( bool is_js_array, ElementsKind elements_kind); protected: virtual Code::Kind kind() const { return Code::KEYED_STORE_IC; } - virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps, - StrictModeFlag strict_mode); + virtual MaybeObject* ComputePolymorphicStub( + MapList* receiver_maps, + StrictModeFlag strict_mode); private: // Update the inline cache. 
@@ -622,24 +605,29 @@ class KeyedStoreIC: public KeyedIC { return Isolate::Current()->builtins()->builtin( Builtins::kKeyedStoreIC_Initialize); } + Code* megamorphic_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedStoreIC_Generic); + } static Code* initialize_stub_strict() { return Isolate::Current()->builtins()->builtin( Builtins::kKeyedStoreIC_Initialize_Strict); } - Handle<Code> megamorphic_stub() { - return isolate()->builtins()->KeyedStoreIC_Generic(); - } - Handle<Code> megamorphic_stub_strict() { - return isolate()->builtins()->KeyedStoreIC_Generic_Strict(); + Code* megamorphic_stub_strict() { + return isolate()->builtins()->builtin( + Builtins::kKeyedStoreIC_Generic_Strict); } - Handle<Code> generic_stub() { - return isolate()->builtins()->KeyedStoreIC_Generic(); + Code* generic_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedStoreIC_Generic); } - Handle<Code> generic_stub_strict() { - return isolate()->builtins()->KeyedStoreIC_Generic_Strict(); + Code* generic_stub_strict() { + return isolate()->builtins()->builtin( + Builtins::kKeyedStoreIC_Generic_Strict); } - Handle<Code> non_strict_arguments_stub() { - return isolate()->builtins()->KeyedStoreIC_NonStrictArguments(); + Code* non_strict_arguments_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedStoreIC_NonStrictArguments); } static void Clear(Address address, Code* target); diff --git a/deps/v8/src/incremental-marking-inl.h b/deps/v8/src/incremental-marking-inl.h index 2a7fba756..43fe0f553 100644 --- a/deps/v8/src/incremental-marking-inl.h +++ b/deps/v8/src/incremental-marking-inl.h @@ -143,6 +143,9 @@ void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) { void IncrementalMarking::WhiteToGrey(HeapObject* obj, MarkBit mark_bit) { + ASSERT(Marking::MarkBitFrom(obj) == mark_bit); + ASSERT(obj->Size() >= 2*kPointerSize); + ASSERT(IsMarking()); Marking::WhiteToGrey(mark_bit); } diff --git a/deps/v8/src/incremental-marking.cc b/deps/v8/src/incremental-marking.cc index 68b830a4d..88ebd783e 100644 --- a/deps/v8/src/incremental-marking.cc +++ b/deps/v8/src/incremental-marking.cc @@ -50,8 +50,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap) steps_took_since_last_gc_(0), should_hurry_(false), allocation_marking_factor_(0), - allocated_(0), - no_marking_scope_depth_(0) { + allocated_(0) { } @@ -88,16 +87,6 @@ void IncrementalMarking::RecordWriteForEvacuationFromCode(HeapObject* obj, } -void IncrementalMarking::RecordCodeTargetPatch(Code* host, - Address pc, - HeapObject* value) { - if (IsMarking()) { - RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); - RecordWriteIntoCode(host, &rinfo, value); - } -} - - void IncrementalMarking::RecordCodeTargetPatch(Address pc, HeapObject* value) { if (IsMarking()) { Code* host = heap_->isolate()->inner_pointer_to_code_cache()-> @@ -354,8 +343,7 @@ bool IncrementalMarking::WorthActivating() { static const intptr_t kActivationThreshold = 0; #endif - return !FLAG_expose_gc && - FLAG_incremental_marking && + return FLAG_incremental_marking && !Serializer::enabled() && heap_->PromotedSpaceSize() > kActivationThreshold; } @@ -473,9 +461,7 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) { #ifdef DEBUG // Marking bits are cleared by the sweeper. 
- if (FLAG_verify_heap) { - heap_->mark_compact_collector()->VerifyMarkbitsAreClean(); - } + heap_->mark_compact_collector()->VerifyMarkbitsAreClean(); #endif heap_->CompletelyClearInstanceofCache(); @@ -706,8 +692,6 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) { if (allocated_ < kAllocatedThreshold) return; - if (state_ == MARKING && no_marking_scope_depth_ > 0) return; - intptr_t bytes_to_process = allocated_ * allocation_marking_factor_; double start = 0; @@ -755,8 +739,8 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) { } MarkBit obj_mark_bit = Marking::MarkBitFrom(obj); - SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) || - (obj->IsFiller() && Marking::IsWhite(obj_mark_bit))); + ASSERT(Marking::IsGrey(obj_mark_bit) || + (obj->IsFiller() && Marking::IsWhite(obj_mark_bit))); Marking::MarkBlack(obj_mark_bit); MemoryChunk::IncrementLiveBytes(obj->address(), size); } diff --git a/deps/v8/src/incremental-marking.h b/deps/v8/src/incremental-marking.h index fa7337b78..d1627bcba 100644 --- a/deps/v8/src/incremental-marking.h +++ b/deps/v8/src/incremental-marking.h @@ -127,7 +127,6 @@ class IncrementalMarking { inline void RecordWriteIntoCode(HeapObject* obj, RelocInfo* rinfo, Object* value); - void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value); void RecordCodeTargetPatch(Address pc, HeapObject* value); void RecordWriteOfCodeEntry(JSFunction* host, Object** slot, Code* value); @@ -198,14 +197,6 @@ class IncrementalMarking { } } - void EnterNoMarkingScope() { - no_marking_scope_depth_++; - } - - void LeaveNoMarkingScope() { - no_marking_scope_depth_--; - } - private: void set_should_hurry(bool val) { should_hurry_ = val; @@ -257,8 +248,6 @@ class IncrementalMarking { int allocation_marking_factor_; intptr_t allocated_; - int no_marking_scope_depth_; - DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking); }; diff --git a/deps/v8/src/interpreter-irregexp.cc b/deps/v8/src/interpreter-irregexp.cc index b337e8845..796a447e2 100644 --- a/deps/v8/src/interpreter-irregexp.cc +++ b/deps/v8/src/interpreter-irregexp.cc @@ -33,9 +33,9 @@ #include "utils.h" #include "ast.h" #include "bytecodes-irregexp.h" -#include "jsregexp.h" #include "interpreter-irregexp.h" + namespace v8 { namespace internal { @@ -187,12 +187,12 @@ class BacktrackStack { template <typename Char> -static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate, - const byte* code_base, - Vector<const Char> subject, - int* registers, - int current, - uint32_t current_char) { +static bool RawMatch(Isolate* isolate, + const byte* code_base, + Vector<const Char> subject, + int* registers, + int current, + uint32_t current_char) { const byte* pc = code_base; // BacktrackStack ensures that the memory allocated for the backtracking stack // is returned to the system or cached if there is no stack being cached at @@ -211,24 +211,24 @@ static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate, switch (insn & BYTECODE_MASK) { BYTECODE(BREAK) UNREACHABLE(); - return RegExpImpl::RE_FAILURE; + return false; BYTECODE(PUSH_CP) if (--backtrack_stack_space < 0) { - return RegExpImpl::RE_EXCEPTION; + return false; // No match on backtrack stack overflow. } *backtrack_sp++ = current; pc += BC_PUSH_CP_LENGTH; break; BYTECODE(PUSH_BT) if (--backtrack_stack_space < 0) { - return RegExpImpl::RE_EXCEPTION; + return false; // No match on backtrack stack overflow. 
} *backtrack_sp++ = Load32Aligned(pc + 4); pc += BC_PUSH_BT_LENGTH; break; BYTECODE(PUSH_REGISTER) if (--backtrack_stack_space < 0) { - return RegExpImpl::RE_EXCEPTION; + return false; // No match on backtrack stack overflow. } *backtrack_sp++ = registers[insn >> BYTECODE_SHIFT]; pc += BC_PUSH_REGISTER_LENGTH; @@ -278,9 +278,9 @@ static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate, pc += BC_POP_REGISTER_LENGTH; break; BYTECODE(FAIL) - return RegExpImpl::RE_FAILURE; + return false; BYTECODE(SUCCEED) - return RegExpImpl::RE_SUCCESS; + return true; BYTECODE(ADVANCE_CP) current += insn >> BYTECODE_SHIFT; pc += BC_ADVANCE_CP_LENGTH; @@ -625,12 +625,11 @@ static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate, } -RegExpImpl::IrregexpResult IrregexpInterpreter::Match( - Isolate* isolate, - Handle<ByteArray> code_array, - Handle<String> subject, - int* registers, - int start_position) { +bool IrregexpInterpreter::Match(Isolate* isolate, + Handle<ByteArray> code_array, + Handle<String> subject, + int* registers, + int start_position) { ASSERT(subject->IsFlat()); AssertNoAllocation a; diff --git a/deps/v8/src/interpreter-irregexp.h b/deps/v8/src/interpreter-irregexp.h index 0f45d9820..076f0c508 100644 --- a/deps/v8/src/interpreter-irregexp.h +++ b/deps/v8/src/interpreter-irregexp.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2008 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -36,11 +36,11 @@ namespace internal { class IrregexpInterpreter { public: - static RegExpImpl::IrregexpResult Match(Isolate* isolate, - Handle<ByteArray> code, - Handle<String> subject, - int* captures, - int start_position); + static bool Match(Isolate* isolate, + Handle<ByteArray> code, + Handle<String> subject, + int* captures, + int start_position); }; diff --git a/deps/v8/src/isolate.cc b/deps/v8/src/isolate.cc index a073af9c3..492694e60 100644 --- a/deps/v8/src/isolate.cc +++ b/deps/v8/src/isolate.cc @@ -98,7 +98,6 @@ void ThreadLocalTop::InitializeInternal() { failed_access_check_callback_ = NULL; save_context_ = NULL; catcher_ = NULL; - top_lookup_result_ = NULL; // These members are re-initialized later after deserialization // is complete. @@ -481,9 +480,6 @@ void Isolate::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) { for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) { it.frame()->Iterate(v); } - - // Iterate pointers in live lookup results. - thread->top_lookup_result_->Iterate(v); } @@ -1072,16 +1068,6 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) { message_obj = MessageHandler::MakeMessageObject("uncaught_exception", location, HandleVector<Object>(&exception_handle, 1), stack_trace, stack_trace_object); - } else if (location != NULL && !location->script().is_null()) { - // We are bootstrapping and caught an error where the location is set - // and we have a script for the location. - // In this case we could have an extension (or an internal error - // somewhere) and we print out the line number at which the error occured - // to the console for easier debugging. 
- int line_number = GetScriptLineNumberSafe(location->script(), - location->start_pos()); - OS::PrintError("Extension or internal compilation error at line %d.\n", - line_number); } } diff --git a/deps/v8/src/isolate.h b/deps/v8/src/isolate.h index 5453bf249..01ab04e60 100644 --- a/deps/v8/src/isolate.h +++ b/deps/v8/src/isolate.h @@ -255,9 +255,6 @@ class ThreadLocalTop BASE_EMBEDDED { // Call back function to report unsafe JS accesses. v8::FailedAccessCheckCallback failed_access_check_callback_; - // Head of the list of live LookupResults. - LookupResult* top_lookup_result_; - // Whether out of memory exceptions should be ignored. bool ignore_out_of_memory_; @@ -314,6 +311,7 @@ class HashMap; V(int, bad_char_shift_table, kUC16AlphabetSize) \ V(int, good_suffix_shift_table, (kBMMaxShift + 1)) \ V(int, suffix_table, (kBMMaxShift + 1)) \ + V(uint32_t, random_seed, 2) \ V(uint32_t, private_random_seed, 2) \ ISOLATE_INIT_DEBUG_ARRAY_LIST(V) @@ -997,13 +995,6 @@ class Isolate { void SetData(void* data) { embedder_data_ = data; } void* GetData() { return embedder_data_; } - LookupResult* top_lookup_result() { - return thread_local_top_.top_lookup_result_; - } - void SetTopLookupResult(LookupResult* top) { - thread_local_top_.top_lookup_result_ = top; - } - private: Isolate(); diff --git a/deps/v8/src/jsregexp.cc b/deps/v8/src/jsregexp.cc index 18ff2570e..c1a9e067c 100644 --- a/deps/v8/src/jsregexp.cc +++ b/deps/v8/src/jsregexp.cc @@ -509,16 +509,14 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce( } Handle<ByteArray> byte_codes(IrregexpByteCode(*irregexp, is_ascii), isolate); - IrregexpResult result = IrregexpInterpreter::Match(isolate, - byte_codes, - subject, - register_vector, - index); - if (result == RE_EXCEPTION) { - ASSERT(!isolate->has_pending_exception()); - isolate->StackOverflow(); - } - return result; + if (IrregexpInterpreter::Match(isolate, + byte_codes, + subject, + register_vector, + index)) { + return RE_SUCCESS; + } + return RE_FAILURE; #endif // V8_INTERPRETED_REGEXP } diff --git a/deps/v8/src/list-inl.h b/deps/v8/src/list-inl.h index e2c358cee..80bccc9bc 100644 --- a/deps/v8/src/list-inl.h +++ b/deps/v8/src/list-inl.h @@ -216,11 +216,11 @@ int SortedListBSearch( int mid = (low + high) / 2; T mid_elem = list[mid]; - if (cmp(&mid_elem, &elem) > 0) { + if (mid_elem > elem) { high = mid - 1; continue; } - if (cmp(&mid_elem, &elem) < 0) { + if (mid_elem < elem) { low = mid + 1; continue; } @@ -236,7 +236,6 @@ int SortedListBSearch(const List<T>& list, T elem) { return SortedListBSearch<T>(list, elem, PointerValueCompare<T>); } - } } // namespace v8::internal #endif // V8_LIST_INL_H_ diff --git a/deps/v8/src/list.h b/deps/v8/src/list.h index 57504e075..055870904 100644 --- a/deps/v8/src/list.h +++ b/deps/v8/src/list.h @@ -165,11 +165,8 @@ class List { class Map; class Code; -template<typename T> class Handle; typedef List<Map*> MapList; typedef List<Code*> CodeList; -typedef List<Handle<Map> > MapHandleList; -typedef List<Handle<Code> > CodeHandleList; // Perform binary search for an element in an already sorted // list. Returns the index of the element of -1 if it was not found. 
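The interpreter-irregexp.cc and jsregexp.cc hunks a little above fold the 3.7.1 tri-state result (RE_SUCCESS, RE_FAILURE, RE_EXCEPTION) back into a plain bool, so a backtrack-stack overflow is reported to the caller as an ordinary failed match instead of becoming a pending stack-overflow exception. A minimal sketch of that distinction, using invented stand-ins rather than the real RegExpImpl types:

#include <cstdio>

// Invented stand-ins; the real enum lives on RegExpImpl in V8.
enum class MatchResult { kSuccess, kFailure, kException };

// Tri-state: the caller can tell "no match" apart from "ran out of stack".
MatchResult MatchTriState(int stack_budget, bool pattern_matches) {
  if (stack_budget <= 0) return MatchResult::kException;
  return pattern_matches ? MatchResult::kSuccess : MatchResult::kFailure;
}

// Boolean version: overflow and mismatch are indistinguishable to the caller.
bool MatchBool(int stack_budget, bool pattern_matches) {
  if (stack_budget <= 0) return false;  // silently reported as "no match"
  return pattern_matches;
}

int main() {
  std::printf("tri-state on overflow: %d\n",
              static_cast<int>(MatchTriState(0, true)));      // kException
  std::printf("boolean on overflow: %d\n", MatchBool(0, true));  // false
  return 0;
}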
@@ -179,7 +176,6 @@ int SortedListBSearch( template <typename T> int SortedListBSearch(const List<T>& list, T elem); - } } // namespace v8::internal diff --git a/deps/v8/src/liveobjectlist.cc b/deps/v8/src/liveobjectlist.cc index 408e2a316..d62c4d176 100644 --- a/deps/v8/src/liveobjectlist.cc +++ b/deps/v8/src/liveobjectlist.cc @@ -1085,7 +1085,7 @@ void LiveObjectList::SortAll() { static int CountHeapObjects() { int count = 0; // Iterate over all the heap spaces and count the number of objects. - HeapIterator iterator; + HeapIterator iterator(HeapIterator::kFilterFreeListNodes); HeapObject* heap_obj = NULL; while ((heap_obj = iterator.next()) != NULL) { count++; @@ -1122,7 +1122,7 @@ MaybeObject* LiveObjectList::Capture() { // allocation, and we need allocate below. { // Iterate over all the heap spaces and add the objects. - HeapIterator iterator; + HeapIterator iterator(HeapIterator::kFilterFreeListNodes); HeapObject* heap_obj = NULL; bool failed = false; while (!failed && (heap_obj = iterator.next()) != NULL) { @@ -2513,7 +2513,7 @@ void LiveObjectList::Verify(bool match_heap_exactly) { OS::Print(" Start verify ...\n"); OS::Print(" Verifying ..."); Flush(); - HeapIterator iterator; + HeapIterator iterator(HeapIterator::kFilterFreeListNodes); HeapObject* heap_obj = NULL; while ((heap_obj = iterator.next()) != NULL) { number_of_heap_objects++; diff --git a/deps/v8/src/macros.py b/deps/v8/src/macros.py index a42e83c60..7a493ca70 100644 --- a/deps/v8/src/macros.py +++ b/deps/v8/src/macros.py @@ -128,11 +128,6 @@ macro IS_SPEC_OBJECT(arg) = (%_IsSpecObject(arg)); # we cannot handle those anyway. macro IS_SPEC_FUNCTION(arg) = (%_ClassOf(arg) === 'Function'); -# Indices in bound function info retrieved by %BoundFunctionGetBindings(...). -const kBoundFunctionIndex = 0; -const kBoundThisIndex = 1; -const kBoundArgumentsStartIndex = 2; - # Inline macros. Use %IS_VAR to make sure arg is evaluated only once. 
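The list-inl.h hunk above reverts SortedListBSearch from a three-way comparator taking element pointers back to direct < and > comparisons. For reference, a generic comparator-based version looks roughly like the sketch below (standalone std::vector code, not V8's List type):

#include <cstdio>
#include <vector>

// Binary search over a sorted vector using a three-way comparator that takes
// pointers to elements, mirroring the cmp(&mid_elem, &elem) style.
template <typename T>
int SortedBSearch(const std::vector<T>& list, const T& elem,
                  int (*cmp)(const T*, const T*)) {
  int low = 0;
  int high = static_cast<int>(list.size()) - 1;
  while (low <= high) {
    int mid = low + (high - low) / 2;
    const T& mid_elem = list[mid];
    if (cmp(&mid_elem, &elem) > 0) { high = mid - 1; continue; }
    if (cmp(&mid_elem, &elem) < 0) { low = mid + 1; continue; }
    return mid;  // found
  }
  return -1;  // not found
}

int IntCompare(const int* a, const int* b) { return (*a > *b) - (*a < *b); }

int main() {
  std::vector<int> sorted = {1, 3, 5, 7, 9};
  std::printf("index of 7: %d\n", SortedBSearch(sorted, 7, IntCompare));
  std::printf("index of 4: %d\n", SortedBSearch(sorted, 4, IntCompare));
  return 0;
}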
macro NUMBER_IS_NAN(arg) = (!%_IsSmi(%IS_VAR(arg)) && !(arg == arg)); macro NUMBER_IS_FINITE(arg) = (%_IsSmi(%IS_VAR(arg)) || ((arg == arg) && (arg != 1/0) && (arg != -1/0))); diff --git a/deps/v8/src/mark-compact-inl.h b/deps/v8/src/mark-compact-inl.h index 573715e28..20f11a78a 100644 --- a/deps/v8/src/mark-compact-inl.h +++ b/deps/v8/src/mark-compact-inl.h @@ -38,7 +38,7 @@ namespace internal { MarkBit Marking::MarkBitFrom(Address addr) { - MemoryChunk* p = MemoryChunk::FromAddress(addr); + MemoryChunk *p = MemoryChunk::FromAddress(addr); return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr), p->ContainsOnlyData()); } @@ -54,6 +54,9 @@ void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) { if (!mark_bit.Get()) { mark_bit.Set(); MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size()); +#ifdef DEBUG + UpdateLiveObjectCount(obj); +#endif ProcessNewlyMarkedObject(obj); } } @@ -64,6 +67,9 @@ void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) { ASSERT(Marking::MarkBitFrom(obj) == mark_bit); mark_bit.Set(); MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size()); +#ifdef DEBUG + UpdateLiveObjectCount(obj); +#endif } diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc index b41b03367..9fa79ca74 100644 --- a/deps/v8/src/mark-compact.cc +++ b/deps/v8/src/mark-compact.cc @@ -65,6 +65,16 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT collect_maps_(FLAG_collect_maps), tracer_(NULL), migration_slots_buffer_(NULL), +#ifdef DEBUG + live_young_objects_size_(0), + live_old_pointer_objects_size_(0), + live_old_data_objects_size_(0), + live_code_objects_size_(0), + live_map_objects_size_(0), + live_cell_objects_size_(0), + live_lo_objects_size_(0), + live_bytes_(0), +#endif heap_(NULL), code_flusher_(NULL), encountered_weak_maps_(NULL) { } @@ -320,7 +330,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() { #endif -static void ClearMarkbitsInPagedSpace(PagedSpace* space) { +static void ClearMarkbits(PagedSpace* space) { PageIterator it(space); while (it.has_next()) { @@ -329,7 +339,7 @@ static void ClearMarkbitsInPagedSpace(PagedSpace* space) { } -static void ClearMarkbitsInNewSpace(NewSpace* space) { +static void ClearMarkbits(NewSpace* space) { NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd()); while (it.has_next()) { @@ -338,15 +348,15 @@ static void ClearMarkbitsInNewSpace(NewSpace* space) { } -void MarkCompactCollector::ClearMarkbits() { - ClearMarkbitsInPagedSpace(heap_->code_space()); - ClearMarkbitsInPagedSpace(heap_->map_space()); - ClearMarkbitsInPagedSpace(heap_->old_pointer_space()); - ClearMarkbitsInPagedSpace(heap_->old_data_space()); - ClearMarkbitsInPagedSpace(heap_->cell_space()); - ClearMarkbitsInNewSpace(heap_->new_space()); +static void ClearMarkbits(Heap* heap) { + ClearMarkbits(heap->code_space()); + ClearMarkbits(heap->map_space()); + ClearMarkbits(heap->old_pointer_space()); + ClearMarkbits(heap->old_data_space()); + ClearMarkbits(heap->cell_space()); + ClearMarkbits(heap->new_space()); - LargeObjectIterator it(heap_->lo_space()); + LargeObjectIterator it(heap->lo_space()); for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { MarkBit mark_bit = Marking::MarkBitFrom(obj); mark_bit.Clear(); @@ -494,7 +504,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) { // Clear marking bits for precise sweeping to collect all garbage. 
if (was_marked_incrementally_ && PreciseSweepingRequired()) { heap()->incremental_marking()->Abort(); - ClearMarkbits(); + ClearMarkbits(heap_); AbortCompaction(); was_marked_incrementally_ = false; } @@ -513,10 +523,21 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) { } #ifdef DEBUG - if (!was_marked_incrementally_ && FLAG_verify_heap) { + if (!was_marked_incrementally_) { VerifyMarkbitsAreClean(); } #endif + +#ifdef DEBUG + live_bytes_ = 0; + live_young_objects_size_ = 0; + live_old_pointer_objects_size_ = 0; + live_old_data_objects_size_ = 0; + live_code_objects_size_ = 0; + live_map_objects_size_ = 0; + live_cell_objects_size_ = 0; + live_lo_objects_size_ = 0; +#endif } @@ -2155,6 +2176,32 @@ void MarkCompactCollector::ProcessMapCaches() { } +#ifdef DEBUG +void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { + live_bytes_ += obj->Size(); + if (heap()->new_space()->Contains(obj)) { + live_young_objects_size_ += obj->Size(); + } else if (heap()->map_space()->Contains(obj)) { + ASSERT(obj->IsMap()); + live_map_objects_size_ += obj->Size(); + } else if (heap()->cell_space()->Contains(obj)) { + ASSERT(obj->IsJSGlobalPropertyCell()); + live_cell_objects_size_ += obj->Size(); + } else if (heap()->old_pointer_space()->Contains(obj)) { + live_old_pointer_objects_size_ += obj->Size(); + } else if (heap()->old_data_space()->Contains(obj)) { + live_old_data_objects_size_ += obj->Size(); + } else if (heap()->code_space()->Contains(obj)) { + live_code_objects_size_ += obj->Size(); + } else if (heap()->lo_space()->Contains(obj)) { + live_lo_objects_size_ += obj->Size(); + } else { + UNREACHABLE(); + } +} +#endif // DEBUG + + void MarkCompactCollector::ReattachInitialMaps() { HeapObjectIterator map_iterator(heap()->map_space()); for (HeapObject* obj = map_iterator.Next(); @@ -3602,6 +3649,8 @@ void MarkCompactCollector::SweepSpaces() { // of the previous ones. SweepSpace(heap()->map_space(), PRECISE); + ASSERT(live_map_objects_size_ <= heap()->map_space()->Size()); + // Deallocate unmarked objects and clear marked bits for marked objects. heap_->lo_space()->FreeUnmarkedObjects(); } diff --git a/deps/v8/src/mark-compact.h b/deps/v8/src/mark-compact.h index 254f175b6..d54d82249 100644 --- a/deps/v8/src/mark-compact.h +++ b/deps/v8/src/mark-compact.h @@ -61,52 +61,68 @@ class Marking { // Impossible markbits: 01 static const char* kImpossibleBitPattern; static inline bool IsImpossible(MarkBit mark_bit) { + ASSERT(strcmp(kImpossibleBitPattern, "01") == 0); return !mark_bit.Get() && mark_bit.Next().Get(); } // Black markbits: 10 - this is required by the sweeper. static const char* kBlackBitPattern; static inline bool IsBlack(MarkBit mark_bit) { + ASSERT(strcmp(kBlackBitPattern, "10") == 0); + ASSERT(!IsImpossible(mark_bit)); return mark_bit.Get() && !mark_bit.Next().Get(); } // White markbits: 00 - this is required by the mark bit clearer. 
static const char* kWhiteBitPattern; static inline bool IsWhite(MarkBit mark_bit) { + ASSERT(strcmp(kWhiteBitPattern, "00") == 0); + ASSERT(!IsImpossible(mark_bit)); return !mark_bit.Get(); } // Grey markbits: 11 static const char* kGreyBitPattern; static inline bool IsGrey(MarkBit mark_bit) { + ASSERT(strcmp(kGreyBitPattern, "11") == 0); + ASSERT(!IsImpossible(mark_bit)); return mark_bit.Get() && mark_bit.Next().Get(); } static inline void MarkBlack(MarkBit mark_bit) { mark_bit.Set(); mark_bit.Next().Clear(); + ASSERT(Marking::IsBlack(mark_bit)); } static inline void BlackToGrey(MarkBit markbit) { + ASSERT(IsBlack(markbit)); markbit.Next().Set(); + ASSERT(IsGrey(markbit)); } static inline void WhiteToGrey(MarkBit markbit) { + ASSERT(IsWhite(markbit)); markbit.Set(); markbit.Next().Set(); + ASSERT(IsGrey(markbit)); } static inline void GreyToBlack(MarkBit markbit) { + ASSERT(IsGrey(markbit)); markbit.Next().Clear(); + ASSERT(IsBlack(markbit)); } static inline void BlackToGrey(HeapObject* obj) { + ASSERT(obj->Size() >= 2 * kPointerSize); BlackToGrey(MarkBitFrom(obj)); } static inline void AnyToGrey(MarkBit markbit) { markbit.Set(); markbit.Next().Set(); + ASSERT(IsGrey(markbit)); } // Returns true if the the object whose mark is transferred is marked black. @@ -157,6 +173,8 @@ class Marking { to_mark_bit.Next().Set(); is_black = false; // Was actually gray. } + ASSERT(Color(from) == Color(to)); + ASSERT(is_black == (Color(to) == BLACK_OBJECT)); return is_black; } @@ -209,6 +227,7 @@ class MarkingDeque { inline void PushGrey(HeapObject* object) { ASSERT(object->IsHeapObject()); if (IsFull()) { + ASSERT(Marking::IsGrey(Marking::MarkBitFrom(object))); SetOverflowed(); } else { array_[top_] = object; @@ -227,6 +246,7 @@ class MarkingDeque { inline void UnshiftGrey(HeapObject* object) { ASSERT(object->IsHeapObject()); if (IsFull()) { + ASSERT(Marking::IsGrey(Marking::MarkBitFrom(object))); SetOverflowed(); } else { bottom_ = ((bottom_ - 1) & mask_); @@ -538,8 +558,6 @@ class MarkCompactCollector { void InvalidateCode(Code* code); - void ClearMarkbits(); - private: MarkCompactCollector(); ~MarkCompactCollector(); @@ -669,6 +687,10 @@ class MarkCompactCollector { // heap object. static bool IsUnmarkedHeapObject(Object** p); +#ifdef DEBUG + void UpdateLiveObjectCount(HeapObject* obj); +#endif + // Map transitions from a live map to a dead map must be killed. // We replace them with a null descriptor, with the same key. void ClearNonLiveTransitions(); @@ -715,7 +737,37 @@ class MarkCompactCollector { void SweepSpace(PagedSpace* space, SweeperType sweeper); + #ifdef DEBUG + // ----------------------------------------------------------------------- + // Debugging variables, functions and classes + // Counters used for debugging the marking phase of mark-compact or + // mark-sweep collection. + + // Size of live objects in Heap::to_space_. + int live_young_objects_size_; + + // Size of live objects in Heap::old_pointer_space_. + int live_old_pointer_objects_size_; + + // Size of live objects in Heap::old_data_space_. + int live_old_data_objects_size_; + + // Size of live objects in Heap::code_space_. + int live_code_objects_size_; + + // Size of live objects in Heap::map_space_. + int live_map_objects_size_; + + // Size of live objects in Heap::cell_space_. + int live_cell_objects_size_; + + // Size of live objects in Heap::lo_space_. + int live_lo_objects_size_; + + // Number of live bytes in this collection. 
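The mark-compact.h hunks above restore asserts that document the two-bit mark encoding: white is 00, black is 10, grey is 11, and 01 never occurs. The sketch below models that encoding with a plain struct instead of V8's MarkBit (which addresses bits inside per-chunk bitmaps), so the color transitions and their asserts can be read in isolation:

#include <cassert>
#include <cstdio>

// Two adjacent bits per object: "first" is the mark bit, "second" the next bit.
// White = 00, Grey = 11, Black = 10, and 01 is never produced.
struct MarkBits {
  bool first = false;
  bool second = false;
};

bool IsWhite(const MarkBits& m) { return !m.first; }
bool IsBlack(const MarkBits& m) { return m.first && !m.second; }
bool IsGrey(const MarkBits& m)  { return m.first && m.second; }
bool IsImpossible(const MarkBits& m) { return !m.first && m.second; }

void WhiteToGrey(MarkBits& m) {
  assert(IsWhite(m));
  m.first = true;
  m.second = true;
  assert(IsGrey(m));
}

void GreyToBlack(MarkBits& m) {
  assert(IsGrey(m));
  m.second = false;
  assert(IsBlack(m));
}

int main() {
  MarkBits m;        // starts white: not yet discovered
  WhiteToGrey(m);    // discovered, queued for scanning
  GreyToBlack(m);    // scanned, children pushed
  std::printf("black: %d, impossible: %d\n", IsBlack(m), IsImpossible(m));
  return 0;
}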
+ int live_bytes_; + friend class MarkObjectVisitor; static void VisitObject(HeapObject* obj); diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js index e4607abd2..a9993af22 100644 --- a/deps/v8/src/messages.js +++ b/deps/v8/src/messages.js @@ -83,7 +83,7 @@ function IsNativeErrorObject(obj) { // objects between script tags in a browser setting. function ToStringCheckErrorObject(obj) { if (IsNativeErrorObject(obj)) { - return %_CallFunction(obj, ErrorToString); + return %_CallFunction(obj, errorToString); } else { return ToString(obj); } @@ -185,15 +185,14 @@ function FormatMessage(message) { "define_disallowed", ["Cannot define property:", "%0", ", object is not extensible."], "non_extensible_proto", ["%0", " is not extensible"], "handler_non_object", ["Proxy.", "%0", " called with non-object as handler"], - "proto_non_object", ["Proxy.", "%0", " called with non-object as prototype"], - "trap_function_expected", ["Proxy.", "%0", " called with non-function for '", "%1", "' trap"], + "trap_function_expected", ["Proxy.", "%0", " called with non-function for ", "%1", " trap"], "handler_trap_missing", ["Proxy handler ", "%0", " has no '", "%1", "' trap"], "handler_trap_must_be_callable", ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"], - "handler_returned_false", ["Proxy handler ", "%0", " returned false from '", "%1", "' trap"], - "handler_returned_undefined", ["Proxy handler ", "%0", " returned undefined from '", "%1", "' trap"], - "proxy_prop_not_configurable", ["Proxy handler ", "%0", " returned non-configurable descriptor for property '", "%2", "' from '", "%1", "' trap"], - "proxy_non_object_prop_names", ["Trap '", "%1", "' returned non-object ", "%0"], - "proxy_repeated_prop_name", ["Trap '", "%1", "' returned repeated property name '", "%2", "'"], + "handler_returned_false", ["Proxy handler ", "%0", " returned false for '", "%1", "' trap"], + "handler_returned_undefined", ["Proxy handler ", "%0", " returned undefined for '", "%1", "' trap"], + "proxy_prop_not_configurable", ["Trap ", "%1", " of proxy handler ", "%0", " returned non-configurable descriptor for property ", "%2"], + "proxy_non_object_prop_names", ["Trap ", "%1", " returned non-object ", "%0"], + "proxy_repeated_prop_name", ["Trap ", "%1", " returned repeated property name ", "%2"], "invalid_weakmap_key", ["Invalid value used as weak map key"], // RangeError "invalid_array_length", ["Invalid array length"], @@ -241,7 +240,6 @@ function FormatMessage(message) { "strict_poison_pill", ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"], "strict_caller", ["Illegal access to a strict mode caller function."], "unprotected_let", ["Illegal let declaration in unprotected statement context."], - "unprotected_const", ["Illegal const declaration in unprotected statement context."], "cant_prevent_ext_external_array_elements", ["Cannot prevent extension of an object with external array elements"], "redef_external_array_element", ["Cannot redefine a property of an object with external array elements"], ]; @@ -1128,7 +1126,6 @@ function SetUpError() { return new f(m); } }); - %SetNativeFlag(f); } DefineError(function Error() { }); @@ -1146,43 +1143,42 @@ $Error.captureStackTrace = captureStackTrace; %SetProperty($Error.prototype, 'message', '', DONT_ENUM); -// Global list of error objects visited during ErrorToString. This is +// Global list of error objects visited during errorToString. 
This is // used to detect cycles in error toString formatting. const visited_errors = new InternalArray(); const cyclic_error_marker = new $Object(); -function ErrorToStringDetectCycle(error) { +function errorToStringDetectCycle(error) { if (!%PushIfAbsent(visited_errors, error)) throw cyclic_error_marker; try { var type = error.type; - var name = error.name - name = IS_UNDEFINED(name) ? "Error" : TO_STRING_INLINE(name); - var message = error.message; var hasMessage = %_CallFunction(error, "message", ObjectHasOwnProperty); if (type && !hasMessage) { - message = FormatMessage(%NewMessageObject(type, error.arguments)); + var formatted = FormatMessage(%NewMessageObject(type, error.arguments)); + return error.name + ": " + formatted; } - message = IS_UNDEFINED(message) ? "" : TO_STRING_INLINE(message); - if (name === "") return message; - if (message === "") return name; - return name + ": " + message; + var message = hasMessage ? (": " + error.message) : ""; + return error.name + message; } finally { visited_errors.length = visited_errors.length - 1; } } -function ErrorToString() { +function errorToString() { if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) { throw MakeTypeError("called_on_null_or_undefined", ["Error.prototype.toString"]); } + // This helper function is needed because access to properties on + // the builtins object do not work inside of a catch clause. + function isCyclicErrorMarker(o) { return o === cyclic_error_marker; } try { - return ErrorToStringDetectCycle(this); + return errorToStringDetectCycle(this); } catch(e) { // If this error message was encountered already return the empty // string for it instead of recursively formatting it. - if (e === cyclic_error_marker) { + if (isCyclicErrorMarker(e)) { return ''; } throw e; @@ -1190,7 +1186,7 @@ function ErrorToString() { } -InstallFunctions($Error.prototype, DONT_ENUM, ['toString', ErrorToString]); +InstallFunctions($Error.prototype, DONT_ENUM, ['toString', errorToString]); // Boilerplate for exceptions for stack overflows. Used from // Isolate::StackOverflow(). 
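Both sides of the messages.js hunks above guard error formatting against cycles: every error being formatted is pushed onto a visited list, and hitting one that is already on the list throws a marker that unwinds to the outer toString, which then returns the empty string. A rough translation of that idea into standalone C++ (FakeError and CyclicMarker are invented for the sketch, not the JS builtins):

#include <cstdio>
#include <string>
#include <vector>

// Hypothetical error object whose message may reference another error.
struct FakeError {
  std::string name;
  const FakeError* message_error = nullptr;  // may point back, forming a cycle
};

static std::vector<const FakeError*> visited_errors;
struct CyclicMarker {};  // thrown when a cycle is detected

std::string ToStringDetectCycle(const FakeError& e) {
  for (const FakeError* seen : visited_errors)
    if (seen == &e) throw CyclicMarker();    // already being formatted
  visited_errors.push_back(&e);
  std::string result = e.name;
  try {
    if (e.message_error != nullptr)
      result += ": " + ToStringDetectCycle(*e.message_error);
  } catch (...) {
    visited_errors.pop_back();  // emulate the "finally" pop
    throw;
  }
  visited_errors.pop_back();
  return result;
}

std::string ErrorToString(const FakeError& e) {
  try {
    return ToStringDetectCycle(e);
  } catch (const CyclicMarker&) {
    return "";  // cycle: report the empty string instead of recursing forever
  }
}

int main() {
  FakeError a{"TypeError"}, b{"RangeError"};
  a.message_error = &b;
  b.message_error = &a;  // cycle
  std::printf("formatted: '%s'\n", ErrorToString(a).c_str());
  return 0;
}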
diff --git a/deps/v8/src/mips/assembler-mips-inl.h b/deps/v8/src/mips/assembler-mips-inl.h index 2ba9760e2..553c511c3 100644 --- a/deps/v8/src/mips/assembler-mips-inl.h +++ b/deps/v8/src/mips/assembler-mips-inl.h @@ -116,10 +116,10 @@ int RelocInfo::target_address_size() { } -void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) { +void RelocInfo::set_target_address(Address target) { ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY); Assembler::set_target_address_at(pc_, target); - if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) { + if (host() != NULL && IsCodeTarget(rmode_)) { Object* target_code = Code::GetCodeFromTargetAddress(target); host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( host(), this, HeapObject::cast(target_code)); @@ -150,12 +150,10 @@ Object** RelocInfo::target_object_address() { } -void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) { +void RelocInfo::set_target_object(Object* target) { ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target)); - if (mode == UPDATE_WRITE_BARRIER && - host() != NULL && - target->IsHeapObject()) { + if (host() != NULL && target->IsHeapObject()) { host()->GetHeap()->incremental_marking()->RecordWrite( host(), &Memory::Object_at(pc_), HeapObject::cast(target)); } @@ -186,12 +184,11 @@ JSGlobalPropertyCell* RelocInfo::target_cell() { } -void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell, - WriteBarrierMode mode) { +void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) { ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL); Address address = cell->address() + JSGlobalPropertyCell::kValueOffset; Memory::Address_at(pc_) = address; - if (mode == UPDATE_WRITE_BARRIER && host() != NULL) { + if (host() != NULL) { // TODO(1550) We are passing NULL as a slot because cell can never be on // evacuation candidate. host()->GetHeap()->incremental_marking()->RecordWrite( diff --git a/deps/v8/src/mips/assembler-mips.h b/deps/v8/src/mips/assembler-mips.h index b66ea0d9f..38e9537af 100644 --- a/deps/v8/src/mips/assembler-mips.h +++ b/deps/v8/src/mips/assembler-mips.h @@ -302,7 +302,7 @@ const FPURegister f29 = { 29 }; const FPURegister f30 = { 30 }; const FPURegister f31 = { 31 }; -static const FPURegister& kDoubleRegZero = f28; +const FPURegister kDoubleRegZero = f28; // FPU (coprocessor 1) control registers. // Currently only FCSR (#31) is implemented. diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc index 1687abe88..5609d5ee4 100644 --- a/deps/v8/src/mips/builtins-mips.cc +++ b/deps/v8/src/mips/builtins-mips.cc @@ -88,6 +88,12 @@ static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { } +// This constant has the same value as JSArray::kPreallocatedArrayElements and +// if JSArray::kPreallocatedArrayElements is changed handling of loop unfolding +// below should be reconsidered. +static const int kLoopUnfoldLimit = 4; + + // Allocate an empty JSArray. The allocated array is put into the result // register. An elements backing store is allocated with size initial_capacity // and filled with the hole values. 
@@ -97,9 +103,9 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, Register scratch1, Register scratch2, Register scratch3, + int initial_capacity, Label* gc_required) { - const int initial_capacity = JSArray::kPreallocatedArrayElements; - STATIC_ASSERT(initial_capacity >= 0); + ASSERT(initial_capacity > 0); // Load the initial map from the array function. __ lw(scratch1, FieldMemOperand(array_function, JSFunction::kPrototypeOrInitialMapOffset)); @@ -149,24 +155,13 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, __ sw(scratch3, MemOperand(scratch1)); __ Addu(scratch1, scratch1, kPointerSize); - // Fill the FixedArray with the hole value. Inline the code if short. - if (initial_capacity == 0) return; + // Fill the FixedArray with the hole value. ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); + ASSERT(initial_capacity <= kLoopUnfoldLimit); __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex); - static const int kLoopUnfoldLimit = 4; - if (initial_capacity <= kLoopUnfoldLimit) { - for (int i = 0; i < initial_capacity; i++) { - __ sw(scratch3, MemOperand(scratch1, i * kPointerSize)); - } - } else { - Label loop, entry; - __ Addu(scratch2, scratch1, Operand(initial_capacity * kPointerSize)); - __ Branch(&entry); - __ bind(&loop); + for (int i = 0; i < initial_capacity; i++) { __ sw(scratch3, MemOperand(scratch1)); __ Addu(scratch1, scratch1, kPointerSize); - __ bind(&entry); - __ Branch(&loop, lt, scratch1, Operand(scratch2)); } } @@ -182,7 +177,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, // register elements_array_storage is scratched. static void AllocateJSArray(MacroAssembler* masm, Register array_function, // Array function. - Register array_size, // As a smi, cannot be 0. + Register array_size, // As a smi. Register result, Register elements_array_storage, Register elements_array_end, @@ -190,18 +185,31 @@ static void AllocateJSArray(MacroAssembler* masm, Register scratch2, bool fill_with_hole, Label* gc_required) { + Label not_empty, allocated; + // Load the initial map from the array function. __ lw(elements_array_storage, FieldMemOperand(array_function, JSFunction::kPrototypeOrInitialMapOffset)); - if (FLAG_debug_code) { // Assert that array size is not zero. - __ Assert( - ne, "array size is unexpectedly 0", array_size, Operand(zero_reg)); - } + // Check whether an empty sized array is requested. + __ Branch(&not_empty, ne, array_size, Operand(zero_reg)); + + // If an empty array is requested allocate a small elements array anyway. This + // keeps the code below free of special casing for the empty array. int size = JSArray::kSize + FixedArray::SizeFor(JSArray::kPreallocatedArrayElements); + __ AllocateInNewSpace(size, + result, + elements_array_end, + scratch1, + gc_required, + TAG_OBJECT); + __ Branch(&allocated); // Allocate the JSArray object together with space for a FixedArray with the // requested number of elements.
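In the builtins-mips.cc hunks above, AllocateEmptyJSArray fills the new backing store with the-hole values; the restored code can always emit a fully unrolled run of stores because the capacity is bounded by kLoopUnfoldLimit (4), while the removed 3.7.1 code unrolled only small counts and fell back to a branch-based loop otherwise. The sketch below shows the same trade-off in ordinary C++ (illustrative constants, not the macro-assembler code):

#include <cstdio>

static const int kLoopUnfoldLimit = 4;  // mirrors the constant in the hunk
static const int kHole = -1;            // stand-in for the-hole value

// Fill `count` slots with the hole marker. Small counts are fully unrolled
// into straight-line stores; larger counts fall back to an ordinary loop,
// which is the shape the removed 3.7.1 code emitted.
void FillWithHoles(int* slots, int count) {
  if (count <= kLoopUnfoldLimit) {
    if (count > 0) slots[0] = kHole;
    if (count > 1) slots[1] = kHole;
    if (count > 2) slots[2] = kHole;
    if (count > 3) slots[3] = kHole;
  } else {
    for (int i = 0; i < count; i++) slots[i] = kHole;
  }
}

int main() {
  int backing[8] = {0};
  FillWithHoles(backing, 3);      // unrolled path
  FillWithHoles(backing + 3, 5);  // loop path
  for (int v : backing) std::printf("%d ", v);
  std::printf("\n");
  return 0;
}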
+ __ bind(&not_empty); STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); __ li(elements_array_end, (JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize); @@ -220,6 +228,7 @@ static void AllocateJSArray(MacroAssembler* masm, // result: JSObject // elements_array_storage: initial map // array_size: size of array (smi) + __ bind(&allocated); __ sw(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset)); __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex); __ sw(elements_array_storage, @@ -253,6 +262,8 @@ static void AllocateJSArray(MacroAssembler* masm, // the actual JSArray has length 0 and the size of the JSArray for non-empty // JSArrays. The length of a FixedArray is stored as a smi. STATIC_ASSERT(kSmiTag == 0); + __ li(at, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements))); + __ movz(array_size, at, array_size); ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); __ sw(array_size, MemOperand(elements_array_storage)); @@ -301,18 +312,18 @@ static void AllocateJSArray(MacroAssembler* masm, static void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) { Counters* counters = masm->isolate()->counters(); - Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array; + Label argc_one_or_more, argc_two_or_more; // Check for array construction with zero arguments or one. __ Branch(&argc_one_or_more, ne, a0, Operand(zero_reg)); // Handle construction of an empty array. - __ bind(&empty_array); AllocateEmptyJSArray(masm, a1, a2, a3, t0, t1, + JSArray::kPreallocatedArrayElements, call_generic_code); __ IncrementCounter(counters->array_function_native(), 1, a3, t0); // Setup return value, remove receiver from stack and return. @@ -327,12 +338,6 @@ static void ArrayNativeCode(MacroAssembler* masm, STATIC_ASSERT(kSmiTag == 0); __ lw(a2, MemOperand(sp)); // Get the argument from the stack. - __ Branch(&not_empty_array, ne, a2, Operand(zero_reg)); - __ Drop(1); // Adjust stack. - __ mov(a0, zero_reg); // Treat this as a call with argc of zero. - __ Branch(&empty_array); - - __ bind(&not_empty_array); __ And(a3, a2, Operand(kIntptrSignBit | kSmiTagMask)); __ Branch(call_generic_code, eq, a3, Operand(zero_reg)); @@ -1048,9 +1053,9 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); // Set up the roots register. - ExternalReference roots_array_start = - ExternalReference::roots_array_start(masm->isolate()); - __ li(s6, Operand(roots_array_start)); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); + __ li(s6, Operand(roots_address)); // Push the function and the receiver onto the stack. __ Push(a1, a2); diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc index 85e929d39..fe251b9e6 100644 --- a/deps/v8/src/mips/code-stubs-mips.cc +++ b/deps/v8/src/mips/code-stubs-mips.cc @@ -262,12 +262,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { // [sp + (2 * kPointerSize)]: literals array. // All sizes here are multiples of kPointerSize. - int elements_size = 0; - if (length_ > 0) { - elements_size = mode_ == CLONE_DOUBLE_ELEMENTS - ? FixedDoubleArray::SizeFor(length_) - : FixedArray::SizeFor(length_); - } + int elements_size = (length_ > 0) ?
FixedArray::SizeFor(length_) : 0; int size = JSArray::kSize + elements_size; // Load boilerplate object into r3 and check if we need to create a @@ -288,9 +283,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { if (mode_ == CLONE_ELEMENTS) { message = "Expected (writable) fixed array"; expected_map_index = Heap::kFixedArrayMapRootIndex; - } else if (mode_ == CLONE_DOUBLE_ELEMENTS) { - message = "Expected (writable) fixed double array"; - expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; } else { ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); message = "Expected copy-on-write fixed array"; @@ -330,7 +322,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset)); // Copy the elements array. - ASSERT((elements_size % kPointerSize) == 0); __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize); } @@ -4080,7 +4071,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { } // Get the prototype of the function. - __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); + __ TryGetFunctionPrototype(function, prototype, scratch, &slow); // Check that the function prototype is a JS object. __ JumpIfSmi(prototype, &slow); @@ -6898,7 +6889,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) { // The saved ra is after the reserved stack space for the 4 args. __ lw(t9, MemOperand(sp, kCArgsSlotsSize)); - if (FLAG_debug_code && FLAG_enable_slow_asserts) { + if (FLAG_debug_code && EnableSlowAsserts()) { // In case of an error the return address may point to a memory area // filled with kZapValue by the GC. // Dereference the address and check for this. @@ -6948,82 +6939,7 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm, } -void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, - Label* miss, - Label* done, - Register receiver, - Register properties, - Handle<String> name, - Register scratch0) { - // If names of slots in range from 1 to kProbes - 1 for the hash value are - // not equal to the name and kProbes-th slot is not used (its name is the - // undefined value), it guarantees the hash table doesn't contain the - // property. It's true even if some slots represent deleted properties - // (their names are the null value). - for (int i = 0; i < kInlinedProbes; i++) { - // scratch0 points to properties hash. - // Compute the masked index: (hash + i + i * i) & mask. - Register index = scratch0; - // Capacity is smi 2^n. - __ lw(index, FieldMemOperand(properties, kCapacityOffset)); - __ Subu(index, index, Operand(1)); - __ And(index, index, Operand( - Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i)))); - - // Scale the index by multiplying by the entry size. - ASSERT(StringDictionary::kEntrySize == 3); - __ sll(at, index, 1); - __ Addu(index, index, at); - - Register entity_name = scratch0; - // Having undefined at this place means the name is not contained. - ASSERT_EQ(kSmiTagSize, 1); - Register tmp = properties; - __ sll(tmp, index, 1); - __ Addu(tmp, properties, tmp); - __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); - - ASSERT(!tmp.is(entity_name)); - __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); - __ Branch(done, eq, entity_name, Operand(tmp)); - - if (i != kInlinedProbes - 1) { - // Stop if found the property. - __ Branch(miss, eq, entity_name, Operand(Handle<String>(name))); - - // Check if the entry name is not a symbol. 
- __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); - __ lbu(entity_name, - FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); - __ And(tmp, entity_name, Operand(kIsSymbolMask)); - __ Branch(miss, eq, tmp, Operand(zero_reg)); - - // Restore the properties. - __ lw(properties, - FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - } - } - - const int spill_mask = - (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() | - a2.bit() | a1.bit() | a0.bit() | v0.bit()); - - __ MultiPush(spill_mask); - __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - __ li(a1, Operand(Handle<String>(name))); - StringDictionaryLookupStub stub(NEGATIVE_LOOKUP); - __ CallStub(&stub); - __ mov(at, v0); - __ MultiPop(spill_mask); - - __ Branch(done, eq, at, Operand(zero_reg)); - __ Branch(miss, ne, at, Operand(zero_reg)); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup( +MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup( MacroAssembler* masm, Label* miss, Label* done, @@ -7049,7 +6965,8 @@ MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup( // Scale the index by multiplying by the entry size. ASSERT(StringDictionary::kEntrySize == 3); // index *= 3. - __ sll(at, index, 1); + __ mov(at, index); + __ sll(index, index, 1); __ Addu(index, index, at); Register entity_name = scratch0; @@ -7084,7 +7001,7 @@ MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup( const int spill_mask = (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() | - a2.bit() | a1.bit() | a0.bit() | v0.bit()); + a2.bit() | a1.bit() | a0.bit()); __ MultiPush(spill_mask); __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); @@ -7092,11 +7009,10 @@ MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup( StringDictionaryLookupStub stub(NEGATIVE_LOOKUP); MaybeObject* result = masm->TryCallStub(&stub); if (result->IsFailure()) return result; - __ mov(at, v0); __ MultiPop(spill_mask); - __ Branch(done, eq, at, Operand(zero_reg)); - __ Branch(miss, ne, at, Operand(zero_reg)); + __ Branch(done, eq, v0, Operand(zero_reg)); + __ Branch(miss, ne, v0, Operand(zero_reg)); return result; } @@ -7142,7 +7058,8 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, ASSERT(StringDictionary::kEntrySize == 3); // scratch2 = scratch2 * 3. - __ sll(at, scratch2, 1); + __ mov(at, scratch2); + __ sll(scratch2, scratch2, 1); __ Addu(scratch2, scratch2, at); // Check if the key is identical to the name. 
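The StringDictionaryLookupStub hunks above probe an open-addressed table: the hash plus a per-iteration probe offset is masked by the power-of-two capacity and then scaled by the entry size of 3, which the assembly does with a shift and an add. The sketch below imitates that layout with a flat std::vector; the triangular probe step is my reading of StringDictionary::GetProbeOffset, and the table itself is a hypothetical simplification:

#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

// Simplified open-addressed string table: capacity is a power of two and each
// entry spans kEntrySize flat slots (key, value, details in the real table).
static const uint32_t kEntrySize = 3;

struct FlatDictionary {
  uint32_t capacity;               // number of entries, power of two
  std::vector<std::string> slots;  // capacity * kEntrySize flat storage
  explicit FlatDictionary(uint32_t cap)
      : capacity(cap), slots(cap * kEntrySize) {}

  // Mask the hash plus a triangular probe offset, then scale by entry size,
  // as the stub's shift-and-add does.
  uint32_t ProbeBase(uint32_t hash, uint32_t i) const {
    return ((hash + (i + i * i) / 2) & (capacity - 1)) * kEntrySize;
  }

  bool Insert(const std::string& key, uint32_t hash) {
    for (uint32_t i = 0; i < capacity; i++) {
      uint32_t base = ProbeBase(hash, i);
      if (slots[base].empty()) { slots[base] = key; return true; }
    }
    return false;  // table is full
  }

  bool Contains(const std::string& key, uint32_t hash) const {
    for (uint32_t i = 0; i < capacity; i++) {
      uint32_t base = ProbeBase(hash, i);
      if (slots[base].empty()) return false;  // undefined slot: not present
      if (slots[base] == key) return true;
    }
    return false;
  }
};

int main() {
  FlatDictionary dict(8);
  dict.Insert("length", 11u);
  dict.Insert("name", 3u);  // 11 & 7 == 3, so this insert must probe onward
  std::printf("has 'name': %d, has 'proto': %d\n",
              dict.Contains("name", 3u), dict.Contains("proto", 99u));
  return 0;
}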
@@ -7154,26 +7071,19 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, const int spill_mask = (ra.bit() | t2.bit() | t1.bit() | t0.bit() | - a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) & + a3.bit() | a2.bit() | a1.bit() | a0.bit()) & ~(scratch1.bit() | scratch2.bit()); __ MultiPush(spill_mask); - if (name.is(a0)) { - ASSERT(!elements.is(a1)); - __ Move(a1, name); - __ Move(a0, elements); - } else { - __ Move(a0, elements); - __ Move(a1, name); - } + __ Move(a0, elements); + __ Move(a1, name); StringDictionaryLookupStub stub(POSITIVE_LOOKUP); __ CallStub(&stub); __ mov(scratch2, a2); - __ mov(at, v0); __ MultiPop(spill_mask); - __ Branch(done, ne, at, Operand(zero_reg)); - __ Branch(miss, eq, at, Operand(zero_reg)); + __ Branch(done, ne, v0, Operand(zero_reg)); + __ Branch(miss, eq, v0, Operand(zero_reg)); } @@ -7297,13 +7207,6 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { { a3, a1, a2, EMIT_REMEMBERED_SET }, // KeyedStoreStubCompiler::GenerateStoreFastElement. { t0, a2, a3, EMIT_REMEMBERED_SET }, - // ElementsTransitionGenerator::GenerateSmiOnlyToObject - // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble - // and ElementsTransitionGenerator::GenerateDoubleToObject - { a2, a3, t5, EMIT_REMEMBERED_SET }, - // ElementsTransitionGenerator::GenerateDoubleToObject - { t2, a2, a0, EMIT_REMEMBERED_SET }, - { a2, t2, t5, EMIT_REMEMBERED_SET }, // Null termination. { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} }; diff --git a/deps/v8/src/mips/code-stubs-mips.h b/deps/v8/src/mips/code-stubs-mips.h index beb20aab4..ef6b88908 100644 --- a/deps/v8/src/mips/code-stubs-mips.h +++ b/deps/v8/src/mips/code-stubs-mips.h @@ -799,17 +799,7 @@ class StringDictionaryLookupStub: public CodeStub { void Generate(MacroAssembler* masm); - static void GenerateNegativeLookup(MacroAssembler* masm, - Label* miss, - Label* done, - Register receiver, - Register properties, - Handle<String> name, - Register scratch0); - - // TODO(kmillikin): Eliminate this function when the stub cache is fully - // handlified. - MUST_USE_RESULT static MaybeObject* TryGenerateNegativeLookup( + MUST_USE_RESULT static MaybeObject* GenerateNegativeLookup( MacroAssembler* masm, Label* miss, Label* done, diff --git a/deps/v8/src/mips/codegen-mips.cc b/deps/v8/src/mips/codegen-mips.cc index e9fe2324e..ff146dd4e 100644 --- a/deps/v8/src/mips/codegen-mips.cc +++ b/deps/v8/src/mips/codegen-mips.cc @@ -30,13 +30,10 @@ #if defined(V8_TARGET_ARCH_MIPS) #include "codegen.h" -#include "macro-assembler.h" namespace v8 { namespace internal { -#define __ ACCESS_MASM(masm) - // ------------------------------------------------------------------------- // Platform-specific RuntimeCallHelper functions. @@ -53,260 +50,6 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const { masm->set_has_frame(false); } -// ------------------------------------------------------------------------- -// Code generators - -void ElementsTransitionGenerator::GenerateSmiOnlyToObject( - MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- a0 : value - // -- a1 : key - // -- a2 : receiver - // -- ra : return address - // -- a3 : target map, scratch for subsequent call - // -- t0 : scratch (elements) - // ----------------------------------- - // Set transitioned map. 
- __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset)); - __ RecordWriteField(a2, - HeapObject::kMapOffset, - a3, - t5, - kRAHasNotBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); -} - - -void ElementsTransitionGenerator::GenerateSmiOnlyToDouble( - MacroAssembler* masm, Label* fail) { - // ----------- S t a t e ------------- - // -- a0 : value - // -- a1 : key - // -- a2 : receiver - // -- ra : return address - // -- a3 : target map, scratch for subsequent call - // -- t0 : scratch (elements) - // ----------------------------------- - Label loop, entry, convert_hole, gc_required; - bool fpu_supported = CpuFeatures::IsSupported(FPU); - __ push(ra); - - Register scratch = t6; - - __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset)); - __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); - // t0: source FixedArray - // t1: number of elements (smi-tagged) - - // Allocate new FixedDoubleArray. - __ sll(scratch, t1, 2); - __ Addu(scratch, scratch, FixedDoubleArray::kHeaderSize); - __ AllocateInNewSpace(scratch, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS); - // t2: destination FixedDoubleArray, not tagged as heap object - __ LoadRoot(t5, Heap::kFixedDoubleArrayMapRootIndex); - __ sw(t5, MemOperand(t2, HeapObject::kMapOffset)); - // Set destination FixedDoubleArray's length. - __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset)); - // Update receiver's map. - - __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset)); - __ RecordWriteField(a2, - HeapObject::kMapOffset, - a3, - t5, - kRAHasBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - // Replace receiver's backing store with newly created FixedDoubleArray. - __ Addu(a3, t2, Operand(kHeapObjectTag)); - __ sw(a3, FieldMemOperand(a2, JSObject::kElementsOffset)); - __ RecordWriteField(a2, - JSObject::kElementsOffset, - a3, - t5, - kRAHasBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - - - // Prepare for conversion loop. - __ Addu(a3, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - __ Addu(t3, t2, Operand(FixedDoubleArray::kHeaderSize)); - __ sll(t2, t1, 2); - __ Addu(t2, t2, t3); - __ li(t0, Operand(kHoleNanLower32)); - __ li(t1, Operand(kHoleNanUpper32)); - // t0: kHoleNanLower32 - // t1: kHoleNanUpper32 - // t2: end of destination FixedDoubleArray, not tagged - // t3: begin of FixedDoubleArray element fields, not tagged - - if (!fpu_supported) __ Push(a1, a0); - - __ Branch(&entry); - - // Call into runtime if GC is required. - __ bind(&gc_required); - __ pop(ra); - __ Branch(fail); - - // Convert and copy elements. - __ bind(&loop); - __ lw(t5, MemOperand(a3)); - __ Addu(a3, a3, kIntSize); - // t5: current element - __ JumpIfNotSmi(t5, &convert_hole); - - // Normal smi, convert to double and store. - __ SmiUntag(t5); - if (fpu_supported) { - CpuFeatures::Scope scope(FPU); - __ mtc1(t5, f0); - __ cvt_d_w(f0, f0); - __ sdc1(f0, MemOperand(t3)); - __ Addu(t3, t3, kDoubleSize); - } else { - FloatingPointHelper::ConvertIntToDouble(masm, - t5, - FloatingPointHelper::kCoreRegisters, - f0, - a0, - a1, - t7, - f0); - __ sw(a0, MemOperand(t3)); // mantissa - __ sw(a1, MemOperand(t3, kIntSize)); // exponent - __ Addu(t3, t3, kDoubleSize); - } - __ Branch(&entry); - - // Hole found, store the-hole NaN. 
- __ bind(&convert_hole); - __ sw(t0, MemOperand(t3)); // mantissa - __ sw(t1, MemOperand(t3, kIntSize)); // exponent - __ Addu(t3, t3, kDoubleSize); - - __ bind(&entry); - __ Branch(&loop, lt, t3, Operand(t2)); - - if (!fpu_supported) __ Pop(a1, a0); - __ pop(ra); -} - - -void ElementsTransitionGenerator::GenerateDoubleToObject( - MacroAssembler* masm, Label* fail) { - // ----------- S t a t e ------------- - // -- a0 : value - // -- a1 : key - // -- a2 : receiver - // -- ra : return address - // -- a3 : target map, scratch for subsequent call - // -- t0 : scratch (elements) - // ----------------------------------- - Label entry, loop, convert_hole, gc_required; - __ MultiPush(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit()); - - __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset)); - __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); - // t0: source FixedArray - // t1: number of elements (smi-tagged) - - // Allocate new FixedArray. - __ sll(a0, t1, 1); - __ Addu(a0, a0, FixedDoubleArray::kHeaderSize); - __ AllocateInNewSpace(a0, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS); - // t2: destination FixedArray, not tagged as heap object - __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); - __ sw(t5, MemOperand(t2, HeapObject::kMapOffset)); - // Set destination FixedDoubleArray's length. - __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset)); - - // Prepare for conversion loop. - __ Addu(t0, t0, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); - __ Addu(a3, t2, Operand(FixedArray::kHeaderSize)); - __ Addu(t2, t2, Operand(kHeapObjectTag)); - __ sll(t1, t1, 1); - __ Addu(t1, a3, t1); - __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); - __ LoadRoot(t5, Heap::kHeapNumberMapRootIndex); - // Using offsetted addresses. - // a3: begin of destination FixedArray element fields, not tagged - // t0: begin of source FixedDoubleArray element fields, not tagged, +4 - // t1: end of destination FixedArray, not tagged - // t2: destination FixedArray - // t3: the-hole pointer - // t5: heap number map - __ Branch(&entry); - - // Call into runtime if GC is required. - __ bind(&gc_required); - __ MultiPop(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit()); - - __ Branch(fail); - - __ bind(&loop); - __ lw(a1, MemOperand(t0)); - __ Addu(t0, t0, kDoubleSize); - // a1: current element's upper 32 bit - // t0: address of next element's upper 32 bit - __ Branch(&convert_hole, eq, a1, Operand(kHoleNanUpper32)); - - // Non-hole double, copy value into a heap number. - __ AllocateHeapNumber(a2, a0, t6, t5, &gc_required); - // a2: new heap number - __ lw(a0, MemOperand(t0, -12)); - __ sw(a0, FieldMemOperand(a2, HeapNumber::kMantissaOffset)); - __ sw(a1, FieldMemOperand(a2, HeapNumber::kExponentOffset)); - __ mov(a0, a3); - __ sw(a2, MemOperand(a3)); - __ Addu(a3, a3, kIntSize); - __ RecordWrite(t2, - a0, - a2, - kRAHasBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ Branch(&entry); - - // Replace the-hole NaN with the-hole pointer. - __ bind(&convert_hole); - __ sw(t3, MemOperand(a3)); - __ Addu(a3, a3, kIntSize); - - __ bind(&entry); - __ Branch(&loop, lt, a3, Operand(t1)); - - __ MultiPop(a2.bit() | a3.bit() | a0.bit() | a1.bit()); - // Update receiver's map. - __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset)); - __ RecordWriteField(a2, - HeapObject::kMapOffset, - a3, - t5, - kRAHasBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - // Replace receiver's backing store with newly created and filled FixedArray. 
- __ sw(t2, FieldMemOperand(a2, JSObject::kElementsOffset)); - __ RecordWriteField(a2, - JSObject::kElementsOffset, - t2, - t5, - kRAHasBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ pop(ra); -} - -#undef __ } } // namespace v8::internal diff --git a/deps/v8/src/mips/codegen-mips.h b/deps/v8/src/mips/codegen-mips.h index 4549509f3..b020d8057 100644 --- a/deps/v8/src/mips/codegen-mips.h +++ b/deps/v8/src/mips/codegen-mips.h @@ -31,6 +31,7 @@ #include "ast.h" +#include "code-stubs-mips.h" #include "ic-inl.h" namespace v8 { diff --git a/deps/v8/src/mips/deoptimizer-mips.cc b/deps/v8/src/mips/deoptimizer-mips.cc index 92d7edd83..280b8cb54 100644 --- a/deps/v8/src/mips/deoptimizer-mips.cc +++ b/deps/v8/src/mips/deoptimizer-mips.cc @@ -61,8 +61,7 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, } -void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, - Address pc_after, +void Deoptimizer::RevertStackCheckCodeAt(Address pc_after, Code* check_code, Code* replacement_code) { UNIMPLEMENTED(); diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc index 2f989bc6f..b3f054087 100644 --- a/deps/v8/src/mips/full-codegen-mips.cc +++ b/deps/v8/src/mips/full-codegen-mips.cc @@ -278,10 +278,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) { // constant. if (scope()->is_function_scope() && scope()->function() != NULL) { int ignored = 0; - VariableProxy* proxy = scope()->function(); - ASSERT(proxy->var()->mode() == CONST || - proxy->var()->mode() == CONST_HARMONY); - EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored); + EmitDeclaration(scope()->function(), CONST, NULL, &ignored); } VisitDeclarations(scope()->declarations()); } @@ -731,8 +728,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, // need to "declare" it at runtime to make sure it actually exists in the // local context. Variable* variable = proxy->var(); - bool binding_needs_init = - mode == CONST || mode == CONST_HARMONY || mode == LET; switch (variable->location()) { case Variable::UNALLOCATED: ++(*global_count); @@ -744,7 +739,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, Comment cmnt(masm_, "[ Declaration"); VisitForAccumulatorValue(function); __ sw(result_register(), StackOperand(variable)); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { Comment cmnt(masm_, "[ Declaration"); __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); __ sw(t0, StackOperand(variable)); @@ -780,7 +775,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); PrepareForBailoutForId(proxy->id(), NO_REGISTERS); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { Comment cmnt(masm_, "[ Declaration"); __ LoadRoot(at, Heap::kTheHoleValueRootIndex); __ sw(at, ContextOperand(cp, variable->index())); @@ -792,13 +787,9 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, case Variable::LOOKUP: { Comment cmnt(masm_, "[ Declaration"); __ li(a2, Operand(variable->name())); - // Declaration nodes are always introduced in one of four modes. - ASSERT(mode == VAR || - mode == CONST || - mode == CONST_HARMONY || - mode == LET); - PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY) - ? READ_ONLY : NONE; + // Declaration nodes are always introduced in one of three modes. + ASSERT(mode == VAR || mode == CONST || mode == LET); + PropertyAttributes attr = (mode == CONST) ? 
READ_ONLY : NONE; __ li(a1, Operand(Smi::FromInt(attr))); // Push initial value, if any. // Note: For variables we must not push an initial value (such as @@ -808,7 +799,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, __ Push(cp, a2, a1); // Push initial value for function declaration. VisitForStackValue(function); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); __ Push(cp, a2, a1, a0); } else { @@ -951,17 +942,11 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ bind(&done_convert); __ push(a0); - // Check for proxies. - Label call_runtime; - STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); - __ GetObjectType(a0, a1, a1); - __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE)); - // Check cache validity in generated code. This is a fast case for // the JSObject::IsSimpleEnum cache validity checks. If we cannot // guarantee cache validity, call the runtime system to check cache // validity or get the property names in a fixed array. - Label next; + Label next, call_runtime; // Preload a couple of values used in the loop. Register empty_fixed_array_value = t2; __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex); @@ -1035,16 +1020,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ jmp(&loop); // We got a fixed array in register v0. Iterate through that. - Label non_proxy; __ bind(&fixed_array); - __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check - __ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object - STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); - __ GetObjectType(a2, a3, a3); - __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE)); - __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy - __ bind(&non_proxy); - __ Push(a1, v0); // Smi and array + __ li(a1, Operand(Smi::FromInt(0))); // Map (0) - force slow check. + __ Push(a1, v0); __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); __ li(a0, Operand(Smi::FromInt(0))); __ Push(a1, a0); // Fixed array length (as smi) and initial index. @@ -1063,22 +1041,17 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ addu(t0, a2, t0); // Array base + scaled (smi) index. __ lw(a3, MemOperand(t0)); // Current entry. - // Get the expected map from the stack or a smi in the + // Get the expected map from the stack or a zero map in the // permanent slow case into register a2. __ lw(a2, MemOperand(sp, 3 * kPointerSize)); // Check if the expected map still matches that of the enumerable. - // If not, we may have to filter the key. + // If not, we have to filter the key. Label update_each; __ lw(a1, MemOperand(sp, 4 * kPointerSize)); __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); __ Branch(&update_each, eq, t0, Operand(a2)); - // For proxies, no filtering is done. - // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. - ASSERT_EQ(Smi::FromInt(0), 0); - __ Branch(&update_each, eq, a2, Operand(zero_reg)); - // Convert the entry to a string or (smi) 0 if it isn't a property // any more. If the property has been removed while iterating, we // just skip it. @@ -1133,7 +1106,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode_flag()); + FastNewClosureStub stub(info->strict_mode() ? 
kStrictMode : kNonStrictMode); __ li(a0, Operand(info)); __ push(a0); __ CallStub(&stub); @@ -1164,7 +1137,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, Scope* s = scope(); while (s != NULL) { if (s->num_heap_slots() > 0) { - if (s->calls_non_strict_eval()) { + if (s->calls_eval()) { // Check that extension is NULL. __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); __ Branch(slow, ne, temp, Operand(zero_reg)); @@ -1176,7 +1149,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, } // If no outer scope calls eval, we do not need to check more // context extensions. - if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; + if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; s = s->outer_scope(); } @@ -1218,7 +1191,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { if (s->num_heap_slots() > 0) { - if (s->calls_non_strict_eval()) { + if (s->calls_eval()) { // Check that extension is NULL. __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); __ Branch(slow, ne, temp, Operand(zero_reg)); @@ -1255,14 +1228,13 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, Variable* local = var->local_if_not_shadowed(); __ lw(v0, ContextSlotOperandCheckExtensions(local, slow)); if (local->mode() == CONST || - local->mode() == CONST_HARMONY || local->mode() == LET) { __ LoadRoot(at, Heap::kTheHoleValueRootIndex); __ subu(at, v0, at); // Sub as compare: at == 0 on eq. if (local->mode() == CONST) { __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); __ movz(v0, a0, at); // Conditional move: return Undefined if TheHole. - } else { // LET || CONST_HARMONY + } else { // LET __ Branch(done, ne, at, Operand(zero_reg)); __ li(a0, Operand(var->name())); __ push(a0); @@ -1300,16 +1272,14 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { Comment cmnt(masm_, var->IsContextSlot() ? "Context variable" : "Stack variable"); - if (!var->binding_needs_init()) { + if (var->mode() != LET && var->mode() != CONST) { context()->Plug(var); } else { // Let and const need a read barrier. GetVar(v0, var); __ LoadRoot(at, Heap::kTheHoleValueRootIndex); __ subu(at, v0, at); // Sub as compare: at == 0 on eq. - if (var->mode() == LET || var->mode() == CONST_HARMONY) { - // Throw a reference error when using an uninitialized let/const - // binding in harmony mode. + if (var->mode() == LET) { Label done; __ Branch(&done, ne, at, Operand(zero_reg)); __ li(a0, Operand(var->name())); @@ -1317,8 +1287,6 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { __ CallRuntime(Runtime::kThrowReferenceError, 1); __ bind(&done); } else { - // Uninitalized const bindings outside of harmony mode are unholed. - ASSERT(var->mode() == CONST); __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); __ movz(v0, a0, at); // Conditional move: Undefined if TheHole. 
} @@ -1508,21 +1476,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ZoneList<Expression*>* subexprs = expr->values(); int length = subexprs->length(); - - Handle<FixedArray> constant_elements = expr->constant_elements(); - ASSERT_EQ(2, constant_elements->length()); - ElementsKind constant_elements_kind = - static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); - Handle<FixedArrayBase> constant_elements_values( - FixedArrayBase::cast(constant_elements->get(1))); - __ mov(a0, result_register()); __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); - __ li(a1, Operand(constant_elements)); + __ li(a1, Operand(expr->constant_elements())); __ Push(a3, a2, a1); - if (constant_elements_values->map() == + if (expr->constant_elements()->map() == isolate()->heap()->fixed_cow_array_map()) { FastCloneShallowArrayStub stub( FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); @@ -1534,14 +1494,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); } else { - ASSERT(constant_elements_kind == FAST_ELEMENTS || - constant_elements_kind == FAST_SMI_ONLY_ELEMENTS || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - constant_elements_kind == FAST_DOUBLE_ELEMENTS - ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS - : FastCloneShallowArrayStub::CLONE_ELEMENTS; - FastCloneShallowArrayStub stub(mode, length); + FastCloneShallowArrayStub stub( + FastCloneShallowArrayStub::CLONE_ELEMENTS, length); __ CallStub(&stub); } @@ -1564,57 +1518,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { } VisitForAccumulatorValue(subexpr); + // Store the subexpression value in the array's elements. __ lw(t6, MemOperand(sp)); // Copy of array literal. __ lw(a1, FieldMemOperand(t6, JSObject::kElementsOffset)); - __ lw(a2, FieldMemOperand(t6, JSObject::kMapOffset)); int offset = FixedArray::kHeaderSize + (i * kPointerSize); - - Label element_done; - Label double_elements; - Label smi_element; - Label slow_elements; - Label fast_elements; - __ CheckFastElements(a2, a3, &double_elements); - - // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS - __ JumpIfSmi(result_register(), &smi_element); - __ CheckFastSmiOnlyElements(a2, a3, &fast_elements); - - // Store into the array literal requires a elements transition. Call into - // the runtime. - __ bind(&slow_elements); - __ push(t6); // Copy of array literal. - __ li(a1, Operand(Smi::FromInt(i))); - __ li(a2, Operand(Smi::FromInt(NONE))); // PropertyAttributes - __ li(a3, Operand(Smi::FromInt(strict_mode_flag()))); // Strict mode. - __ Push(a1, result_register(), a2, a3); - __ CallRuntime(Runtime::kSetProperty, 5); - __ Branch(&element_done); - - // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. - __ bind(&double_elements); - __ li(a3, Operand(Smi::FromInt(i))); - __ StoreNumberToDoubleElements(result_register(), a3, t6, a1, t0, t1, t5, - t3, &slow_elements); - __ Branch(&element_done); - - // Array literal has ElementsKind of FAST_ELEMENTS and value is an object. - __ bind(&fast_elements); __ sw(result_register(), FieldMemOperand(a1, offset)); - // Update the write barrier for the array store. 
+ Label no_map_change; + __ JumpIfSmi(result_register(), &no_map_change); + // Update the write barrier for the array store with v0 as the scratch + // register. __ RecordWriteField( a1, offset, result_register(), a2, kRAHasBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); - __ Branch(&element_done); - - // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or - // FAST_ELEMENTS, and value is Smi. - __ bind(&smi_element); - __ sw(result_register(), FieldMemOperand(a1, offset)); - // Fall through - - __ bind(&element_done); + __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); + __ CheckFastSmiOnlyElements(a3, a2, &no_map_change); + __ push(t6); // Copy of array literal. + __ CallRuntime(Runtime::kNonSmiElementStored, 1); + __ bind(&no_map_change); PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); } @@ -1996,9 +1917,8 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, } } - } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { - // Assignment to var or initializing assignment to let/const - // in harmony mode. + } else if (var->mode() != CONST) { + // Assignment to var or initializing assignment to let. if (var->IsStackAllocated() || var->IsContextSlot()) { MemOperand location = VarOperand(var, a1); if (FLAG_debug_code && op == Token::INIT_LET) { @@ -2883,10 +2803,10 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) { // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). if (CpuFeatures::IsSupported(FPU)) { __ PrepareCallCFunction(1, a0); - __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX)); - __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset)); + __ li(a0, Operand(ExternalReference::isolate_address())); __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); + CpuFeatures::Scope scope(FPU); // 0x41300000 is the top half of 1.0 x 2^20 as a double. __ li(a1, Operand(0x41300000)); @@ -2901,8 +2821,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) { } else { __ PrepareCallCFunction(2, a0); __ mov(a0, s0); - __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX)); - __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset)); + __ li(a1, Operand(ExternalReference::isolate_address())); __ CallCFunction( ExternalReference::fill_heap_number_with_random_function(isolate()), 2); } @@ -4181,26 +4100,36 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::EQ_STRICT: case Token::EQ: cc = eq; + __ mov(a0, result_register()); + __ pop(a1); break; case Token::LT: cc = lt; + __ mov(a0, result_register()); + __ pop(a1); break; case Token::GT: - cc = gt; + // Reverse left and right sides to obtain ECMA-262 conversion order. + cc = lt; + __ mov(a1, result_register()); + __ pop(a0); break; case Token::LTE: - cc = le; + // Reverse left and right sides to obtain ECMA-262 conversion order. + cc = ge; + __ mov(a1, result_register()); + __ pop(a0); break; case Token::GTE: cc = ge; + __ mov(a0, result_register()); + __ pop(a1); break; case Token::IN: case Token::INSTANCEOF: default: UNREACHABLE(); } - __ mov(a0, result_register()); - __ pop(a1); bool inline_smi_code = ShouldInlineSmiCase(op); JumpPatchSite patch_site(masm_); diff --git a/deps/v8/src/mips/ic-mips.cc b/deps/v8/src/mips/ic-mips.cc index ca6383cba..fb33eb665 100644 --- a/deps/v8/src/mips/ic-mips.cc +++ b/deps/v8/src/mips/ic-mips.cc @@ -384,10 +384,10 @@ Object* CallIC_Miss(Arguments args); // The generated code does not accept smi keys. 
// The generated code falls through if both probes miss. -void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, - int argc, - Code::Kind kind, - Code::ExtraICState extra_state) { +static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, + int argc, + Code::Kind kind, + Code::ExtraICState extra_ic_state) { // ----------- S t a t e ------------- // -- a1 : receiver // -- a2 : name @@ -397,7 +397,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, // Probe the stub cache. Code::Flags flags = Code::ComputeFlags(kind, MONOMORPHIC, - extra_state, + extra_ic_state, NORMAL, argc); Isolate::Current()->stub_cache()->GenerateProbe( @@ -463,7 +463,7 @@ static void GenerateFunctionTailCall(MacroAssembler* masm, } -void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { +static void GenerateCallNormal(MacroAssembler* masm, int argc) { // ----------- S t a t e ------------- // -- a2 : name // -- ra : return address @@ -486,10 +486,10 @@ void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { } -void CallICBase::GenerateMiss(MacroAssembler* masm, - int argc, - IC::UtilityId id, - Code::ExtraICState extra_state) { +static void GenerateCallMiss(MacroAssembler* masm, + int argc, + IC::UtilityId id, + Code::ExtraICState extra_ic_state) { // ----------- S t a t e ------------- // -- a2 : name // -- ra : return address @@ -540,7 +540,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, __ bind(&invoke); } // Invoke the function. - CallKind call_kind = CallICBase::Contextual::decode(extra_state) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state) ? CALL_AS_FUNCTION : CALL_AS_METHOD; ParameterCount actual(argc); @@ -552,6 +552,18 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, } +void CallIC::GenerateMiss(MacroAssembler* masm, + int argc, + Code::ExtraICState extra_ic_state) { + // ----------- S t a t e ------------- + // -- a2 : name + // -- ra : return address + // ----------------------------------- + + GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state); +} + + void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc, Code::ExtraICState extra_ic_state) { @@ -567,6 +579,27 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, } +void CallIC::GenerateNormal(MacroAssembler* masm, int argc) { + // ----------- S t a t e ------------- + // -- a2 : name + // -- ra : return address + // ----------------------------------- + + GenerateCallNormal(masm, argc); + GenerateMiss(masm, argc, Code::kNoExtraICState); +} + + +void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) { + // ----------- S t a t e ------------- + // -- a2 : name + // -- ra : return address + // ----------------------------------- + + GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState); +} + + void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { // ----------- S t a t e ------------- // -- a2 : name @@ -683,7 +716,7 @@ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) { __ JumpIfSmi(a2, &miss); __ IsObjectJSStringType(a2, a0, &miss); - CallICBase::GenerateNormal(masm, argc); + GenerateCallNormal(masm, argc); __ bind(&miss); GenerateMiss(masm, argc); } @@ -1388,47 +1421,6 @@ void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { } -void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) { - // ---------- S t a t e -------------- - // -- a2 : receiver - // -- a3 : target map - // -- ra : return address - // ----------------------------------- - // Must 
return the modified receiver in v0. - if (!FLAG_trace_elements_transitions) { - Label fail; - ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail); - __ Ret(USE_DELAY_SLOT); - __ mov(v0, a2); - __ bind(&fail); - } - - __ push(a2); - __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1); -} - - -void KeyedStoreIC::GenerateTransitionElementsDoubleToObject( - MacroAssembler* masm) { - // ---------- S t a t e -------------- - // -- a2 : receiver - // -- a3 : target map - // -- ra : return address - // ----------------------------------- - // Must return the modified receiver in v0. - if (!FLAG_trace_elements_transitions) { - Label fail; - ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail); - __ Ret(USE_DELAY_SLOT); - __ mov(v0, a2); - __ bind(&fail); - } - - __ push(a2); - __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1); -} - - void StoreIC::GenerateMegamorphic(MacroAssembler* masm, StrictModeFlag strict_mode) { // ----------- S t a t e ------------- @@ -1568,9 +1560,11 @@ Condition CompareIC::ComputeCondition(Token::Value op) { case Token::LT: return lt; case Token::GT: - return gt; + // Reverse left and right operands to obtain ECMA-262 conversion order. + return lt; case Token::LTE: - return le; + // Reverse left and right operands to obtain ECMA-262 conversion order. + return ge; case Token::GTE: return ge; default: diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc index 32dce660d..2964fbc86 100644 --- a/deps/v8/src/mips/macro-assembler-mips.cc +++ b/deps/v8/src/mips/macro-assembler-mips.cc @@ -2873,7 +2873,6 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, ASSERT(!result.is(scratch1)); ASSERT(!result.is(scratch2)); ASSERT(!scratch1.is(scratch2)); - ASSERT(!object_size.is(t9)); ASSERT(!scratch1.is(t9) && !scratch2.is(t9) && !result.is(t9)); // Check relative positions of allocation top and limit addresses. @@ -3617,16 +3616,24 @@ void MacroAssembler::InvokeFunction(JSFunction* function, // You can't call a function without a valid frame. ASSERT(flag == JUMP_FUNCTION || has_frame()); + ASSERT(function->is_compiled()); + // Get the function and setup the context. li(a1, Operand(Handle<JSFunction>(function))); lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); + // Invoke the cached code. + Handle<Code> code(function->code()); ParameterCount expected(function->shared()->formal_parameter_count()); - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. - lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); - InvokeCode(a3, expected, actual, flag, NullCallWrapper(), call_kind); + if (V8::UseCrankshaft()) { + // TODO(kasperl): For now, we always call indirectly through the + // code field in the function to allow recompilation to take effect + // without changing any of the call sites. + lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); + InvokeCode(a3, expected, actual, flag, NullCallWrapper(), call_kind); + } else { + InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag, call_kind); + } } @@ -3667,8 +3674,7 @@ void MacroAssembler::IsObjectJSStringType(Register object, void MacroAssembler::TryGetFunctionPrototype(Register function, Register result, Register scratch, - Label* miss, - bool miss_on_bound_function) { + Label* miss) { // Check that the receiver isn't a smi. 
JumpIfSmi(function, miss); @@ -3676,16 +3682,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, GetObjectType(function, result, scratch); Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE)); - if (miss_on_bound_function) { - lw(scratch, - FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); - lw(scratch, - FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset)); - And(scratch, scratch, - Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction))); - Branch(miss, ne, scratch, Operand(zero_reg)); - } - // Make sure that the function has an instance prototype. Label non_instance; lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset)); diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h index 84c55f7e6..6f81a4bd6 100644 --- a/deps/v8/src/mips/macro-assembler-mips.h +++ b/deps/v8/src/mips/macro-assembler-mips.h @@ -887,8 +887,7 @@ class MacroAssembler: public Assembler { void TryGetFunctionPrototype(Register function, Register result, Register scratch, - Label* miss, - bool miss_on_bound_function = false); + Label* miss); void GetObjectType(Register function, Register map, diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.cc b/deps/v8/src/mips/regexp-macro-assembler-mips.cc index cb210fed0..9db5c5bed 100644 --- a/deps/v8/src/mips/regexp-macro-assembler-mips.cc +++ b/deps/v8/src/mips/regexp-macro-assembler-mips.cc @@ -1112,11 +1112,6 @@ int RegExpMacroAssemblerMIPS::CheckStackGuardState(Address* return_address, frame_entry<const String*>(re_frame, kInputString) = *subject; frame_entry<const byte*>(re_frame, kInputStart) = new_address; frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length; - } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) { - // Subject string might have been a ConsString that underwent - // short-circuiting during GC. That will not change start_address but - // will change pointer inside the subject handle. - frame_entry<const String*>(re_frame, kInputString) = *subject; } return 0; diff --git a/deps/v8/src/mips/simulator-mips.cc b/deps/v8/src/mips/simulator-mips.cc index 0ec3e283c..17c18977c 100644 --- a/deps/v8/src/mips/simulator-mips.cc +++ b/deps/v8/src/mips/simulator-mips.cc @@ -1359,9 +1359,9 @@ void Simulator::WriteB(int32_t addr, int8_t value) { // Returns the limit of the stack area to enable checking for stack overflows. uintptr_t Simulator::StackLimit() const { - // Leave a safety margin of 512 bytes to prevent overrunning the stack when + // Leave a safety margin of 256 bytes to prevent overrunning the stack when // pushing values. - return reinterpret_cast<uintptr_t>(stack_) + 512; + return reinterpret_cast<uintptr_t>(stack_) + 256; } diff --git a/deps/v8/src/mips/stub-cache-mips.cc b/deps/v8/src/mips/stub-cache-mips.cc index 9f94b1d79..4bad0a2cc 100644 --- a/deps/v8/src/mips/stub-cache-mips.cc +++ b/deps/v8/src/mips/stub-cache-mips.cc @@ -99,61 +99,7 @@ static void ProbeTable(Isolate* isolate, // must always call a backup property check that is complete. // This function is safe to call if the receiver has fast properties. // Name must be a symbol and receiver must be a heap object. 
-static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, - Label* miss_label, - Register receiver, - Handle<String> name, - Register scratch0, - Register scratch1) { - ASSERT(name->IsSymbol()); - Counters* counters = masm->isolate()->counters(); - __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); - __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); - - Label done; - - const int kInterceptorOrAccessCheckNeededMask = - (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); - - // Bail out if the receiver has a named interceptor or requires access checks. - Register map = scratch1; - __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); - __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); - __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask)); - __ Branch(miss_label, ne, scratch0, Operand(zero_reg)); - - // Check that receiver is a JSObject. - __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset)); - __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE)); - - // Load properties array. - Register properties = scratch0; - __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - // Check that the properties array is a dictionary. - __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset)); - Register tmp = properties; - __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); - __ Branch(miss_label, ne, map, Operand(tmp)); - - // Restore the temporarily used register. - __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - - - StringDictionaryLookupStub::GenerateNegativeLookup(masm, - miss_label, - &done, - receiver, - properties, - name, - scratch1); - __ bind(&done); - __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup( +MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup( MacroAssembler* masm, Label* miss_label, Register receiver, @@ -194,7 +140,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup( // Restore the temporarily used register. __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); - MaybeObject* result = StringDictionaryLookupStub::TryGenerateNegativeLookup( + MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup( masm, miss_label, &done, @@ -315,10 +261,8 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( // are loaded directly otherwise the property is loaded from the properties // fixed array. void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - Handle<JSObject> holder, - int index) { + Register dst, Register src, + JSObject* holder, int index) { // Adjust for the number of properties stored in the holder. index -= holder->map()->inobject_properties(); if (index < 0) { @@ -525,15 +469,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); - Handle<Code> code = (kind == Code::LOAD_IC) - ? 
masm->isolate()->builtins()->LoadIC_Miss() - : masm->isolate()->builtins()->KeyedLoadIC_Miss(); - __ Jump(code, RelocInfo::CODE_TARGET); + Code* code = NULL; + if (kind == Code::LOAD_IC) { + code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss); + } else { + code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss); + } + + Handle<Code> ic(code); + __ Jump(ic, RelocInfo::CODE_TARGET); } static void GenerateCallFunction(MacroAssembler* masm, - Handle<Object> object, + Object* object, const ParameterCount& arguments, Label* miss, Code::ExtraICState extra_ic_state) { @@ -929,25 +878,7 @@ class CallInterceptorCompiler BASE_EMBEDDED { // Generate code to check that a global property cell is empty. Create // the property cell at compilation time if no cell exists for the // property. -static void GenerateCheckPropertyCell(MacroAssembler* masm, - Handle<GlobalObject> global, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSGlobalPropertyCell> cell = - GlobalObject::EnsurePropertyCell(global, name); - ASSERT(cell->value()->IsTheHole()); - __ li(scratch, Operand(cell)); - __ lw(scratch, - FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); - __ LoadRoot(at, Heap::kTheHoleValueRootIndex); - __ Branch(miss, ne, scratch, Operand(at)); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( +MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell( MacroAssembler* masm, GlobalObject* global, String* name, @@ -970,29 +901,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( // Calls GenerateCheckPropertyCell for each global object in the prototype chain // from object to (but not including) holder. -static void GenerateCheckPropertyCells(MacroAssembler* masm, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSObject> current = object; - while (!current.is_identical_to(holder)) { - if (current->IsGlobalObject()) { - GenerateCheckPropertyCell(masm, - Handle<GlobalObject>::cast(current), - name, - scratch, - miss); - } - current = Handle<JSObject>(JSObject::cast(current->GetPrototype())); - } -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( +MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells( MacroAssembler* masm, JSObject* object, JSObject* holder, @@ -1003,7 +912,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( while (current != holder) { if (current->IsGlobalObject()) { // Returns a cell or a failure. - MaybeObject* result = TryGenerateCheckPropertyCell( + MaybeObject* result = GenerateCheckPropertyCell( masm, GlobalObject::cast(current), name, @@ -1138,108 +1047,6 @@ static void GenerateUInt2Double(MacroAssembler* masm, #define __ ACCESS_MASM(masm()) -Register StubCompiler::CheckPrototypes(Handle<JSObject> object, - Register object_reg, - Handle<JSObject> holder, - Register holder_reg, - Register scratch1, - Register scratch2, - Handle<String> name, - int save_at_depth, - Label* miss) { - // Make sure there's no overlap between holder and object registers. - ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); - ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) - && !scratch2.is(scratch1)); - - // Keep track of the current object in register reg. 
- Register reg = object_reg; - int depth = 0; - - if (save_at_depth == depth) { - __ sw(reg, MemOperand(sp)); - } - - // Check the maps in the prototype chain. - // Traverse the prototype chain from the object and do map checks. - Handle<JSObject> current = object; - while (!current.is_identical_to(holder)) { - ++depth; - - // Only global objects and objects that do not require access - // checks are allowed in stubs. - ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); - - Handle<JSObject> prototype(JSObject::cast(current->GetPrototype())); - if (!current->HasFastProperties() && - !current->IsJSGlobalObject() && - !current->IsJSGlobalProxy()) { - if (!name->IsSymbol()) { - name = factory()->LookupSymbol(name); - } - ASSERT(current->property_dictionary()->FindEntry(*name) == - StringDictionary::kNotFound); - - GenerateDictionaryNegativeLookup(masm(), miss, reg, name, - scratch1, scratch2); - - __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); - reg = holder_reg; // From now on the object will be in holder_reg. - __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); - } else { - Handle<Map> current_map(current->map()); - __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); - // Branch on the result of the map check. - __ Branch(miss, ne, scratch1, Operand(current_map)); - // Check access rights to the global object. This has to happen after - // the map check so that we know that the object is actually a global - // object. - if (current->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch2, miss); - } - reg = holder_reg; // From now on the object will be in holder_reg. - - if (heap()->InNewSpace(*prototype)) { - // The prototype is in new space; we cannot store a reference to it - // in the code. Load it from the map. - __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); - } else { - // The prototype is in old space; load it directly. - __ li(reg, Operand(prototype)); - } - } - - if (save_at_depth == depth) { - __ sw(reg, MemOperand(sp)); - } - - // Go to the next object in the prototype chain. - current = prototype; - } - - // Log the check depth. - LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1)); - - // Check the holder map. - __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); - __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map()))); - - // Perform security check for access to the global object. - ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); - if (holder->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch1, miss); - } - - // If we've skipped any global objects, it's not enough to verify that - // their maps haven't changed. We also need to check that the property - // cell for the property is still empty. - GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss); - - // Return the register containing the holder. 
- return reg; -} - - Register StubCompiler::CheckPrototypes(JSObject* object, Register object_reg, JSObject* holder, @@ -1289,14 +1096,12 @@ Register StubCompiler::CheckPrototypes(JSObject* object, ASSERT(current->property_dictionary()->FindEntry(name) == StringDictionary::kNotFound); - MaybeObject* negative_lookup = - TryGenerateDictionaryNegativeLookup(masm(), - miss, - reg, - name, - scratch1, - scratch2); - + MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(), + miss, + reg, + name, + scratch1, + scratch2); if (negative_lookup->IsFailure()) { set_failure(Failure::cast(negative_lookup)); return reg; @@ -1361,18 +1166,18 @@ Register StubCompiler::CheckPrototypes(JSObject* object, ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); if (holder->IsJSGlobalProxy()) { __ CheckAccessGlobalProxy(reg, scratch1, miss); - } + }; // If we've skipped any global objects, it's not enough to verify // that their maps haven't changed. We also need to check that the // property cell for the property is still empty. - MaybeObject* result = TryGenerateCheckPropertyCells(masm(), - object, - holder, - name, - scratch1, - miss); + MaybeObject* result = GenerateCheckPropertyCells(masm(), + object, + holder, + name, + scratch1, + miss); if (result->IsFailure()) set_failure(Failure::cast(result)); // Return the register containing the holder. @@ -1380,35 +1185,36 @@ Register StubCompiler::CheckPrototypes(JSObject* object, } -void StubCompiler::GenerateLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadField(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, int index, - Handle<String> name, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ And(scratch1, receiver, Operand(kSmiTagMask)); __ Branch(miss, eq, scratch1, Operand(zero_reg)); // Check that the maps haven't changed. - Register reg = CheckPrototypes( - object, receiver, holder, scratch1, scratch2, scratch3, name, miss); + Register reg = + CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, + name, miss); GenerateFastPropertyLoad(masm(), v0, reg, holder, index); __ Ret(); } -void StubCompiler::GenerateLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadConstant(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, - Handle<Object> value, - Handle<String> name, + Object* value, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ JumpIfSmi(receiver, miss, scratch1); @@ -1419,7 +1225,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object, scratch1, scratch2, scratch3, name, miss); // Return the constant value. - __ li(v0, Operand(value)); + __ li(v0, Operand(Handle<Object>(value))); __ Ret(); } @@ -1584,8 +1390,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, // We found FIELD property in prototype chain of interceptor's holder. // Retrieve a field from field's holder. 
GenerateFastPropertyLoad(masm(), v0, holder_reg, - Handle<JSObject>(lookup->holder()), - lookup->GetFieldIndex()); + lookup->holder(), lookup->GetFieldIndex()); __ Ret(); } else { // We found CALLBACKS property in prototype chain of interceptor's @@ -1635,9 +1440,9 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, } -void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) { +void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { if (kind_ == Code::KEYED_CALL_IC) { - __ Branch(miss, ne, a2, Operand(name)); + __ Branch(miss, ne, a2, Operand(Handle<String>(name))); } } @@ -1694,22 +1499,11 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, } -void CallStubCompiler::GenerateMissBranch() { - Handle<Code> code = +MaybeObject* CallStubCompiler::GenerateMissBranch() { + MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), kind_, - extra_state_); - __ Jump(code, RelocInfo::CODE_TARGET); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* CallStubCompiler::TryGenerateMissBranch() { - MaybeObject* maybe_obj = - isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(), - kind_, - extra_state_); + extra_ic_state_); Object* obj; if (!maybe_obj->ToObject(&obj)) return maybe_obj; __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); @@ -1717,10 +1511,10 @@ MaybeObject* CallStubCompiler::TryGenerateMissBranch() { } -Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, - Handle<JSObject> holder, +MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, + JSObject* holder, int index, - Handle<String> name) { + String* name) { // ----------- S t a t e ------------- // -- a2 : name // -- ra : return address @@ -1740,11 +1534,12 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss); GenerateFastPropertyLoad(masm(), a1, reg, holder, index); - GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_); + GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_); // Handle call cache miss. __ bind(&miss); - GenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); + if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. return GetCode(FIELD, name); @@ -1769,7 +1564,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); Register receiver = a1; @@ -1845,7 +1640,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, __ bind(&with_write_barrier); __ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset)); - __ CheckFastObjectElements(t2, t2, &call_builtin); + __ CheckFastSmiOnlyElements(t2, t2, &call_builtin); // Save new length. __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); @@ -1935,11 +1730,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. 
- return TryGetCode(function); + return GetCode(function); } @@ -1964,7 +1759,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, Register receiver = a1; Register elements = a3; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2024,11 +1819,11 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2058,12 +1853,12 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -2111,11 +1906,11 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( // Restore function name in a2. __ li(a2, Handle<String>(name)); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2144,12 +1939,12 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -2199,11 +1994,11 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( // Restore function name in a2. __ li(a2, Handle<String>(name)); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2228,7 +2023,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ lw(a1, MemOperand(sp, 1 * kPointerSize)); @@ -2271,11 +2066,11 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( __ bind(&miss); // a2: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? 
GetCode(function) : GetCode(NORMAL, name); } @@ -2303,7 +2098,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss, slow; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ lw(a1, MemOperand(sp, 1 * kPointerSize)); @@ -2405,11 +2200,11 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, __ bind(&miss); // a2: function name. - MaybeObject* obj = TryGenerateMissBranch(); + MaybeObject* obj = GenerateMissBranch(); if (obj->IsFailure()) return obj; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2433,7 +2228,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ lw(a1, MemOperand(sp, 1 * kPointerSize)); @@ -2507,11 +2302,11 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, __ bind(&miss); // a2: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2537,7 +2332,7 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( Label miss, miss_before_stack_reserved; - GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved); + GenerateNameCheck(name, &miss_before_stack_reserved); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2562,11 +2357,11 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( FreeSpaceForFastApiCall(masm()); __ bind(&miss_before_stack_reserved); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2590,7 +2385,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2689,7 +2484,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, UNREACHABLE(); } - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind); @@ -2697,11 +2492,11 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2715,18 +2510,18 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. 
const int argc = arguments().immediate(); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); // Get the receiver from the stack. __ lw(a1, MemOperand(sp, argc * kPointerSize)); - CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_); + CallInterceptorCompiler compiler(this, arguments(), a2, extra_ic_state_); MaybeObject* result = compiler.Compile(masm(), object, holder, @@ -2746,16 +2541,15 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // Restore receiver. __ lw(a0, MemOperand(sp, argc * kPointerSize)); - GenerateCallFunction(masm(), Handle<Object>(object), arguments(), &miss, - extra_state_); + GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_); // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } @@ -2780,7 +2574,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. const int argc = arguments().immediate(); @@ -2801,26 +2595,32 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object, // Jump to the cached code (tail call). Counters* counters = masm()->isolate()->counters(); __ IncrementCounter(counters->call_global_inline(), 1, a3, t0); + ASSERT(function->is_compiled()); Handle<Code> code(function->code()); ParameterCount expected(function->shared()->formal_parameter_count()); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. - __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); - __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION, - NullCallWrapper(), call_kind); + if (V8::UseCrankshaft()) { + // TODO(kasperl): For now, we always call indirectly through the + // code field in the function to allow recompilation to take effect + // without changing any of the call sites. + __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); + __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION, + NullCallWrapper(), call_kind); + } else { + __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET, + JUMP_FUNCTION, call_kind); + } // Handle call cache miss. __ bind(&miss); __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. 
- return TryGetCode(NORMAL, name); + return GetCode(NORMAL, name); } @@ -2999,9 +2799,9 @@ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, } -Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> last) { +MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, + JSObject* object, + JSObject* last) { // ----------- S t a t e ------------- // -- a0 : receiver // -- ra : return address @@ -3017,8 +2817,15 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, // If the last object in the prototype chain is a global object, // check that the global property cell is empty. if (last->IsGlobalObject()) { - GenerateCheckPropertyCell( - masm(), Handle<GlobalObject>::cast(last), name, a1, &miss); + MaybeObject* cell = GenerateCheckPropertyCell(masm(), + GlobalObject::cast(last), + name, + a1, + &miss); + if (cell->IsFailure()) { + miss.Unuse(); + return cell; + } } // Return undefined if maps of the full prototype chain is still the same. @@ -3029,14 +2836,14 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return GetCode(NONEXISTENT, factory()->empty_string()); + return GetCode(NONEXISTENT, heap()->empty_string()); } -Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, + JSObject* holder, int index, - Handle<String> name) { + String* name) { // ----------- S t a t e ------------- // -- a0 : receiver // -- a2 : name @@ -3077,14 +2884,14 @@ MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<Object> value, - Handle<String> name) { +MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, + JSObject* holder, + Object* value, + String* name) { // ----------- S t a t e ------------- // -- a0 : receiver // -- a2 : name @@ -3112,7 +2919,7 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, // ----------------------------------- Label miss; - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); GenerateLoadInterceptor(object, holder, @@ -3128,7 +2935,7 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } @@ -3175,13 +2982,13 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(NORMAL, name); + return GetCode(NORMAL, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, +MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, + JSObject* receiver, + JSObject* holder, int index) { // ----------- S t a t e ------------- // -- ra : return address @@ -3191,7 +2998,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, Label miss; // Check the key is the cached one. 
- __ Branch(&miss, ne, a0, Operand(name)); + __ Branch(&miss, ne, a0, Operand(Handle<String>(name))); GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss); __ bind(&miss); @@ -3226,15 +3033,14 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); - return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( - Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, + JSObject* receiver, + JSObject* holder, + Object* value) { // ----------- S t a t e ------------- // -- ra : return address // -- a0 : key @@ -3243,7 +3049,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( Label miss; // Check the key is the cached one. - __ Branch(&miss, ne, a0, Operand(name)); + __ Branch(&miss, ne, a0, Operand(Handle<String>(name))); GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss); __ bind(&miss); @@ -3267,7 +3073,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, // Check the key is the cached one. __ Branch(&miss, ne, a0, Operand(Handle<String>(name))); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); GenerateLoadInterceptor(receiver, holder, @@ -3282,12 +3088,11 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { // ----------- S t a t e ------------- // -- ra : return address // -- a0 : key @@ -3296,7 +3101,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( Label miss; // Check the key is the cached one. - __ Branch(&miss, ne, a0, Operand(name)); + __ Branch(&miss, ne, a0, Operand(Handle<String>(name))); GenerateLoadArrayLength(masm(), a1, a2, &miss); __ bind(&miss); @@ -3306,8 +3111,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { // ----------- S t a t e ------------- // -- ra : return address // -- a0 : key @@ -3319,7 +3123,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3); // Check the key is the cached one. - __ Branch(&miss, ne, a0, Operand(name)); + __ Branch(&miss, ne, a0, Operand(Handle<String>(name))); GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true); __ bind(&miss); @@ -3331,8 +3135,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { // ----------- S t a t e ------------- // -- ra : return address // -- a0 : key @@ -3344,7 +3147,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3); // Check the name hasn't changed. 
- __ Branch(&miss, ne, a0, Operand(name)); + __ Branch(&miss, ne, a0, Operand(Handle<String>(name))); GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss); __ bind(&miss); @@ -3375,7 +3178,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) { __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return TryGetCode(NORMAL, NULL); + return GetCode(NORMAL, NULL); } @@ -3403,7 +3206,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadPolymorphic( __ Jump(miss_ic, RelocInfo::CODE_TARGET); // Return the generated code. - return TryGetCode(NORMAL, NULL, MEGAMORPHIC); + return GetCode(NORMAL, NULL, MEGAMORPHIC); } @@ -3496,8 +3299,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStorePolymorphic( __ Jump(code, RelocInfo::CODE_TARGET, eq, a3, Operand(map)); } else { Label next_map; - __ Branch(&next_map, ne, a3, Operand(map)); - __ li(a3, Operand(Handle<Map>(transitioned_maps->at(i)))); + __ Branch(&next_map, eq, a3, Operand(map)); + __ li(t0, Operand(Handle<Map>(transitioned_maps->at(i)))); __ Jump(code, RelocInfo::CODE_TARGET); __ bind(&next_map); } diff --git a/deps/v8/src/mirror-debugger.js b/deps/v8/src/mirror-debugger.js index 999252d57..e3f3c48bb 100644 --- a/deps/v8/src/mirror-debugger.js +++ b/deps/v8/src/mirror-debugger.js @@ -1087,7 +1087,7 @@ ErrorMirror.prototype.toText = function() { // Use the same text representation as in messages.js. var text; try { - str = %_CallFunction(this.value_, builtins.ErrorToString); + str = %_CallFunction(this.value_, builtins.errorToString); } catch (e) { str = '#<Error>'; } diff --git a/deps/v8/src/mksnapshot.cc b/deps/v8/src/mksnapshot.cc index bc0c2fc5b..7a3fd090d 100644 --- a/deps/v8/src/mksnapshot.cc +++ b/deps/v8/src/mksnapshot.cc @@ -312,6 +312,7 @@ int main(int argc, char** argv) { } // If we don't do this then we end up with a stray root pointing at the // context even after we have disposed of the context. + // TODO(gc): request full compaction? 
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags); i::Object* raw_context = *(v8::Utils::OpenHandle(*context)); context.Dispose(); diff --git a/deps/v8/src/objects-debug.cc b/deps/v8/src/objects-debug.cc index 64bda9473..6d2cf5f72 100644 --- a/deps/v8/src/objects-debug.cc +++ b/deps/v8/src/objects-debug.cc @@ -156,12 +156,6 @@ void HeapObject::HeapObjectVerify() { case JS_ARRAY_TYPE: JSArray::cast(this)->JSArrayVerify(); break; - case JS_SET_TYPE: - JSSet::cast(this)->JSSetVerify(); - break; - case JS_MAP_TYPE: - JSMap::cast(this)->JSMapVerify(); - break; case JS_WEAK_MAP_TYPE: JSWeakMap::cast(this)->JSWeakMapVerify(); break; @@ -269,12 +263,6 @@ void ExternalDoubleArray::ExternalDoubleArrayVerify() { void JSObject::JSObjectVerify() { VerifyHeapPointer(properties()); VerifyHeapPointer(elements()); - - if (GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS) { - ASSERT(this->elements()->IsFixedArray()); - ASSERT(this->elements()->length() >= 2); - } - if (HasFastProperties()) { CHECK_EQ(map()->unused_property_fields(), (map()->inobject_properties() + properties()->length() - @@ -506,22 +494,6 @@ void JSArray::JSArrayVerify() { } -void JSSet::JSSetVerify() { - CHECK(IsJSSet()); - JSObjectVerify(); - VerifyHeapPointer(table()); - ASSERT(table()->IsHashTable() || table()->IsUndefined()); -} - - -void JSMap::JSMapVerify() { - CHECK(IsJSMap()); - JSObjectVerify(); - VerifyHeapPointer(table()); - ASSERT(table()->IsHashTable() || table()->IsUndefined()); -} - - void JSWeakMap::JSWeakMapVerify() { CHECK(IsJSWeakMap()); JSObjectVerify(); diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h index dc3aa4666..cebf9be07 100644 --- a/deps/v8/src/objects-inl.h +++ b/deps/v8/src/objects-inl.h @@ -67,13 +67,6 @@ PropertyDetails PropertyDetails::AsDeleted() { } -#define TYPE_CHECKER(type, instancetype) \ - bool Object::Is##type() { \ - return Object::IsHeapObject() && \ - HeapObject::cast(this)->map()->instance_type() == instancetype; \ - } - - #define CAST_ACCESSOR(type) \ type* type::cast(Object* object) { \ ASSERT(object->Is##type()); \ @@ -119,11 +112,6 @@ PropertyDetails PropertyDetails::AsDeleted() { } -bool Object::IsFixedArrayBase() { - return IsFixedArray() || IsFixedDoubleArray(); -} - - bool Object::IsInstanceOf(FunctionTemplateInfo* expected) { // There is a constraint on the object; check. 
if (!this->IsJSObject()) return false; @@ -159,7 +147,10 @@ bool Object::NonFailureIsHeapObject() { } -TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) +bool Object::IsHeapNumber() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE; +} bool Object::IsString() { @@ -412,8 +403,16 @@ bool Object::IsNumber() { } -TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE) -TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE) +bool Object::IsByteArray() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE; +} + + +bool Object::IsFreeSpace() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == FREE_SPACE_TYPE; +} bool Object::IsFiller() { @@ -423,7 +422,11 @@ bool Object::IsFiller() { } -TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE) +bool Object::IsExternalPixelArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_PIXEL_ARRAY_TYPE; +} bool Object::IsExternalArray() { @@ -436,14 +439,60 @@ bool Object::IsExternalArray() { } -TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE) -TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE) -TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE) -TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE) -TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE) -TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE) -TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE) -TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE) +bool Object::IsExternalByteArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_BYTE_ARRAY_TYPE; +} + + +bool Object::IsExternalUnsignedByteArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE; +} + + +bool Object::IsExternalShortArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_SHORT_ARRAY_TYPE; +} + + +bool Object::IsExternalUnsignedShortArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE; +} + + +bool Object::IsExternalIntArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_INT_ARRAY_TYPE; +} + + +bool Object::IsExternalUnsignedIntArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_UNSIGNED_INT_ARRAY_TYPE; +} + + +bool Object::IsExternalFloatArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_FLOAT_ARRAY_TYPE; +} + + +bool Object::IsExternalDoubleArray() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == + EXTERNAL_DOUBLE_ARRAY_TYPE; +} bool MaybeObject::IsFailure() { @@ -500,14 +549,42 @@ bool Object::IsJSProxy() { } -TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE) -TYPE_CHECKER(JSSet, JS_SET_TYPE) -TYPE_CHECKER(JSMap, JS_MAP_TYPE) -TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE) -TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE) -TYPE_CHECKER(Map, MAP_TYPE) -TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE) -TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE) +bool Object::IsJSFunctionProxy() { + return Object::IsHeapObject() && + HeapObject::cast(this)->map()->instance_type() == 
JS_FUNCTION_PROXY_TYPE; +} + + +bool Object::IsJSWeakMap() { + return Object::IsJSObject() && + HeapObject::cast(this)->map()->instance_type() == JS_WEAK_MAP_TYPE; +} + + +bool Object::IsJSContextExtensionObject() { + return IsHeapObject() + && (HeapObject::cast(this)->map()->instance_type() == + JS_CONTEXT_EXTENSION_OBJECT_TYPE); +} + + +bool Object::IsMap() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE; +} + + +bool Object::IsFixedArray() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE; +} + + +bool Object::IsFixedDoubleArray() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == + FIXED_DOUBLE_ARRAY_TYPE; +} bool Object::IsDescriptorArray() { @@ -570,7 +647,10 @@ bool Object::IsSerializedScopeInfo() { } -TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE) +bool Object::IsJSFunction() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE; +} template <> inline bool Is<JSFunction>(Object* obj) { @@ -578,12 +658,43 @@ template <> inline bool Is<JSFunction>(Object* obj) { } -TYPE_CHECKER(Code, CODE_TYPE) -TYPE_CHECKER(Oddball, ODDBALL_TYPE) -TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE) -TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE) -TYPE_CHECKER(JSValue, JS_VALUE_TYPE) -TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE) +bool Object::IsCode() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE; +} + + +bool Object::IsOddball() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE; +} + + +bool Object::IsJSGlobalPropertyCell() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() + == JS_GLOBAL_PROPERTY_CELL_TYPE; +} + + +bool Object::IsSharedFunctionInfo() { + return Object::IsHeapObject() && + (HeapObject::cast(this)->map()->instance_type() == + SHARED_FUNCTION_INFO_TYPE); +} + + +bool Object::IsJSValue() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE; +} + + +bool Object::IsJSMessageObject() { + return Object::IsHeapObject() + && (HeapObject::cast(this)->map()->instance_type() == + JS_MESSAGE_OBJECT_TYPE); +} bool Object::IsStringWrapper() { @@ -591,7 +702,10 @@ bool Object::IsStringWrapper() { } -TYPE_CHECKER(Foreign, FOREIGN_TYPE) +bool Object::IsForeign() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == FOREIGN_TYPE; +} bool Object::IsBoolean() { @@ -600,8 +714,16 @@ bool Object::IsBoolean() { } -TYPE_CHECKER(JSArray, JS_ARRAY_TYPE) -TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE) +bool Object::IsJSArray() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE; +} + + +bool Object::IsJSRegExp() { + return Object::IsHeapObject() + && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE; +} template <> inline bool Is<JSArray>(Object* obj) { @@ -638,10 +760,7 @@ bool Object::IsJSFunctionResultCache() { return false; } #ifdef DEBUG - if (FLAG_verify_heap) { - reinterpret_cast<JSFunctionResultCache*>(this)-> - JSFunctionResultCacheVerify(); - } + reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify(); #endif return true; } @@ -653,9 +772,7 @@ bool Object::IsNormalizedMapCache() { return false; } #ifdef DEBUG - if (FLAG_verify_heap) { - 
reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify(); - } + reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify(); #endif return true; } @@ -704,8 +821,18 @@ bool Object::IsGlobalObject() { } -TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE) -TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE) +bool Object::IsJSGlobalObject() { + return IsHeapObject() && + (HeapObject::cast(this)->map()->instance_type() == + JS_GLOBAL_OBJECT_TYPE); +} + + +bool Object::IsJSBuiltinsObject() { + return IsHeapObject() && + (HeapObject::cast(this)->map()->instance_type() == + JS_BUILTINS_OBJECT_TYPE); +} bool Object::IsUndetectableObject() { @@ -1173,6 +1300,7 @@ ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset) FixedArrayBase* JSObject::elements() { Object* array = READ_FIELD(this, kElementsOffset); + ASSERT(array->HasValidElements()); return static_cast<FixedArrayBase*>(array); } @@ -1612,11 +1740,7 @@ void FixedDoubleArray::Initialize(FixedDoubleArray* from) { old_length * kDoubleSize); } else { for (int i = 0; i < old_length; ++i) { - if (from->is_the_hole(i)) { - set_the_hole(i); - } else { - set(i, from->get_scalar(i)); - } + set(i, from->get_scalar(i)); } } int offset = kHeaderSize + old_length * kDoubleSize; @@ -1681,13 +1805,15 @@ void FixedArray::set(int index, } -void FixedArray::NoWriteBarrierSet(FixedArray* array, - int index, - Object* value) { +void FixedArray::fast_set(FixedArray* array, int index, Object* value) { ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map()); ASSERT(index >= 0 && index < array->length()); ASSERT(!HEAP->InNewSpace(value)); WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); + array->GetHeap()->incremental_marking()->RecordWrite( + array, + HeapObject::RawField(array, kHeaderSize + index * kPointerSize), + value); } @@ -1775,12 +1901,10 @@ void DescriptorArray::set_bit_field3_storage(int value) { } -void DescriptorArray::NoWriteBarrierSwap(FixedArray* array, - int first, - int second) { +void DescriptorArray::fast_swap(FixedArray* array, int first, int second) { Object* tmp = array->get(first); - NoWriteBarrierSet(array, first, array->get(second)); - NoWriteBarrierSet(array, second, tmp); + fast_set(array, first, array->get(second)); + fast_set(array, second, tmp); } @@ -1888,9 +2012,7 @@ void DescriptorArray::Get(int descriptor_number, Descriptor* desc) { } -void DescriptorArray::Set(int descriptor_number, - Descriptor* desc, - const WhitenessWitness&) { +void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { // Range check. 
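Editor's note: the FixedArray hunk above restores fast_set, which does the raw field write and then calls incremental_marking()->RecordWrite, in place of the 3.7.1 NoWriteBarrierSet, which did only the raw write (the surrounding whiteness-witness machinery made that safe). As a rough, hypothetical illustration of the idea, not V8's barrier, a "store with write barrier" is a raw store followed by a note to the collector about the mutated slot; Marker, Cell and the function names below are invented.

#include <cstddef>
#include <unordered_set>

struct Cell { void* slot = nullptr; };

// Collector bookkeeping: slots mutated since the last marking step.
// A real GC would filter the edges it actually cares about.
struct Marker {
  std::unordered_set<void**> remembered;
  void RecordWrite(void** slot) { remembered.insert(slot); }
};

// Barriered store: raw write plus a note to the collector.
inline void StoreWithBarrier(Marker* m, Cell* c, void* value) {
  c->slot = value;
  m->RecordWrite(&c->slot);
}

// Raw store: only safe when the caller can prove the collector does not
// need to see this edge (an assert would stand in for such a proof).
inline void StoreNoBarrier(Cell* c, void* value) {
  c->slot = value;
}

int main() {
  Marker marker;
  Cell cell;
  int payload = 7;
  StoreWithBarrier(&marker, &cell, &payload);
  return marker.remembered.count(&cell.slot) == 1 ? 0 : 1;
}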
ASSERT(descriptor_number < number_of_descriptors()); @@ -1898,53 +2020,26 @@ void DescriptorArray::Set(int descriptor_number, ASSERT(!HEAP->InNewSpace(desc->GetKey())); ASSERT(!HEAP->InNewSpace(desc->GetValue())); - NoWriteBarrierSet(this, - ToKeyIndex(descriptor_number), - desc->GetKey()); + fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey()); FixedArray* content_array = GetContentArray(); - NoWriteBarrierSet(content_array, - ToValueIndex(descriptor_number), - desc->GetValue()); - NoWriteBarrierSet(content_array, - ToDetailsIndex(descriptor_number), - desc->GetDetails().AsSmi()); + fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue()); + fast_set(content_array, ToDetailsIndex(descriptor_number), + desc->GetDetails().AsSmi()); } -void DescriptorArray::CopyFrom(int index, - DescriptorArray* src, - int src_index, - const WhitenessWitness& witness) { +void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) { Descriptor desc; src->Get(src_index, &desc); - Set(index, &desc, witness); + Set(index, &desc); } -void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) { - NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second)); +void DescriptorArray::Swap(int first, int second) { + fast_swap(this, ToKeyIndex(first), ToKeyIndex(second)); FixedArray* content_array = GetContentArray(); - NoWriteBarrierSwap(content_array, - ToValueIndex(first), - ToValueIndex(second)); - NoWriteBarrierSwap(content_array, - ToDetailsIndex(first), - ToDetailsIndex(second)); -} - - -DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array) - : marking_(array->GetHeap()->incremental_marking()) { - marking_->EnterNoMarkingScope(); - if (array->number_of_descriptors() > 0) { - ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT); - ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT); - } -} - - -DescriptorArray::WhitenessWitness::~WhitenessWitness() { - marking_->LeaveNoMarkingScope(); + fast_swap(content_array, ToValueIndex(first), ToValueIndex(second)); + fast_swap(content_array, ToDetailsIndex(first), ToDetailsIndex(second)); } @@ -2047,8 +2142,6 @@ CAST_ACCESSOR(JSArray) CAST_ACCESSOR(JSRegExp) CAST_ACCESSOR(JSProxy) CAST_ACCESSOR(JSFunctionProxy) -CAST_ACCESSOR(JSSet) -CAST_ACCESSOR(JSMap) CAST_ACCESSOR(JSWeakMap) CAST_ACCESSOR(Foreign) CAST_ACCESSOR(ByteArray) @@ -2886,21 +2979,6 @@ void Code::set_has_debug_break_slots(bool value) { } -bool Code::is_compiled_optimizable() { - ASSERT(kind() == FUNCTION); - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); - return FullCodeFlagsIsCompiledOptimizable::decode(flags); -} - - -void Code::set_compiled_optimizable(bool value) { - ASSERT(kind() == FUNCTION); - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); - flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value); - WRITE_BYTE_FIELD(this, kFullCodeFlags, flags); -} - - int Code::allow_osr_at_loop_nesting_level() { ASSERT(kind() == FUNCTION); return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset); @@ -3226,7 +3304,7 @@ ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset) ACCESSORS(Map, constructor, Object, kConstructorOffset) ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset) -ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset) +ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset) ACCESSORS(JSFunction, next_function_link, Object, @@ -3469,23 +3547,8 @@ void 
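Editor's note: the WhitenessWitness being deleted above is an RAII guard: its constructor asks the incremental marker to enter a no-marking scope (and asserts the arrays are still white), its destructor leaves the scope, and the barrier-free Set/CopyFrom/Swap calls take it as a parameter so they cannot be called without one. A minimal sketch of that pattern, with invented names and none of V8's marking machinery, looks like this:

#include <cassert>

struct IncrementalMarking {
  int no_marking_depth = 0;
  void EnterNoMarkingScope() { ++no_marking_depth; }
  void LeaveNoMarkingScope() { assert(no_marking_depth > 0); --no_marking_depth; }
  bool IsMarkingSuppressed() const { return no_marking_depth > 0; }
};

// Scoped guard: constructing one proves, for the lifetime of the object,
// that marking is suppressed, so barrier-free writes are permitted.
class NoMarkingScope {
 public:
  explicit NoMarkingScope(IncrementalMarking* m) : marking_(m) {
    marking_->EnterNoMarkingScope();
  }
  ~NoMarkingScope() { marking_->LeaveNoMarkingScope(); }
 private:
  IncrementalMarking* marking_;
};

// Requiring a reference to the guard makes it hard to reach the
// barrier-free path without having opened the scope first.
void BarrierFreeWrite(int* slot, int value, const NoMarkingScope&) { *slot = value; }

int main() {
  IncrementalMarking marking;
  int field = 0;
  {
    NoMarkingScope scope(&marking);
    BarrierFreeWrite(&field, 42, scope);
  }
  assert(!marking.IsMarkingSuppressed() && field == 42);
}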
SharedFunctionInfo::set_optimization_disabled(bool disable) { } -StrictModeFlag SharedFunctionInfo::strict_mode_flag() { - return BooleanBit::get(compiler_hints(), kStrictModeFunction) - ? kStrictMode : kNonStrictMode; -} - - -void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) { - ASSERT(strict_mode_flag == kStrictMode || - strict_mode_flag == kNonStrictMode); - bool value = strict_mode_flag == kStrictMode; - set_compiler_hints( - BooleanBit::set(compiler_hints(), kStrictModeFunction, value)); -} - - -BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode, - kStrictModeFunction) +BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, strict_mode, + kStrictModeFunction) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, name_should_print_as_anonymous, @@ -3760,36 +3823,7 @@ bool JSFunction::is_compiled() { } -FixedArray* JSFunction::literals() { - ASSERT(!shared()->bound()); - return literals_or_bindings(); -} - - -void JSFunction::set_literals(FixedArray* literals) { - ASSERT(!shared()->bound()); - set_literals_or_bindings(literals); -} - - -FixedArray* JSFunction::function_bindings() { - ASSERT(shared()->bound()); - return literals_or_bindings(); -} - - -void JSFunction::set_function_bindings(FixedArray* bindings) { - ASSERT(shared()->bound()); - // Bound function literal may be initialized to the empty fixed array - // before the bindings are set. - ASSERT(bindings == GetHeap()->empty_fixed_array() || - bindings->map() == GetHeap()->fixed_cow_array_map()); - set_literals_or_bindings(bindings); -} - - int JSFunction::NumberOfLiterals() { - ASSERT(!shared()->bound()); return literals()->length(); } @@ -3836,8 +3870,6 @@ void JSProxy::InitializeBody(int object_size, Object* value) { } -ACCESSORS(JSSet, table, Object, kTableOffset) -ACCESSORS(JSMap, table, Object, kTableOffset) ACCESSORS(JSWeakMap, table, Object, kTableOffset) ACCESSORS(JSWeakMap, next, Object, kNextOffset) @@ -4024,16 +4056,14 @@ ElementsKind JSObject::GetElementsKind() { reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset)); Map* map = fixed_array->map(); ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) && - (map == GetHeap()->fixed_array_map() || - map == GetHeap()->fixed_cow_array_map())) || - (kind == FAST_DOUBLE_ELEMENTS && - fixed_array->IsFixedDoubleArray()) || - (kind == DICTIONARY_ELEMENTS && - fixed_array->IsFixedArray() && - fixed_array->IsDictionary()) || - (kind > DICTIONARY_ELEMENTS)); - ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) || - (elements()->IsFixedArray() && elements()->length() >= 2)); + (map == GetHeap()->fixed_array_map() || + map == GetHeap()->fixed_cow_array_map())) || + (kind == FAST_DOUBLE_ELEMENTS && + fixed_array->IsFixedDoubleArray()) || + (kind == DICTIONARY_ELEMENTS && + fixed_array->IsFixedArray() && + fixed_array->IsDictionary()) || + (kind > DICTIONARY_ELEMENTS)); #endif return kind; } @@ -4377,7 +4407,7 @@ void Dictionary<Shape, Key>::SetEntry(int entry, WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc); FixedArray::set(index, key, mode); FixedArray::set(index+1, value, mode); - FixedArray::set(index+2, details.AsSmi()); + FixedArray::fast_set(this, index+2, details.AsSmi()); } @@ -4426,31 +4456,27 @@ MaybeObject* StringDictionaryShape::AsObject(String* key) { } -template <int entrysize> -bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) { - return key->SameValue(other); +bool ObjectHashTableShape::IsMatch(JSReceiver* 
key, Object* other) { + return key == JSReceiver::cast(other); } -template <int entrysize> -uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) { - ASSERT(!key->IsUndefined() && !key->IsNull()); - MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION); - return Smi::cast(maybe_hash->ToObjectChecked())->value(); +uint32_t ObjectHashTableShape::Hash(JSReceiver* key) { + MaybeObject* maybe_hash = key->GetIdentityHash(OMIT_CREATION); + ASSERT(!maybe_hash->IsFailure()); + return Smi::cast(maybe_hash->ToObjectUnchecked())->value(); } -template <int entrysize> -uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key, - Object* other) { - ASSERT(!other->IsUndefined() && !other->IsNull()); - MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION); - return Smi::cast(maybe_hash->ToObjectChecked())->value(); +uint32_t ObjectHashTableShape::HashForObject(JSReceiver* key, Object* other) { + MaybeObject* maybe_hash = + JSReceiver::cast(other)->GetIdentityHash(OMIT_CREATION); + ASSERT(!maybe_hash->IsFailure()); + return Smi::cast(maybe_hash->ToObjectUnchecked())->value(); } -template <int entrysize> -MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) { +MaybeObject* ObjectHashTableShape::AsObject(JSReceiver* key) { return key; } @@ -4508,12 +4534,6 @@ MaybeObject* FixedArray::Copy() { } -MaybeObject* FixedDoubleArray::Copy() { - if (length() == 0) return this; - return GetHeap()->CopyFixedDoubleArray(this); -} - - Relocatable::Relocatable(Isolate* isolate) { ASSERT(isolate == Isolate::Current()); isolate_ = isolate; diff --git a/deps/v8/src/objects-printer.cc b/deps/v8/src/objects-printer.cc index b788504fa..fc7573241 100644 --- a/deps/v8/src/objects-printer.cc +++ b/deps/v8/src/objects-printer.cc @@ -245,6 +245,54 @@ void ExternalDoubleArray::ExternalDoubleArrayPrint(FILE* out) { } +static void PrintElementsKind(FILE* out, ElementsKind kind) { + switch (kind) { + case FAST_SMI_ONLY_ELEMENTS: + PrintF(out, "FAST_SMI_ONLY_ELEMENTS"); + break; + case FAST_ELEMENTS: + PrintF(out, "FAST_ELEMENTS"); + break; + case FAST_DOUBLE_ELEMENTS: + PrintF(out, "FAST_DOUBLE_ELEMENTS"); + break; + case DICTIONARY_ELEMENTS: + PrintF(out, "DICTIONARY_ELEMENTS"); + break; + case NON_STRICT_ARGUMENTS_ELEMENTS: + PrintF(out, "NON_STRICT_ARGUMENTS_ELEMENTS"); + break; + case EXTERNAL_BYTE_ELEMENTS: + PrintF(out, "EXTERNAL_BYTE_ELEMENTS"); + break; + case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: + PrintF(out, "EXTERNAL_UNSIGNED_BYTE_ELEMENTS"); + break; + case EXTERNAL_SHORT_ELEMENTS: + PrintF(out, "EXTERNAL_SHORT_ELEMENTS"); + break; + case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: + PrintF(out, "EXTERNAL_UNSIGNED_SHORT_ELEMENTS"); + break; + case EXTERNAL_INT_ELEMENTS: + PrintF(out, "EXTERNAL_INT_ELEMENTS"); + break; + case EXTERNAL_UNSIGNED_INT_ELEMENTS: + PrintF(out, "EXTERNAL_UNSIGNED_INT_ELEMENTS"); + break; + case EXTERNAL_FLOAT_ELEMENTS: + PrintF(out, "EXTERNAL_FLOAT_ELEMENTS"); + break; + case EXTERNAL_DOUBLE_ELEMENTS: + PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS"); + break; + case EXTERNAL_PIXEL_ELEMENTS: + PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS"); + break; + } +} + + void JSObject::PrintProperties(FILE* out) { if (HasFastProperties()) { DescriptorArray* descs = map()->instance_descriptors(); diff --git a/deps/v8/src/objects-visiting.cc b/deps/v8/src/objects-visiting.cc index a796283e2..20a7b3170 100644 --- a/deps/v8/src/objects-visiting.cc +++ b/deps/v8/src/objects-visiting.cc @@ -94,16 +94,6 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId( case JS_GLOBAL_PROPERTY_CELL_TYPE: return 
kVisitPropertyCell; - case JS_SET_TYPE: - return GetVisitorIdForSize(kVisitStruct, - kVisitStructGeneric, - JSSet::kSize); - - case JS_MAP_TYPE: - return GetVisitorIdForSize(kVisitStruct, - kVisitStructGeneric, - JSMap::kSize); - case JS_WEAK_MAP_TYPE: return kVisitJSWeakMap; diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc index 9a87ac57d..561273230 100644 --- a/deps/v8/src/objects.cc +++ b/deps/v8/src/objects.cc @@ -55,54 +55,6 @@ namespace v8 { namespace internal { -void PrintElementsKind(FILE* out, ElementsKind kind) { - switch (kind) { - case FAST_SMI_ONLY_ELEMENTS: - PrintF(out, "FAST_SMI_ONLY_ELEMENTS"); - break; - case FAST_ELEMENTS: - PrintF(out, "FAST_ELEMENTS"); - break; - case FAST_DOUBLE_ELEMENTS: - PrintF(out, "FAST_DOUBLE_ELEMENTS"); - break; - case DICTIONARY_ELEMENTS: - PrintF(out, "DICTIONARY_ELEMENTS"); - break; - case NON_STRICT_ARGUMENTS_ELEMENTS: - PrintF(out, "NON_STRICT_ARGUMENTS_ELEMENTS"); - break; - case EXTERNAL_BYTE_ELEMENTS: - PrintF(out, "EXTERNAL_BYTE_ELEMENTS"); - break; - case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: - PrintF(out, "EXTERNAL_UNSIGNED_BYTE_ELEMENTS"); - break; - case EXTERNAL_SHORT_ELEMENTS: - PrintF(out, "EXTERNAL_SHORT_ELEMENTS"); - break; - case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: - PrintF(out, "EXTERNAL_UNSIGNED_SHORT_ELEMENTS"); - break; - case EXTERNAL_INT_ELEMENTS: - PrintF(out, "EXTERNAL_INT_ELEMENTS"); - break; - case EXTERNAL_UNSIGNED_INT_ELEMENTS: - PrintF(out, "EXTERNAL_UNSIGNED_INT_ELEMENTS"); - break; - case EXTERNAL_FLOAT_ELEMENTS: - PrintF(out, "EXTERNAL_FLOAT_ELEMENTS"); - break; - case EXTERNAL_DOUBLE_ELEMENTS: - PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS"); - break; - case EXTERNAL_PIXEL_ELEMENTS: - PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS"); - break; - } -} - - // Getters and setters are stored in a fixed array property. These are // constants for their indices. const int kGetterIndex = 0; @@ -202,7 +154,7 @@ void Object::Lookup(String* name, LookupResult* result) { MaybeObject* Object::GetPropertyWithReceiver(Object* receiver, String* name, PropertyAttributes* attributes) { - LookupResult result(name->GetIsolate()); + LookupResult result; Lookup(name, &result); MaybeObject* value = GetProperty(receiver, &result, name, attributes); ASSERT(*attributes <= ABSENT); @@ -282,14 +234,6 @@ MaybeObject* JSProxy::GetPropertyWithHandler(Object* receiver_raw, } -Handle<Object> Object::GetElement(Handle<Object> object, uint32_t index) { - Isolate* isolate = object->IsHeapObject() - ? Handle<HeapObject>::cast(object)->GetIsolate() - : Isolate::Current(); - CALL_HEAP_FUNCTION(isolate, object->GetElement(index), Object); -} - - MaybeObject* JSProxy::GetElementWithHandler(Object* receiver, uint32_t index) { String* name; @@ -366,7 +310,7 @@ MaybeObject* JSObject::GetPropertyWithFailedAccessCheck( case FIELD: case CONSTANT_FUNCTION: { // Search ALL_CAN_READ accessors in prototype chain. - LookupResult r(GetIsolate()); + LookupResult r; result->holder()->LookupRealNamedPropertyInPrototypes(name, &r); if (r.IsProperty()) { return GetPropertyWithFailedAccessCheck(receiver, @@ -379,7 +323,7 @@ MaybeObject* JSObject::GetPropertyWithFailedAccessCheck( case INTERCEPTOR: { // If the object has an interceptor, try real named properties. // No access check in GetPropertyAttributeWithInterceptor. 
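Editor's note: both copies of PrintElementsKind above (the one added to objects-printer.cc and the one removed from objects.cc) spell out one PrintF per enumerator, and in both the EXTERNAL_PIXEL_ELEMENTS case prints "EXTERNAL_DOUBLE_ELEMENTS", which looks like a copy-paste slip present on both sides of the revert rather than something it introduces. One common way to keep such a mapping in sync is an X-macro that derives each string from its enumerator; the sketch below uses an invented, abbreviated ElementsKind list purely for illustration.

#include <cstdio>

// Single list of enumerators; both the enum and the name table are
// generated from it, so a name can never drift from its enumerator.
#define ELEMENTS_KIND_LIST(V) \
  V(FAST_SMI_ONLY_ELEMENTS)   \
  V(FAST_ELEMENTS)            \
  V(FAST_DOUBLE_ELEMENTS)     \
  V(DICTIONARY_ELEMENTS)      \
  V(EXTERNAL_PIXEL_ELEMENTS)

enum ElementsKind {
#define DECLARE_KIND(name) name,
  ELEMENTS_KIND_LIST(DECLARE_KIND)
#undef DECLARE_KIND
};

static const char* ElementsKindToString(ElementsKind kind) {
  switch (kind) {
#define KIND_CASE(name) case name: return #name;
    ELEMENTS_KIND_LIST(KIND_CASE)
#undef KIND_CASE
  }
  return "UNKNOWN";
}

int main() {
  std::printf("%s\n", ElementsKindToString(EXTERNAL_PIXEL_ELEMENTS));
}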
- LookupResult r(GetIsolate()); + LookupResult r; result->holder()->LookupRealNamedProperty(name, &r); if (r.IsProperty()) { return GetPropertyWithFailedAccessCheck(receiver, @@ -426,7 +370,7 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck( case CONSTANT_FUNCTION: { if (!continue_search) break; // Search ALL_CAN_READ accessors in prototype chain. - LookupResult r(GetIsolate()); + LookupResult r; result->holder()->LookupRealNamedPropertyInPrototypes(name, &r); if (r.IsProperty()) { return GetPropertyAttributeWithFailedAccessCheck(receiver, @@ -440,7 +384,7 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck( case INTERCEPTOR: { // If the object has an interceptor, try real named properties. // No access check in GetPropertyAttributeWithInterceptor. - LookupResult r(GetIsolate()); + LookupResult r; if (continue_search) { result->holder()->LookupRealNamedProperty(name, &r); } else { @@ -460,7 +404,7 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck( } } - GetIsolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS); + GetHeap()->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS); return ABSENT; } @@ -584,21 +528,6 @@ bool JSObject::IsDirty() { } -Handle<Object> Object::GetProperty(Handle<Object> object, - Handle<Object> receiver, - LookupResult* result, - Handle<String> key, - PropertyAttributes* attributes) { - Isolate* isolate = object->IsHeapObject() - ? Handle<HeapObject>::cast(object)->GetIsolate() - : Isolate::Current(); - CALL_HEAP_FUNCTION( - isolate, - object->GetProperty(*receiver, result, *key, attributes), - Object); -} - - MaybeObject* Object::GetProperty(Object* receiver, LookupResult* result, String* name, @@ -771,49 +700,6 @@ Object* Object::GetPrototype() { } -MaybeObject* Object::GetHash(CreationFlag flag) { - // The object is either a number, a string, an odd-ball, - // a real JS object, or a Harmony proxy. - if (IsNumber()) { - uint32_t hash = ComputeLongHash(double_to_uint64(Number())); - return Smi::FromInt(hash & Smi::kMaxValue); - } - if (IsString()) { - uint32_t hash = String::cast(this)->Hash(); - return Smi::FromInt(hash); - } - if (IsOddball()) { - uint32_t hash = Oddball::cast(this)->to_string()->Hash(); - return Smi::FromInt(hash); - } - if (IsJSReceiver()) { - return JSReceiver::cast(this)->GetIdentityHash(flag); - } - - UNREACHABLE(); - return Smi::FromInt(0); -} - - -bool Object::SameValue(Object* other) { - if (other == this) return true; - if (!IsHeapObject() || !other->IsHeapObject()) return false; - - // The object is either a number, a string, an odd-ball, - // a real JS object, or a Harmony proxy. 
- if (IsNumber() && other->IsNumber()) { - double this_value = Number(); - double other_value = other->Number(); - return (this_value == other_value) || - (isnan(this_value) && isnan(other_value)); - } - if (IsString() && other->IsString()) { - return String::cast(this)->Equals(String::cast(other)); - } - return false; -} - - void Object::ShortPrint(FILE* out) { HeapStringAllocator allocator; StringStream accumulator(&allocator); @@ -1188,27 +1074,6 @@ void JSObject::JSObjectShortPrint(StringStream* accumulator) { } -void JSObject::PrintElementsTransition( - FILE* file, ElementsKind from_kind, FixedArrayBase* from_elements, - ElementsKind to_kind, FixedArrayBase* to_elements) { - if (from_kind != to_kind) { - PrintF(file, "elements transition ["); - PrintElementsKind(file, from_kind); - PrintF(file, " -> "); - PrintElementsKind(file, to_kind); - PrintF(file, "] in "); - JavaScriptFrame::PrintTop(file, false, true); - PrintF(file, " for "); - ShortPrint(file); - PrintF(file, " from "); - from_elements->ShortPrint(file); - PrintF(file, " to "); - to_elements->ShortPrint(file); - PrintF(file, "\n"); - } -} - - void HeapObject::HeapObjectShortPrint(StringStream* accumulator) { Heap* heap = GetHeap(); if (!heap->Contains(this)) { @@ -1237,10 +1102,6 @@ void HeapObject::HeapObjectShortPrint(StringStream* accumulator) { case FIXED_ARRAY_TYPE: accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length()); break; - case FIXED_DOUBLE_ARRAY_TYPE: - accumulator->Add("<FixedDoubleArray[%u]>", - FixedDoubleArray::cast(this)->length()); - break; case BYTE_ARRAY_TYPE: accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length()); break; @@ -1386,8 +1247,6 @@ void HeapObject::IterateBody(InstanceType type, int object_size, case JS_CONTEXT_EXTENSION_OBJECT_TYPE: case JS_VALUE_TYPE: case JS_ARRAY_TYPE: - case JS_SET_TYPE: - case JS_MAP_TYPE: case JS_WEAK_MAP_TYPE: case JS_REGEXP_TYPE: case JS_GLOBAL_PROXY_TYPE: @@ -1799,7 +1658,7 @@ MaybeObject* JSObject::SetPropertyPostInterceptor( PropertyAttributes attributes, StrictModeFlag strict_mode) { // Check local property, ignore interceptor. - LookupResult result(GetIsolate()); + LookupResult result; LocalLookupRealNamedProperty(name, &result); if (result.IsFound()) { // An existing property, a map transition or a null descriptor was @@ -1981,7 +1840,7 @@ MaybeObject* JSReceiver::SetProperty(String* name, Object* value, PropertyAttributes attributes, StrictModeFlag strict_mode) { - LookupResult result(GetIsolate()); + LookupResult result; LocalLookup(name, &result); return SetProperty(&result, name, value, attributes, strict_mode); } @@ -2147,9 +2006,9 @@ MaybeObject* JSObject::SetPropertyWithCallbackSetterInPrototypes( PropertyAttributes attributes, bool* found, StrictModeFlag strict_mode) { - Heap* heap = GetHeap(); - LookupResult result(heap->isolate()); + LookupResult result; LookupCallbackSetterInPrototypes(name, &result); + Heap* heap = GetHeap(); if (result.IsFound()) { *found = true; if (result.type() == CALLBACKS) { @@ -2161,7 +2020,7 @@ MaybeObject* JSObject::SetPropertyWithCallbackSetterInPrototypes( } else if (result.type() == HANDLER) { // We could not find a local property so let's check whether there is an // accessor that wants to handle the property. 
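Editor's note: the removed Object::GetHash and Object::SameValue above hash a number via its 64-bit representation and treat two NaNs as equal when comparing values. The standalone sketch below reproduces just that numeric behaviour under the same assumptions; MixHash is a stand-in integer mixer, kMaxSmiValue is an assumed Smi-style payload limit, and none of this is V8's actual code.

#include <cmath>
#include <cstdint>
#include <cstring>

// Reinterpret a double as its 64-bit pattern (well defined via memcpy).
static uint64_t DoubleToUint64(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  return bits;
}

// Stand-in 64-bit mixer; any decent integer hash would do here.
static uint32_t MixHash(uint64_t v) {
  v ^= v >> 33;
  v *= 0xff51afd7ed558ccdULL;
  v ^= v >> 33;
  return static_cast<uint32_t>(v);
}

static const uint32_t kMaxSmiValue = 0x3fffffff;  // assumed 30-bit payload

uint32_t NumberHash(double d) { return MixHash(DoubleToUint64(d)) & kMaxSmiValue; }

// SameValue for numbers as in the removed code: ordinary equality,
// except that NaN is considered equal to NaN.
bool SameValueNumber(double a, double b) {
  return (a == b) || (std::isnan(a) && std::isnan(b));
}

int main() {
  return (SameValueNumber(NAN, NAN) && !SameValueNumber(0.0, 1.0) &&
          NumberHash(1.5) <= kMaxSmiValue) ? 0 : 1;
}

One general caveat with this scheme: values that compare equal must hash equally, and +0.0 and -0.0 compare equal under == while having different bit patterns, so a production implementation has to canonicalize or special-case them somewhere.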
- LookupResult accessor_result(heap->isolate()); + LookupResult accessor_result; LookupCallbackSetterInPrototypes(name, &accessor_result); if (accessor_result.IsFound()) { if (accessor_result.type() == CALLBACKS) { @@ -2226,51 +2085,6 @@ void Map::LookupInDescriptors(JSObject* holder, } -static bool ContainsMap(MapHandleList* maps, Handle<Map> map) { - ASSERT(!map.is_null()); - for (int i = 0; i < maps->length(); ++i) { - if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true; - } - return false; -} - - -template <class T> -static Handle<T> MaybeNull(T* p) { - if (p == NULL) return Handle<T>::null(); - return Handle<T>(p); -} - - -Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) { - ElementsKind elms_kind = elements_kind(); - if (elms_kind == FAST_DOUBLE_ELEMENTS) { - bool dummy = true; - Handle<Map> fast_map = - MaybeNull(LookupElementsTransitionMap(FAST_ELEMENTS, &dummy)); - if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) { - return fast_map; - } - return Handle<Map>::null(); - } - if (elms_kind == FAST_SMI_ONLY_ELEMENTS) { - bool dummy = true; - Handle<Map> double_map = - MaybeNull(LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, &dummy)); - // In the current implementation, if the DOUBLE map doesn't exist, the - // FAST map can't exist either. - if (double_map.is_null()) return Handle<Map>::null(); - Handle<Map> fast_map = - MaybeNull(double_map->LookupElementsTransitionMap(FAST_ELEMENTS, - &dummy)); - if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) { - return fast_map; - } - if (ContainsMap(candidates, double_map)) return double_map; - } - return Handle<Map>::null(); -} - static Map* GetElementsTransitionMapFromDescriptor(Object* descriptor_contents, ElementsKind elements_kind) { if (descriptor_contents->IsMap()) { @@ -2454,15 +2268,6 @@ MaybeObject* Map::AddElementsTransition(ElementsKind elements_kind, } -Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object, - ElementsKind to_kind) { - Isolate* isolate = object->GetIsolate(); - CALL_HEAP_FUNCTION(isolate, - object->GetElementsTransitionMap(to_kind), - Map); -} - - MaybeObject* JSObject::GetElementsTransitionMap(ElementsKind to_kind) { Map* current_map = map(); ElementsKind from_kind = current_map->elements_kind(); @@ -2618,7 +2423,7 @@ MaybeObject* JSObject::SetPropertyWithFailedAccessCheck( case INTERCEPTOR: { // Try lookup real named properties. Note that only property can be // set is callbacks marked as ALL_CAN_WRITE on the prototype chain. - LookupResult r(GetIsolate()); + LookupResult r; LookupRealNamedProperty(name, &r); if (r.IsProperty()) { return SetPropertyWithFailedAccessCheck(&r, @@ -2636,10 +2441,10 @@ MaybeObject* JSObject::SetPropertyWithFailedAccessCheck( } } - Isolate* isolate = GetIsolate(); - HandleScope scope(isolate); + Heap* heap = GetHeap(); + HandleScope scope(heap->isolate()); Handle<Object> value_handle(value); - isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET); + heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_SET); return *value_handle; } @@ -2702,7 +2507,6 @@ MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandlerIfDefiningSetter( *found = true; // except where defined otherwise... Isolate* isolate = GetHeap()->isolate(); Handle<JSProxy> proxy(this); - Handle<Object> handler(this->handler()); // Trap might morph proxy. 
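Editor's note: a large share of this revert is mechanical: the 3.7.1 sources passed an Isolate (or Heap) explicitly into LookupResult and similar helpers, while the restored code default-constructs them and lets them find the current isolate themselves. The sketch below illustrates that trade-off in generic terms, explicit parameter versus a thread-local "current" lookup; Isolate and LookupResult here are toy types, not V8's.

struct Isolate {
  int id;
  // Per-thread "current isolate", standing in for a TLS-based accessor.
  static thread_local Isolate* current;
  static Isolate* Current() { return current; }
};
thread_local Isolate* Isolate::current = nullptr;

struct LookupResult {
  // Implicit: convenient, but performs a thread-local lookup and hides
  // the dependency from the caller.
  LookupResult() : isolate_(Isolate::Current()) {}
  // Explicit: the caller already has the isolate, so no lookup is needed
  // and the dependency is visible in the signature.
  explicit LookupResult(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate_;
};

int main() {
  Isolate isolate{1};
  Isolate::current = &isolate;
  LookupResult implicit_result;            // finds the isolate via TLS
  LookupResult explicit_result(&isolate);  // isolate threaded through
  return implicit_result.isolate_ == explicit_result.isolate_ ? 0 : 1;
}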
Handle<String> name(name_raw); Handle<Object> value(value_raw); Handle<Object> args[] = { name }; @@ -2726,9 +2530,7 @@ MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandlerIfDefiningSetter( Handle<Object> configurable(v8::internal::GetProperty(desc, conf_name)); ASSERT(!isolate->has_pending_exception()); if (configurable->IsFalse()) { - Handle<String> trap = - isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor"); - Handle<Object> args[] = { handler, trap, name }; + Handle<Object> args[] = { Handle<Object>(proxy->handler()), proxy, name }; Handle<Object> error = isolate->factory()->NewTypeError( "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args))); return isolate->Throw(*error); @@ -2808,7 +2610,6 @@ MUST_USE_RESULT PropertyAttributes JSProxy::GetPropertyAttributeWithHandler( Isolate* isolate = GetIsolate(); HandleScope scope(isolate); Handle<JSProxy> proxy(this); - Handle<Object> handler(this->handler()); // Trap might morph proxy. Handle<JSReceiver> receiver(receiver_raw); Handle<Object> name(name_raw); @@ -2838,9 +2639,7 @@ MUST_USE_RESULT PropertyAttributes JSProxy::GetPropertyAttributeWithHandler( if (isolate->has_pending_exception()) return NONE; if (configurable->IsFalse()) { - Handle<String> trap = - isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor"); - Handle<Object> args[] = { handler, trap, name }; + Handle<Object> args[] = { Handle<Object>(proxy->handler()), proxy, name }; Handle<Object> error = isolate->factory()->NewTypeError( "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args))); isolate->Throw(*error); @@ -3060,12 +2859,12 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes( // Make sure that the top context does not change when doing callbacks or // interceptor calls. AssertNoContextChange ncc; - Isolate* isolate = GetIsolate(); - LookupResult result(isolate); + LookupResult result; LocalLookup(name, &result); // Check access rights if needed. if (IsAccessCheckNeeded()) { - if (!isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) { + Heap* heap = GetHeap(); + if (!heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_SET)) { return SetPropertyWithFailedAccessCheck(&result, name, value, @@ -3136,7 +2935,7 @@ PropertyAttributes JSObject::GetPropertyAttributePostInterceptor( String* name, bool continue_search) { // Check local property, ignore interceptor. - LookupResult result(GetIsolate()); + LookupResult result; LocalLookupRealNamedProperty(name, &result); if (result.IsProperty()) return result.GetAttributes(); @@ -3212,7 +3011,7 @@ PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver( ? NONE : ABSENT; } // Named property. - LookupResult result(GetIsolate()); + LookupResult result; Lookup(key, &result); return GetPropertyAttribute(receiver, &result, key, true); } @@ -3261,7 +3060,7 @@ PropertyAttributes JSReceiver::GetLocalPropertyAttribute(String* name) { return ABSENT; } // Named property. - LookupResult result(GetIsolate()); + LookupResult result; LocalLookup(name, &result); return GetPropertyAttribute(this, &result, name, false); } @@ -3276,9 +3075,7 @@ MaybeObject* NormalizedMapCache::Get(JSObject* obj, if (result->IsMap() && Map::cast(result)->EquivalentToForNormalization(fast, mode)) { #ifdef DEBUG - if (FLAG_verify_heap) { - Map::cast(result)->SharedMapVerify(); - } + Map::cast(result)->SharedMapVerify(); if (FLAG_enable_slow_asserts) { // The cached map should match newly created normalized map bit-by-bit. 
Object* fresh; @@ -3314,15 +3111,6 @@ void NormalizedMapCache::Clear() { } -void JSObject::UpdateMapCodeCache(Handle<JSObject> object, - Handle<String> name, - Handle<Code> code) { - Isolate* isolate = object->GetIsolate(); - CALL_HEAP_FUNCTION_VOID(isolate, - object->UpdateMapCodeCache(*name, *code)); -} - - MaybeObject* JSObject::UpdateMapCodeCache(String* name, Code* code) { if (map()->is_shared()) { // Fast case maps are never marked as shared. @@ -3568,7 +3356,7 @@ Smi* JSReceiver::GenerateIdentityHash() { do { // Generate a random 32-bit hash value but limit range to fit // within a smi. - hash_value = V8::RandomPrivate(isolate) & Smi::kMaxValue; + hash_value = V8::Random(isolate) & Smi::kMaxValue; attempts++; } while (hash_value == 0 && attempts < 30); hash_value = hash_value != 0 ? hash_value : 1; // never return 0 @@ -3589,9 +3377,6 @@ MaybeObject* JSObject::GetIdentityHash(CreationFlag flag) { Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_symbol()); if (stored_value->IsSmi()) return stored_value; - // Do not generate permanent identity hash code if not requested. - if (flag == OMIT_CREATION) return GetHeap()->undefined_value(); - Smi* hash = GenerateIdentityHash(); MaybeObject* result = SetHiddenProperty(GetHeap()->identity_hash_symbol(), hash); @@ -3782,7 +3567,7 @@ MaybeObject* JSObject::SetHiddenPropertiesDictionary( MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name, DeleteMode mode) { // Check local property, ignore interceptor. - LookupResult result(GetIsolate()); + LookupResult result; LocalLookupRealNamedProperty(name, &result); if (!result.IsProperty()) return GetHeap()->true_value(); @@ -3931,7 +3716,7 @@ MaybeObject* JSObject::DeleteProperty(String* name, DeleteMode mode) { if (name->AsArrayIndex(&index)) { return DeleteElement(index, mode); } else { - LookupResult result(isolate); + LookupResult result; LocalLookup(name, &result); if (!result.IsProperty()) return isolate->heap()->true_value(); // Ignore attributes if forcing a deletion. @@ -4142,16 +3927,15 @@ MaybeObject* JSObject::PreventExtensions() { // Tests for the fast common case for property enumeration: -// - This object and all prototypes has an enum cache (which means that -// it is no proxy, has no interceptors and needs no access checks). +// - This object and all prototypes has an enum cache (which means that it has +// no interceptors and needs no access checks). // - This object has no elements. // - No prototype has enumerable properties/elements. -bool JSReceiver::IsSimpleEnum() { +bool JSObject::IsSimpleEnum() { Heap* heap = GetHeap(); for (Object* o = this; o != heap->null_value(); o = JSObject::cast(o)->GetPrototype()) { - if (!o->IsJSObject()) return false; JSObject* curr = JSObject::cast(o); if (!curr->map()->instance_descriptors()->HasEnumCache()) return false; ASSERT(!curr->HasNamedInterceptor()); @@ -4281,27 +4065,19 @@ void JSObject::LookupCallback(String* name, LookupResult* result) { } -// Search for a getter or setter in an elements dictionary and update its -// attributes. Returns either undefined if the element is read-only, or the -// getter/setter pair (fixed array) if there is an existing one, or the hole -// value if the element does not exist or is a normal non-getter/setter data -// element. -static Object* UpdateGetterSetterInDictionary(NumberDictionary* dictionary, - uint32_t index, - PropertyAttributes attributes, - Heap* heap) { +// Search for a getter or setter in an elements dictionary. 
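Editor's note: GenerateIdentityHash above draws a random 32-bit value (V8::Random in the restored code), masks it into Smi range, retries up to 30 times while the result is zero, and finally falls back to 1, presumably because zero is reserved as a "no hash assigned" sentinel. A small sketch of that shape, using <random> instead of V8's generator and an assumed Smi limit:

#include <cstdint>
#include <random>

static const uint32_t kMaxSmiValue = 0x3fffffff;  // assumed Smi payload limit

// Generate a random, non-zero identity hash in Smi range. Retry a few
// times rather than loop forever, and fall back to 1 if every attempt
// happened to produce zero.
uint32_t GenerateIdentityHash(std::mt19937& rng) {
  uint32_t hash = 0;
  for (int attempts = 0; hash == 0 && attempts < 30; ++attempts) {
    hash = static_cast<uint32_t>(rng()) & kMaxSmiValue;
  }
  return hash != 0 ? hash : 1;  // never return 0
}

int main() {
  std::mt19937 rng(12345);
  return GenerateIdentityHash(rng) != 0 ? 0 : 1;
}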
Returns either +// undefined if the element is read-only, or the getter/setter pair (fixed +// array) if there is an existing one, or the hole value if the element does +// not exist or is a normal non-getter/setter data element. +static Object* FindGetterSetterInDictionary(NumberDictionary* dictionary, + uint32_t index, + Heap* heap) { int entry = dictionary->FindEntry(index); if (entry != NumberDictionary::kNotFound) { Object* result = dictionary->ValueAt(entry); PropertyDetails details = dictionary->DetailsAt(entry); if (details.IsReadOnly()) return heap->undefined_value(); - if (details.type() == CALLBACKS && result->IsFixedArray()) { - if (details.attributes() != attributes) { - dictionary->DetailsAtPut(entry, - PropertyDetails(attributes, CALLBACKS, index)); - } - return result; - } + if (details.type() == CALLBACKS && result->IsFixedArray()) return result; } return heap->the_hole_value(); } @@ -4343,10 +4119,8 @@ MaybeObject* JSObject::DefineGetterSetter(String* name, // elements. return heap->undefined_value(); case DICTIONARY_ELEMENTS: { - Object* probe = UpdateGetterSetterInDictionary(element_dictionary(), - index, - attributes, - heap); + Object* probe = + FindGetterSetterInDictionary(element_dictionary(), index, heap); if (!probe->IsTheHole()) return probe; // Otherwise allow to override it. break; @@ -4363,10 +4137,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name, FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); if (arguments->IsDictionary()) { NumberDictionary* dictionary = NumberDictionary::cast(arguments); - probe = UpdateGetterSetterInDictionary(dictionary, - index, - attributes, - heap); + probe = FindGetterSetterInDictionary(dictionary, index, heap); if (!probe->IsTheHole()) return probe; } } @@ -4375,7 +4146,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name, } } else { // Lookup the name. - LookupResult result(heap->isolate()); + LookupResult result; LocalLookup(name, &result); if (result.IsProperty()) { if (result.IsReadOnly()) return heap->undefined_value(); @@ -4405,8 +4176,8 @@ MaybeObject* JSObject::DefineGetterSetter(String* name, bool JSObject::CanSetCallback(String* name) { - ASSERT(!IsAccessCheckNeeded() || - GetIsolate()->MayNamedAccess(this, name, v8::ACCESS_SET)); + ASSERT(!IsAccessCheckNeeded() + || Isolate::Current()->MayNamedAccess(this, name, v8::ACCESS_SET)); // Check if there is an API defined callback object which prohibits // callback overwriting in this object or it's prototype chain. @@ -4414,7 +4185,7 @@ bool JSObject::CanSetCallback(String* name) { // certain accessors such as window.location should not be allowed // to be overwritten because allowing overwriting could potentially // cause security problems. - LookupResult callback_result(GetIsolate()); + LookupResult callback_result; LookupCallback(name, &callback_result); if (callback_result.IsProperty()) { Object* obj = callback_result.GetCallbackObject(); @@ -4611,7 +4382,7 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) { } } else { // Lookup the name. - LookupResult result(isolate); + LookupResult result; LocalLookup(name, &result); // ES5 forbids turning a property into an accessor if it's not // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5). 
@@ -4669,7 +4440,7 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) { for (Object* obj = this; obj != heap->null_value(); obj = JSObject::cast(obj)->GetPrototype()) { - LookupResult result(heap->isolate()); + LookupResult result; JSObject::cast(obj)->LocalLookup(name, &result); if (result.IsProperty()) { if (result.IsReadOnly()) return heap->undefined_value(); @@ -4777,7 +4548,7 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode, Map::cast(result)->set_is_shared(sharing == SHARED_NORMALIZED_MAP); #ifdef DEBUG - if (FLAG_verify_heap && Map::cast(result)->is_shared()) { + if (Map::cast(result)->is_shared()) { Map::cast(result)->SharedMapVerify(); } #endif @@ -4800,13 +4571,6 @@ MaybeObject* Map::CopyDropTransitions() { return new_map; } -void Map::UpdateCodeCache(Handle<Map> map, - Handle<String> name, - Handle<Code> code) { - Isolate* isolate = map->GetIsolate(); - CALL_HEAP_FUNCTION_VOID(isolate, - map->UpdateCodeCache(*name, *code)); -} MaybeObject* Map::UpdateCodeCache(String* name, Code* code) { // Allocate the code cache if not present. @@ -5196,16 +4960,7 @@ void CodeCacheHashTable::RemoveByIndex(int index) { } -void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> cache, - MapHandleList* maps, - Code::Flags flags, - Handle<Code> code) { - Isolate* isolate = cache->GetIsolate(); - CALL_HEAP_FUNCTION_VOID(isolate, cache->Update(maps, flags, *code)); -} - - -MaybeObject* PolymorphicCodeCache::Update(MapHandleList* maps, +MaybeObject* PolymorphicCodeCache::Update(MapList* maps, Code::Flags flags, Code* code) { // Initialize cache if necessary. @@ -5233,14 +4988,13 @@ MaybeObject* PolymorphicCodeCache::Update(MapHandleList* maps, } -Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps, - Code::Flags flags) { +Object* PolymorphicCodeCache::Lookup(MapList* maps, Code::Flags flags) { if (!cache()->IsUndefined()) { PolymorphicCodeCacheHashTable* hash_table = PolymorphicCodeCacheHashTable::cast(cache()); - return Handle<Object>(hash_table->Lookup(maps, flags)); + return hash_table->Lookup(maps, flags); } else { - return GetIsolate()->factory()->undefined_value(); + return GetHeap()->undefined_value(); } } @@ -5251,12 +5005,12 @@ Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps, class PolymorphicCodeCacheHashTableKey : public HashTableKey { public: // Callers must ensure that |maps| outlives the newly constructed object. 
- PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags) + PolymorphicCodeCacheHashTableKey(MapList* maps, int code_flags) : maps_(maps), code_flags_(code_flags) {} bool IsMatch(Object* other) { - MapHandleList other_maps(kDefaultListAllocationSize); + MapList other_maps(kDefaultListAllocationSize); int other_flags; FromObject(other, &other_flags, &other_maps); if (code_flags_ != other_flags) return false; @@ -5272,7 +5026,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { for (int i = 0; i < maps_->length(); ++i) { bool match_found = false; for (int j = 0; j < other_maps.length(); ++j) { - if (maps_->at(i)->EquivalentTo(*other_maps.at(j))) { + if (maps_->at(i)->EquivalentTo(other_maps.at(j))) { match_found = true; break; } @@ -5282,7 +5036,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { return true; } - static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) { + static uint32_t MapsHashHelper(MapList* maps, int code_flags) { uint32_t hash = code_flags; for (int i = 0; i < maps->length(); ++i) { hash ^= maps->at(i)->Hash(); @@ -5295,7 +5049,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { } uint32_t HashForObject(Object* obj) { - MapHandleList other_maps(kDefaultListAllocationSize); + MapList other_maps(kDefaultListAllocationSize); int other_flags; FromObject(obj, &other_flags, &other_maps); return MapsHashHelper(&other_maps, other_flags); @@ -5313,32 +5067,29 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { FixedArray* list = FixedArray::cast(obj); list->set(0, Smi::FromInt(code_flags_)); for (int i = 0; i < maps_->length(); ++i) { - list->set(i + 1, *maps_->at(i)); + list->set(i + 1, maps_->at(i)); } return list; } private: - static MapHandleList* FromObject(Object* obj, - int* code_flags, - MapHandleList* maps) { + static MapList* FromObject(Object* obj, int* code_flags, MapList* maps) { FixedArray* list = FixedArray::cast(obj); maps->Rewind(0); *code_flags = Smi::cast(list->get(0))->value(); for (int i = 1; i < list->length(); ++i) { - maps->Add(Handle<Map>(Map::cast(list->get(i)))); + maps->Add(Map::cast(list->get(i))); } return maps; } - MapHandleList* maps_; // weak. + MapList* maps_; // weak. int code_flags_; static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1; }; -Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps, - int code_flags) { +Object* PolymorphicCodeCacheHashTable::Lookup(MapList* maps, int code_flags) { PolymorphicCodeCacheHashTableKey key(maps, code_flags); int entry = FindEntry(&key); if (entry == kNotFound) return GetHeap()->undefined_value(); @@ -5346,7 +5097,7 @@ Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps, } -MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps, +MaybeObject* PolymorphicCodeCacheHashTable::Put(MapList* maps, int code_flags, Code* code) { PolymorphicCodeCacheHashTableKey key(maps, code_flags); @@ -5481,9 +5232,9 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage, if (IsEmpty()) return; // Do nothing for empty descriptor array. 
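Editor's note: MapsHashHelper above hashes a polymorphic cache key by starting from the code flags and XOR-ing in the hash of every receiver map. XOR makes the result independent of the order the maps appear in, which suits a key that is logically a set; the cost is that duplicate entries cancel out. A generic sketch with invented types (plain uint32_t map hashes instead of Map objects):

#include <cstdint>
#include <vector>

// Hash a (set of map hashes, flags) pair the way a polymorphic code cache
// key might: fold every map hash into the flags with XOR.
// XOR is commutative, so {A, B} and {B, A} produce the same key, but an
// even number of copies of the same map cancels to zero.
uint32_t MapsHashHelper(const std::vector<uint32_t>& map_hashes,
                        uint32_t code_flags) {
  uint32_t hash = code_flags;
  for (uint32_t h : map_hashes) hash ^= h;
  return hash;
}

int main() {
  std::vector<uint32_t> ab = {0x1111u, 0x2222u};
  std::vector<uint32_t> ba = {0x2222u, 0x1111u};
  return MapsHashHelper(ab, 7) == MapsHashHelper(ba, 7) ? 0 : 1;
}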
FixedArray::cast(bridge_storage)-> set(kEnumCacheBridgeCacheIndex, new_cache); - NoWriteBarrierSet(FixedArray::cast(bridge_storage), - kEnumCacheBridgeEnumIndex, - get(kEnumerationIndexIndex)); + fast_set(FixedArray::cast(bridge_storage), + kEnumCacheBridgeEnumIndex, + get(kEnumerationIndexIndex)); set(kEnumerationIndexIndex, bridge_storage); } } @@ -5544,16 +5295,10 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor, ++new_size; } } - - DescriptorArray* new_descriptors; { MaybeObject* maybe_result = Allocate(new_size); - if (!maybe_result->To<DescriptorArray>(&new_descriptors)) { - return maybe_result; - } + if (!maybe_result->ToObject(&result)) return maybe_result; } - - DescriptorArray::WhitenessWitness witness(new_descriptors); - + DescriptorArray* new_descriptors = DescriptorArray::cast(result); // Set the enumeration index in the descriptors and set the enumeration index // in the result. int enumeration_index = NextEnumerationIndex(); @@ -5581,16 +5326,16 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor, } if (IsNullDescriptor(from_index)) continue; if (remove_transitions && IsTransition(from_index)) continue; - new_descriptors->CopyFrom(to_index++, this, from_index, witness); + new_descriptors->CopyFrom(to_index++, this, from_index); } - new_descriptors->Set(to_index++, descriptor, witness); + new_descriptors->Set(to_index++, descriptor); if (replacing) from_index++; for (; from_index < number_of_descriptors(); from_index++) { if (IsNullDescriptor(from_index)) continue; if (remove_transitions && IsTransition(from_index)) continue; - new_descriptors->CopyFrom(to_index++, this, from_index, witness); + new_descriptors->CopyFrom(to_index++, this, from_index); } ASSERT(to_index == new_descriptors->number_of_descriptors()); @@ -5612,21 +5357,16 @@ MaybeObject* DescriptorArray::RemoveTransitions() { } // Allocate the new descriptor array. - DescriptorArray* new_descriptors; + Object* result; { MaybeObject* maybe_result = Allocate(number_of_descriptors() - num_removed); - if (!maybe_result->To<DescriptorArray>(&new_descriptors)) { - return maybe_result; - } + if (!maybe_result->ToObject(&result)) return maybe_result; } - - DescriptorArray::WhitenessWitness witness(new_descriptors); + DescriptorArray* new_descriptors = DescriptorArray::cast(result); // Copy the content. int next_descriptor = 0; for (int i = 0; i < number_of_descriptors(); i++) { - if (IsProperty(i)) { - new_descriptors->CopyFrom(next_descriptor++, this, i, witness); - } + if (IsProperty(i)) new_descriptors->CopyFrom(next_descriptor++, this, i); } ASSERT(next_descriptor == new_descriptors->number_of_descriptors()); @@ -5634,7 +5374,7 @@ MaybeObject* DescriptorArray::RemoveTransitions() { } -void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) { +void DescriptorArray::SortUnchecked() { // In-place heap sort. int len = number_of_descriptors(); @@ -5655,7 +5395,7 @@ void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) { } } if (child_hash <= parent_hash) break; - NoWriteBarrierSwapDescriptors(parent_index, child_index); + Swap(parent_index, child_index); // Now element at child_index could be < its children. parent_index = child_index; // parent_hash remains correct. } @@ -5664,8 +5404,8 @@ void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) { // Extract elements and create sorted array. for (int i = len - 1; i > 0; --i) { // Put max element at the back of the array. - NoWriteBarrierSwapDescriptors(0, i); - // Shift down the new top element. 
+ Swap(0, i); + // Sift down the new top element. int parent_index = 0; const uint32_t parent_hash = GetKey(parent_index)->Hash(); const int max_parent_index = (i / 2) - 1; @@ -5680,15 +5420,15 @@ void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) { } } if (child_hash <= parent_hash) break; - NoWriteBarrierSwapDescriptors(parent_index, child_index); + Swap(parent_index, child_index); parent_index = child_index; } } } -void DescriptorArray::Sort(const WhitenessWitness& witness) { - SortUnchecked(witness); +void DescriptorArray::Sort() { + SortUnchecked(); SLOW_ASSERT(IsSortedNoDuplicates()); } @@ -5773,6 +5513,24 @@ bool String::LooksValid() { } +int String::Utf8Length() { + if (IsAsciiRepresentation()) return length(); + // Attempt to flatten before accessing the string. It probably + // doesn't make Utf8Length faster, but it is very likely that + // the string will be accessed later (for example by WriteUtf8) + // so it's still a good idea. + Heap* heap = GetHeap(); + TryFlatten(); + Access<StringInputBuffer> buffer( + heap->isolate()->objects_string_input_buffer()); + buffer->Reset(0, this); + int result = 0; + while (buffer->has_more()) + result += unibrow::Utf8::Length(buffer->GetNext()); + return result; +} + + String::FlatContent String::GetFlatContent() { int length = this->length(); StringShape shape(this); @@ -6196,73 +5954,6 @@ const unibrow::byte* String::ReadBlock(String* input, } -// This method determines the type of string involved and then gets the UTF8 -// length of the string. It doesn't flatten the string and has log(n) recursion -// for a string of length n. -int String::Utf8Length(String* input, int from, int to) { - if (from == to) return 0; - int total = 0; - while (true) { - if (input->IsAsciiRepresentation()) return total + to - from; - switch (StringShape(input).representation_tag()) { - case kConsStringTag: { - ConsString* str = ConsString::cast(input); - String* first = str->first(); - String* second = str->second(); - int first_length = first->length(); - if (first_length - from < to - first_length) { - if (first_length > from) { - // Left hand side is shorter. - total += Utf8Length(first, from, first_length); - input = second; - from = 0; - to -= first_length; - } else { - // We only need the right hand side. - input = second; - from -= first_length; - to -= first_length; - } - } else { - if (first_length <= to) { - // Right hand side is shorter. - total += Utf8Length(second, 0, to - first_length); - input = first; - to = first_length; - } else { - // We only need the left hand side. 
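Editor's note: DescriptorArray::SortUnchecked above is an in-place heapsort keyed on the string hash of each descriptor: first a bottom-up heapify by sifting each parent down, then repeatedly swapping the maximum to the back and sifting the new root down. The same two phases over a plain array of integer keys look like this (a sketch, not the descriptor layout):

#include <cassert>
#include <utility>

// Sift the element at `parent` down until the max-heap property holds
// for the first `len` elements of `keys`.
static void SiftDown(unsigned* keys, int len, int parent) {
  while (true) {
    int child = 2 * parent + 1;
    if (child >= len) break;
    if (child + 1 < len && keys[child + 1] > keys[child]) child++;
    if (keys[child] <= keys[parent]) break;
    std::swap(keys[parent], keys[child]);
    parent = child;
  }
}

// In-place heapsort, same shape as the descriptor-array sort: build a
// max-heap, then move the maximum to the back one element at a time.
void HeapSort(unsigned* keys, int len) {
  for (int i = len / 2 - 1; i >= 0; --i) SiftDown(keys, len, i);
  for (int i = len - 1; i > 0; --i) {
    std::swap(keys[0], keys[i]);  // put the current maximum at the back
    SiftDown(keys, i, 0);         // restore the heap on the shrunken prefix
  }
}

int main() {
  unsigned keys[] = {5, 1, 4, 2, 3};
  HeapSort(keys, 5);
  for (int i = 1; i < 5; ++i) assert(keys[i - 1] <= keys[i]);
}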
- input = first; - } - } - continue; - } - case kExternalStringTag: - case kSeqStringTag: { - Vector<const uc16> vector = input->GetFlatContent().ToUC16Vector(); - const uc16* p = vector.start(); - for (int i = from; i < to; i++) { - total += unibrow::Utf8::Length(p[i]); - } - return total; - } - case kSlicedStringTag: { - SlicedString* str = SlicedString::cast(input); - int offset = str->offset(); - input = str->parent(); - from += offset; - to += offset; - continue; - } - default: - break; - } - UNREACHABLE(); - return 0; - } - return 0; -} - - void Relocatable::PostGarbageCollectionProcessing() { Isolate* isolate = Isolate::Current(); Relocatable* current = isolate->relocatable_top(); @@ -7160,57 +6851,6 @@ void JSFunction::MarkForLazyRecompilation() { } -bool SharedFunctionInfo::EnsureCompiled(Handle<SharedFunctionInfo> shared, - ClearExceptionFlag flag) { - return shared->is_compiled() || CompileLazy(shared, flag); -} - - -static bool CompileLazyHelper(CompilationInfo* info, - ClearExceptionFlag flag) { - // Compile the source information to a code object. - ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled()); - ASSERT(!info->isolate()->has_pending_exception()); - bool result = Compiler::CompileLazy(info); - ASSERT(result != Isolate::Current()->has_pending_exception()); - if (!result && flag == CLEAR_EXCEPTION) { - info->isolate()->clear_pending_exception(); - } - return result; -} - - -bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared, - ClearExceptionFlag flag) { - CompilationInfo info(shared); - return CompileLazyHelper(&info, flag); -} - - -bool JSFunction::CompileLazy(Handle<JSFunction> function, - ClearExceptionFlag flag) { - bool result = true; - if (function->shared()->is_compiled()) { - function->ReplaceCode(function->shared()->code()); - function->shared()->set_code_age(0); - } else { - CompilationInfo info(function); - result = CompileLazyHelper(&info, flag); - ASSERT(!result || function->is_compiled()); - } - return result; -} - - -bool JSFunction::CompileOptimized(Handle<JSFunction> function, - int osr_ast_id, - ClearExceptionFlag flag) { - CompilationInfo info(function); - info.SetOptimizing(osr_ast_id); - return CompileLazyHelper(&info, flag); -} - - bool JSFunction::IsInlineable() { if (IsBuiltin()) return false; SharedFunctionInfo* shared_info = shared(); @@ -7393,7 +7033,7 @@ bool SharedFunctionInfo::CanGenerateInlineConstructor(Object* prototype) { obj = obj->GetPrototype()) { JSObject* js_object = JSObject::cast(obj); for (int i = 0; i < this_property_assignments_count(); i++) { - LookupResult result(heap->isolate()); + LookupResult result; String* name = GetThisPropertyAssignmentName(i); js_object->LocalLookupRealNamedProperty(name, &result); if (result.IsProperty() && result.type() == CALLBACKS) { @@ -7751,8 +7391,6 @@ void Code::Relocate(intptr_t delta) { void Code::CopyFrom(const CodeDesc& desc) { - ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT); - // copy code memmove(instruction_start(), desc.buffer, desc.instr_size); @@ -7772,17 +7410,16 @@ void Code::CopyFrom(const CodeDesc& desc) { RelocInfo::Mode mode = it.rinfo()->rmode(); if (mode == RelocInfo::EMBEDDED_OBJECT) { Handle<Object> p = it.rinfo()->target_object_handle(origin); - it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER); + it.rinfo()->set_target_object(*p); } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) { Handle<JSGlobalPropertyCell> cell = it.rinfo()->target_cell_handle(); - it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER); + 
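Editor's note: both versions of String::Utf8Length above boil down to summing the UTF-8 byte count of each 16-bit code unit; they differ in whether the string is flattened and streamed (the restored code) or walked as a cons/sliced tree with log(n) recursion (the removed code). The per-code-unit accounting on a flat buffer, which both variants rely on, is sketched below; like the code in the diff it treats each UTF-16 unit independently rather than pairing surrogates, which is an assumption of this sketch.

#include <cstddef>
#include <cstdint>

// UTF-8 byte length of a single UTF-16 code unit taken in isolation:
// 1 byte up to U+007F, 2 bytes up to U+07FF, otherwise 3 bytes.
static size_t Utf8LengthOfCodeUnit(uint16_t cu) {
  if (cu <= 0x7f) return 1;
  if (cu <= 0x7ff) return 2;
  return 3;
}

size_t Utf8Length(const uint16_t* units, size_t count) {
  size_t total = 0;
  for (size_t i = 0; i < count; ++i) total += Utf8LengthOfCodeUnit(units[i]);
  return total;
}

int main() {
  const uint16_t ascii[] = {'a', 'b', 'c'};
  const uint16_t mixed[] = {'a', 0x00e9, 0x20ac};  // 'a', e-acute, euro sign
  return (Utf8Length(ascii, 3) == 3 &&
          Utf8Length(mixed, 3) == 1 + 2 + 3) ? 0 : 1;
}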
it.rinfo()->set_target_cell(*cell); } else if (RelocInfo::IsCodeTarget(mode)) { // rewrite code handles in inline cache targets to direct // pointers to the first instruction in the code object Handle<Object> p = it.rinfo()->target_object_handle(origin); Code* code = Code::cast(*p); - it.rinfo()->set_target_address(code->instruction_start(), - SKIP_WRITE_BARRIER); + it.rinfo()->set_target_address(code->instruction_start()); } else { it.rinfo()->apply(delta); } @@ -8210,15 +7847,13 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength( new_map = Map::cast(object); } - FixedArrayBase* old_elements_raw = elements(); ElementsKind elements_kind = GetElementsKind(); switch (elements_kind) { case FAST_SMI_ONLY_ELEMENTS: case FAST_ELEMENTS: { AssertNoAllocation no_gc; WriteBarrierMode mode(new_elements->GetWriteBarrierMode(no_gc)); - CopyFastElementsToFast(FixedArray::cast(old_elements_raw), - new_elements, mode); + CopyFastElementsToFast(FixedArray::cast(elements()), new_elements, mode); set_map(new_map); set_elements(new_elements); break; @@ -8226,7 +7861,7 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength( case DICTIONARY_ELEMENTS: { AssertNoAllocation no_gc; WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc); - CopySlowElementsToFast(NumberDictionary::cast(old_elements_raw), + CopySlowElementsToFast(NumberDictionary::cast(elements()), new_elements, mode); set_map(new_map); @@ -8238,7 +7873,7 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength( WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc); // The object's map and the parameter map are unchanged, the unaliased // arguments are copied to the new backing store. - FixedArray* parameter_map = FixedArray::cast(old_elements_raw); + FixedArray* parameter_map = FixedArray::cast(elements()); FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); if (arguments->IsDictionary()) { CopySlowElementsToFast(NumberDictionary::cast(arguments), @@ -8251,7 +7886,7 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength( break; } case FAST_DOUBLE_ELEMENTS: { - FixedDoubleArray* old_elements = FixedDoubleArray::cast(old_elements_raw); + FixedDoubleArray* old_elements = FixedDoubleArray::cast(elements()); uint32_t old_length = static_cast<uint32_t>(old_elements->length()); // Fill out the new array with this content and array holes. for (uint32_t i = 0; i < old_length; i++) { @@ -8289,11 +7924,6 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength( break; } - if (FLAG_trace_elements_transitions) { - PrintElementsTransition(stdout, elements_kind, old_elements_raw, - FAST_ELEMENTS, new_elements); - } - // Update the length if necessary. 
if (IsJSArray()) { JSArray::cast(this)->set_length(Smi::FromInt(length)); @@ -8323,21 +7953,19 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength( } Map* new_map = Map::cast(obj); - FixedArrayBase* old_elements = elements(); - ElementsKind elements_kind(GetElementsKind()); AssertNoAllocation no_gc; - switch (elements_kind) { + switch (GetElementsKind()) { case FAST_SMI_ONLY_ELEMENTS: case FAST_ELEMENTS: { - elems->Initialize(FixedArray::cast(old_elements)); + elems->Initialize(FixedArray::cast(elements())); break; } case FAST_DOUBLE_ELEMENTS: { - elems->Initialize(FixedDoubleArray::cast(old_elements)); + elems->Initialize(FixedDoubleArray::cast(elements())); break; } case DICTIONARY_ELEMENTS: { - elems->Initialize(NumberDictionary::cast(old_elements)); + elems->Initialize(NumberDictionary::cast(elements())); break; } default: @@ -8345,11 +7973,6 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength( break; } - if (FLAG_trace_elements_transitions) { - PrintElementsTransition(stdout, elements_kind, old_elements, - FAST_DOUBLE_ELEMENTS, elems); - } - ASSERT(new_map->has_fast_double_elements()); set_map(new_map); ASSERT(elems->IsFixedDoubleArray()); @@ -8369,14 +7992,13 @@ MaybeObject* JSObject::SetSlowElements(Object* len) { uint32_t new_length = static_cast<uint32_t>(len->Number()); - FixedArrayBase* old_elements = elements(); - ElementsKind elements_kind = GetElementsKind(); - switch (elements_kind) { + switch (GetElementsKind()) { case FAST_SMI_ONLY_ELEMENTS: case FAST_ELEMENTS: case FAST_DOUBLE_ELEMENTS: { // Make sure we never try to shrink dense arrays into sparse arrays. - ASSERT(static_cast<uint32_t>(old_elements->length()) <= new_length); + ASSERT(static_cast<uint32_t>( + FixedArrayBase::cast(elements())->length()) <= new_length); MaybeObject* result = NormalizeElements(); if (result->IsFailure()) return result; @@ -8408,12 +8030,6 @@ MaybeObject* JSObject::SetSlowElements(Object* len) { UNREACHABLE(); break; } - - if (FLAG_trace_elements_transitions) { - PrintElementsTransition(stdout, elements_kind, old_elements, - DICTIONARY_ELEMENTS, elements()); - } - return this; } @@ -9341,10 +8957,6 @@ MaybeObject* JSObject::SetFastElement(uint32_t index, Map* new_map; if (!maybe_new_map->To<Map>(&new_map)) return maybe_new_map; set_map(new_map); - if (FLAG_trace_elements_transitions) { - PrintElementsTransition(stdout, FAST_SMI_ONLY_ELEMENTS, elements(), - FAST_ELEMENTS, elements()); - } } // Increase backing store capacity if that's been decided previously. 
if (new_capacity != capacity) { @@ -9701,51 +9313,6 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index, } -MUST_USE_RESULT MaybeObject* JSObject::TransitionElementsKind( - ElementsKind to_kind) { - ElementsKind from_kind = map()->elements_kind(); - FixedArrayBase* elms = FixedArrayBase::cast(elements()); - uint32_t capacity = static_cast<uint32_t>(elms->length()); - uint32_t length = capacity; - if (IsJSArray()) { - CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length)); - } - if (from_kind == FAST_SMI_ONLY_ELEMENTS) { - if (to_kind == FAST_DOUBLE_ELEMENTS) { - MaybeObject* maybe_result = - SetFastDoubleElementsCapacityAndLength(capacity, length); - if (maybe_result->IsFailure()) return maybe_result; - return this; - } else if (to_kind == FAST_ELEMENTS) { - MaybeObject* maybe_new_map = GetElementsTransitionMap(FAST_ELEMENTS); - Map* new_map; - if (!maybe_new_map->To(&new_map)) return maybe_new_map; - set_map(new_map); - return this; - } - } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) { - MaybeObject* maybe_result = SetFastElementsCapacityAndLength( - capacity, length, kDontAllowSmiOnlyElements); - if (maybe_result->IsFailure()) return maybe_result; - return this; - } - // This method should never be called for any other case than the ones - // handled above. - UNREACHABLE(); - return GetIsolate()->heap()->null_value(); -} - - -// static -bool Map::IsValidElementsTransition(ElementsKind from_kind, - ElementsKind to_kind) { - return - (from_kind == FAST_SMI_ONLY_ELEMENTS && - (to_kind == FAST_DOUBLE_ELEMENTS || to_kind == FAST_ELEMENTS)) || - (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS); -} - - MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index, Object* value) { uint32_t old_len = 0; @@ -10012,7 +9579,7 @@ MaybeObject* JSObject::GetPropertyPostInterceptor( String* name, PropertyAttributes* attributes) { // Check local property in holder, ignore interceptor. - LookupResult result(GetIsolate()); + LookupResult result; LocalLookupRealNamedProperty(name, &result); if (result.IsProperty()) { return GetProperty(receiver, &result, name, attributes); @@ -10030,7 +9597,7 @@ MaybeObject* JSObject::GetLocalPropertyPostInterceptor( String* name, PropertyAttributes* attributes) { // Check local property in holder, ignore interceptor. - LookupResult result(GetIsolate()); + LookupResult result; LocalLookupRealNamedProperty(name, &result); if (result.IsProperty()) { return GetProperty(receiver, &result, name, attributes); @@ -10081,15 +9648,15 @@ MaybeObject* JSObject::GetPropertyWithInterceptor( bool JSObject::HasRealNamedProperty(String* key) { // Check access rights if needed. - Isolate* isolate = GetIsolate(); if (IsAccessCheckNeeded()) { - if (!isolate->MayNamedAccess(this, key, v8::ACCESS_HAS)) { - isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS); + Heap* heap = GetHeap(); + if (!heap->isolate()->MayNamedAccess(this, key, v8::ACCESS_HAS)) { + heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS); return false; } } - LookupResult result(isolate); + LookupResult result; LocalLookupRealNamedProperty(key, &result); return result.IsProperty() && (result.type() != INTERCEPTOR); } @@ -10158,15 +9725,15 @@ bool JSObject::HasRealElementProperty(uint32_t index) { bool JSObject::HasRealNamedCallbackProperty(String* key) { // Check access rights if needed. 
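The TransitionElementsKind and IsValidElementsTransition code removed in the hunk above encodes a one-way widening order for element storage: smi-only backing stores may become double or fully tagged stores, and double stores may become tagged, never the reverse. A self-contained sketch of that check (the enum here is a stand-in for V8's ElementsKind values):

enum class ElementsKind { kFastSmiOnly, kFastDouble, kFast };

// Widening-only transitions: smi-only -> double, smi-only -> tagged,
// and double -> tagged; everything else is rejected.
bool IsValidElementsTransition(ElementsKind from, ElementsKind to) {
  if (from == ElementsKind::kFastSmiOnly) {
    return to == ElementsKind::kFastDouble || to == ElementsKind::kFast;
  }
  return from == ElementsKind::kFastDouble && to == ElementsKind::kFast;
}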
- Isolate* isolate = GetIsolate(); if (IsAccessCheckNeeded()) { - if (!isolate->MayNamedAccess(this, key, v8::ACCESS_HAS)) { - isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS); + Heap* heap = GetHeap(); + if (!heap->isolate()->MayNamedAccess(this, key, v8::ACCESS_HAS)) { + heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS); return false; } } - LookupResult result(isolate); + LookupResult result; LocalLookupRealNamedProperty(key, &result); return result.IsProperty() && (result.type() == CALLBACKS); } @@ -11031,9 +10598,7 @@ template class HashTable<CompilationCacheShape, HashTableKey*>; template class HashTable<MapCacheShape, HashTableKey*>; -template class HashTable<ObjectHashTableShape<1>, Object*>; - -template class HashTable<ObjectHashTableShape<2>, Object*>; +template class HashTable<ObjectHashTableShape, JSReceiver*>; template class Dictionary<StringDictionaryShape, String*>; @@ -11524,16 +11089,6 @@ JSGlobalPropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) { } -Handle<JSGlobalPropertyCell> GlobalObject::EnsurePropertyCell( - Handle<GlobalObject> global, - Handle<String> name) { - Isolate* isolate = global->GetIsolate(); - CALL_HEAP_FUNCTION(isolate, - global->EnsurePropertyCell(*name), - JSGlobalPropertyCell); -} - - MaybeObject* GlobalObject::EnsurePropertyCell(String* name) { ASSERT(!HasFastProperties()); int entry = property_dictionary()->FindEntry(name); @@ -11771,7 +11326,7 @@ MaybeObject* CompilationCacheTable::PutEval(String* src, SharedFunctionInfo* value) { StringSharedKey key(src, context->closure()->shared(), - value->strict_mode_flag()); + value->strict_mode() ? kStrictMode : kNonStrictMode); Object* obj; { MaybeObject* maybe_obj = EnsureCapacity(1, &key); if (!maybe_obj->ToObject(&obj)) return maybe_obj; @@ -11820,8 +11375,8 @@ void CompilationCacheTable::Remove(Object* value) { int entry_index = EntryToIndex(entry); int value_index = entry_index + 1; if (get(value_index) == value) { - NoWriteBarrierSet(this, entry_index, null_value); - NoWriteBarrierSet(this, value_index, null_value); + fast_set(this, entry_index, null_value); + fast_set(this, value_index, null_value); ElementRemoved(); } } @@ -12293,15 +11848,14 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor( } // Allocate the instance descriptor. 
- DescriptorArray* descriptors; - { MaybeObject* maybe_descriptors = + Object* descriptors_unchecked; + { MaybeObject* maybe_descriptors_unchecked = DescriptorArray::Allocate(instance_descriptor_length); - if (!maybe_descriptors->To<DescriptorArray>(&descriptors)) { - return maybe_descriptors; + if (!maybe_descriptors_unchecked->ToObject(&descriptors_unchecked)) { + return maybe_descriptors_unchecked; } } - - DescriptorArray::WhitenessWitness witness(descriptors); + DescriptorArray* descriptors = DescriptorArray::cast(descriptors_unchecked); int inobject_props = obj->map()->inobject_properties(); int number_of_allocated_fields = @@ -12339,7 +11893,7 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor( JSFunction::cast(value), details.attributes(), details.index()); - descriptors->Set(next_descriptor++, &d, witness); + descriptors->Set(next_descriptor++, &d); } else if (type == NORMAL) { if (current_offset < inobject_props) { obj->InObjectPropertyAtPut(current_offset, @@ -12353,13 +11907,13 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor( current_offset++, details.attributes(), details.index()); - descriptors->Set(next_descriptor++, &d, witness); + descriptors->Set(next_descriptor++, &d); } else if (type == CALLBACKS) { CallbacksDescriptor d(String::cast(key), value, details.attributes(), details.index()); - descriptors->Set(next_descriptor++, &d, witness); + descriptors->Set(next_descriptor++, &d); } else { UNREACHABLE(); } @@ -12367,7 +11921,7 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor( } ASSERT(current_offset == number_of_fields); - descriptors->Sort(witness); + descriptors->Sort(); // Allocate new map. Object* new_map; { MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors(); @@ -12390,74 +11944,20 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor( } -bool ObjectHashSet::Contains(Object* key) { - // If the object does not have an identity hash, it was never used as a key. - { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION); - if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return false; - } - return (FindEntry(key) != kNotFound); -} - - -MaybeObject* ObjectHashSet::Add(Object* key) { - // Make sure the key object has an identity hash code. - int hash; - { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION); - if (maybe_hash->IsFailure()) return maybe_hash; - hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value(); - } - int entry = FindEntry(key); - - // Check whether key is already present. - if (entry != kNotFound) return this; - - // Check whether the hash set should be extended and add entry. - Object* obj; - { MaybeObject* maybe_obj = EnsureCapacity(1, key); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; - } - ObjectHashSet* table = ObjectHashSet::cast(obj); - entry = table->FindInsertionEntry(hash); - table->set(EntryToIndex(entry), key); - table->ElementAdded(); - return table; -} - - -MaybeObject* ObjectHashSet::Remove(Object* key) { +Object* ObjectHashTable::Lookup(JSReceiver* key) { // If the object does not have an identity hash, it was never used as a key. - { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION); - if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return this; - } - int entry = FindEntry(key); - - // Check whether key is actually present. - if (entry == kNotFound) return this; - - // Remove entry and try to shrink this hash set. 
- set_null(EntryToIndex(entry)); - ElementRemoved(); - return Shrink(key); -} - - -Object* ObjectHashTable::Lookup(Object* key) { - // If the object does not have an identity hash, it was never used as a key. - { MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION); - if (maybe_hash->ToObjectUnchecked()->IsUndefined()) { - return GetHeap()->undefined_value(); - } - } + MaybeObject* maybe_hash = key->GetIdentityHash(OMIT_CREATION); + if (maybe_hash->IsFailure()) return GetHeap()->undefined_value(); int entry = FindEntry(key); if (entry == kNotFound) return GetHeap()->undefined_value(); return get(EntryToIndex(entry) + 1); } -MaybeObject* ObjectHashTable::Put(Object* key, Object* value) { +MaybeObject* ObjectHashTable::Put(JSReceiver* key, Object* value) { // Make sure the key object has an identity hash code. int hash; - { MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION); + { MaybeObject* maybe_hash = key->GetIdentityHash(ALLOW_CREATION); if (maybe_hash->IsFailure()) return maybe_hash; hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value(); } @@ -12487,7 +11987,7 @@ MaybeObject* ObjectHashTable::Put(Object* key, Object* value) { } -void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) { +void ObjectHashTable::AddEntry(int entry, JSReceiver* key, Object* value) { set(EntryToIndex(entry), key); set(EntryToIndex(entry) + 1, value); ElementAdded(); diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h index f7d218022..b95fa574a 100644 --- a/deps/v8/src/objects.h +++ b/deps/v8/src/objects.h @@ -40,7 +40,6 @@ #endif #include "v8checks.h" - // // Most object types in the V8 JavaScript are described in this file. // @@ -53,8 +52,6 @@ // - JSReceiver (suitable for property access) // - JSObject // - JSArray -// - JSSet -// - JSMap // - JSWeakMap // - JSRegExp // - JSFunction @@ -176,8 +173,6 @@ enum ElementsKind { static const int kElementsKindCount = LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1; -void PrintElementsKind(FILE* out, ElementsKind kind); - // PropertyDetails captures type and attributes for a property. // They are used both in property dictionaries and instance descriptors. class PropertyDetails BASE_EMBEDDED { @@ -633,8 +628,6 @@ enum InstanceType { JS_BUILTINS_OBJECT_TYPE, JS_GLOBAL_PROXY_TYPE, JS_ARRAY_TYPE, - JS_SET_TYPE, - JS_MAP_TYPE, JS_WEAK_MAP_TYPE, JS_REGEXP_TYPE, @@ -827,8 +820,6 @@ class MaybeObject BASE_EMBEDDED { V(JSArray) \ V(JSProxy) \ V(JSFunctionProxy) \ - V(JSSet) \ - V(JSMap) \ V(JSWeakMap) \ V(JSRegExp) \ V(HashTable) \ @@ -866,8 +857,6 @@ class Object : public MaybeObject { HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL) #undef IS_TYPE_FUNCTION_DECL - inline bool IsFixedArrayBase(); - // Returns true if this object is an instance of the specified // function template. inline bool IsInstanceOf(FunctionTemplateInfo* type); @@ -923,22 +912,13 @@ class Object : public MaybeObject { Object* receiver, String* key, PropertyAttributes* attributes); - - static Handle<Object> GetProperty(Handle<Object> object, - Handle<Object> receiver, - LookupResult* result, - Handle<String> key, - PropertyAttributes* attributes); - MUST_USE_RESULT MaybeObject* GetProperty(Object* receiver, LookupResult* result, String* key, PropertyAttributes* attributes); - MUST_USE_RESULT MaybeObject* GetPropertyWithDefinedGetter(Object* receiver, JSReceiver* getter); - static Handle<Object> GetElement(Handle<Object> object, uint32_t index); inline MaybeObject* GetElement(uint32_t index); // For use when we know that no exception can be thrown. 
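The ObjectHashTable changes above hash keys by object identity rather than by value: a key that has never been handed an identity hash cannot be in the table, and Put creates the hash on first use. V8 stores that hash explicitly because its collector moves objects, so raw addresses are not stable; outside such a heap the same semantics are simply a map keyed on the object's address. A minimal sketch of the identity-keyed idea (types and helper names are illustrative only):

#include <unordered_map>

struct HeapObject {};  // stand-in for an arbitrary heap-allocated object
struct Value {};

// Identity-keyed table: two keys collide only if they are the same object,
// because hashing and equality come from the pointer itself.
using ObjectIdentityTable = std::unordered_map<const HeapObject*, Value>;

bool Contains(const ObjectIdentityTable& table, const HeapObject* key) {
  return table.find(key) != table.end();
}

void Put(ObjectIdentityTable& table, const HeapObject* key, const Value& value) {
  table[key] = value;  // overwrites any previous mapping for this identity
}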
inline Object* GetElementNoExceptionThrown(uint32_t index); @@ -947,16 +927,6 @@ class Object : public MaybeObject { // Return the object's prototype (might be Heap::null_value()). Object* GetPrototype(); - // Returns the permanent hash code associated with this object depending on - // the actual object type. Might return a failure in case no hash was - // created yet or GC was caused by creation. - MUST_USE_RESULT MaybeObject* GetHash(CreationFlag flag); - - // Checks whether this object has the same value as the given one. This - // function is implemented according to ES5, section 9.12 and can be used - // to implement the Harmony "egal" function. - bool SameValue(Object* other); - // Tries to convert an object to an array index. Returns true and sets // the output parameter if it succeeds. inline bool ToArrayIndex(uint32_t* index); @@ -1381,9 +1351,6 @@ class JSReceiver: public HeapObject { StrictModeFlag strict_mode, bool check_prototype); - // Tests for the fast common case for property enumeration. - bool IsSimpleEnum(); - // Returns the class name ([[Class]] property in the specification). String* class_name(); @@ -1409,7 +1376,7 @@ class JSReceiver: public HeapObject { bool skip_hidden_prototypes); // Retrieves a permanent object identity hash code. The undefined value might - // be returned in case no hash was created yet and OMIT_CREATION was used. + // be returned in case no has been created yet and OMIT_CREATION was used. inline MUST_USE_RESULT MaybeObject* GetIdentityHash(CreationFlag flag); // Lookup a property. If found, the result is valid and has @@ -1636,6 +1603,9 @@ class JSObject: public JSReceiver { MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode); MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode); + // Tests for the fast common case for property enumeration. + bool IsSimpleEnum(); + inline void ValidateSmiOnlyElements(); // Makes sure that this object can contain non-smi Object as elements. @@ -1816,13 +1786,9 @@ class JSObject: public JSReceiver { // Returns a new map with all transitions dropped from the object's current // map and the ElementsKind set. - static Handle<Map> GetElementsTransitionMap(Handle<JSObject> object, - ElementsKind to_kind); MUST_USE_RESULT MaybeObject* GetElementsTransitionMap( ElementsKind elements_kind); - MUST_USE_RESULT MaybeObject* TransitionElementsKind(ElementsKind to_kind); - // Converts a descriptor of any other type to a real field, // backed by the properties array. Descriptors of visible // types, such as CONSTANT_FUNCTION, keep their enumeration order. @@ -1869,10 +1835,6 @@ class JSObject: public JSReceiver { // dictionary. Returns the backing after conversion. MUST_USE_RESULT MaybeObject* NormalizeElements(); - static void UpdateMapCodeCache(Handle<JSObject> object, - Handle<String> name, - Handle<Code> code); - MUST_USE_RESULT MaybeObject* UpdateMapCodeCache(String* name, Code* code); // Transform slow named properties to fast variants. @@ -1934,10 +1896,6 @@ class JSObject: public JSReceiver { void PrintElements(FILE* out); #endif - void PrintElementsTransition( - FILE* file, ElementsKind from_kind, FixedArrayBase* from_elements, - ElementsKind to_kind, FixedArrayBase* to_elements); - #ifdef DEBUG // Structure for collecting spill information about JSObjects. class SpillInformation { @@ -2204,9 +2162,7 @@ class FixedArray: public FixedArrayBase { protected: // Set operation on FixedArray without using write barriers. 
Can // only be used for storing old space objects or smis. - static inline void NoWriteBarrierSet(FixedArray* array, - int index, - Object* value); + static inline void fast_set(FixedArray* array, int index, Object* value); private: DISALLOW_IMPLICIT_CONSTRUCTORS(FixedArray); @@ -2229,9 +2185,6 @@ class FixedDoubleArray: public FixedArrayBase { // Checking for the hole. inline bool is_the_hole(int index); - // Copy operations - MUST_USE_RESULT inline MaybeObject* Copy(); - // Garbage collection support. inline static int SizeFor(int length) { return kHeaderSize + length * kDoubleSize; @@ -2271,9 +2224,6 @@ class FixedDoubleArray: public FixedArrayBase { }; -class IncrementalMarking; - - // DescriptorArrays are fixed arrays used to hold instance descriptors. // The format of the these objects is: // TODO(1399): It should be possible to make room for bit_field3 in the map @@ -2315,7 +2265,7 @@ class DescriptorArray: public FixedArray { // Set next enumeration index and flush any enum cache. void SetNextEnumerationIndex(int value) { if (!IsEmpty()) { - set(kEnumerationIndexIndex, Smi::FromInt(value)); + fast_set(this, kEnumerationIndexIndex, Smi::FromInt(value)); } } bool HasEnumCache() { @@ -2352,27 +2302,13 @@ class DescriptorArray: public FixedArray { inline bool IsNullDescriptor(int descriptor_number); inline bool IsDontEnum(int descriptor_number); - class WhitenessWitness { - public: - inline explicit WhitenessWitness(DescriptorArray* array); - inline ~WhitenessWitness(); - - private: - IncrementalMarking* marking_; - }; - // Accessor for complete descriptor. inline void Get(int descriptor_number, Descriptor* desc); - inline void Set(int descriptor_number, - Descriptor* desc, - const WhitenessWitness&); + inline void Set(int descriptor_number, Descriptor* desc); // Transfer complete descriptor from another descriptor array to // this one. - inline void CopyFrom(int index, - DescriptorArray* src, - int src_index, - const WhitenessWitness&); + inline void CopyFrom(int index, DescriptorArray* src, int src_index); // Copy the descriptor array, insert a new descriptor and optionally // remove map transitions. If the descriptor is already present, it is @@ -2389,11 +2325,11 @@ class DescriptorArray: public FixedArray { // Sort the instance descriptors by the hash codes of their keys. // Does not check for duplicates. - void SortUnchecked(const WhitenessWitness&); + void SortUnchecked(); // Sort the instance descriptors by the hash codes of their keys. // Checks the result for duplicates. - void Sort(const WhitenessWitness&); + void Sort(); // Search the instance descriptors for given name. inline int Search(String* name); @@ -2486,12 +2422,10 @@ class DescriptorArray: public FixedArray { NULL_DESCRIPTOR; } // Swap operation on FixedArray without using write barriers. - static inline void NoWriteBarrierSwap(FixedArray* array, - int first, - int second); + static inline void fast_swap(FixedArray* array, int first, int second); // Swap descriptor first and second. - inline void NoWriteBarrierSwapDescriptors(int first, int second); + inline void Swap(int first, int second); FixedArray* GetContentArray() { return FixedArray::cast(get(kContentArrayIndex)); @@ -2633,12 +2567,12 @@ class HashTable: public FixedArray { // Update the number of elements in the hash table. void SetNumberOfElements(int nof) { - set(kNumberOfElementsIndex, Smi::FromInt(nof)); + fast_set(this, kNumberOfElementsIndex, Smi::FromInt(nof)); } // Update the number of deleted elements in the hash table. 
void SetNumberOfDeletedElements(int nod) { - set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod)); + fast_set(this, kNumberOfDeletedElementsIndex, Smi::FromInt(nod)); } // Sets the capacity of the hash table. @@ -2648,7 +2582,7 @@ class HashTable: public FixedArray { // and non-zero. ASSERT(capacity > 0); ASSERT(capacity <= kMaxCapacity); - set(kCapacityIndex, Smi::FromInt(capacity)); + fast_set(this, kCapacityIndex, Smi::FromInt(capacity)); } @@ -2856,7 +2790,7 @@ class Dictionary: public HashTable<Shape, Key> { // Accessors for next enumeration index. void SetNextEnumerationIndex(int index) { - this->set(kNextEnumerationIndexIndex, Smi::FromInt(index)); + this->fast_set(this, kNextEnumerationIndexIndex, Smi::FromInt(index)); } int NextEnumerationIndex() { @@ -2997,41 +2931,20 @@ class NumberDictionary: public Dictionary<NumberDictionaryShape, uint32_t> { }; -template <int entrysize> class ObjectHashTableShape { public: - static inline bool IsMatch(Object* key, Object* other); - static inline uint32_t Hash(Object* key); - static inline uint32_t HashForObject(Object* key, Object* object); - MUST_USE_RESULT static inline MaybeObject* AsObject(Object* key); + static inline bool IsMatch(JSReceiver* key, Object* other); + static inline uint32_t Hash(JSReceiver* key); + static inline uint32_t HashForObject(JSReceiver* key, Object* object); + MUST_USE_RESULT static inline MaybeObject* AsObject(JSReceiver* key); static const int kPrefixSize = 0; - static const int kEntrySize = entrysize; -}; - - -// ObjectHashSet holds keys that are arbitrary objects by using the identity -// hash of the key for hashing purposes. -class ObjectHashSet: public HashTable<ObjectHashTableShape<1>, Object*> { - public: - static inline ObjectHashSet* cast(Object* obj) { - ASSERT(obj->IsHashTable()); - return reinterpret_cast<ObjectHashSet*>(obj); - } - - // Looks up whether the given key is part of this hash set. - bool Contains(Object* key); - - // Adds the given key to this hash set. - MUST_USE_RESULT MaybeObject* Add(Object* key); - - // Removes the given key from this hash set. - MUST_USE_RESULT MaybeObject* Remove(Object* key); + static const int kEntrySize = 2; }; -// ObjectHashTable maps keys that are arbitrary objects to object values by +// ObjectHashTable maps keys that are JavaScript objects to object values by // using the identity hash of the key for hashing purposes. -class ObjectHashTable: public HashTable<ObjectHashTableShape<2>, Object*> { +class ObjectHashTable: public HashTable<ObjectHashTableShape, JSReceiver*> { public: static inline ObjectHashTable* cast(Object* obj) { ASSERT(obj->IsHashTable()); @@ -3040,16 +2953,16 @@ class ObjectHashTable: public HashTable<ObjectHashTableShape<2>, Object*> { // Looks up the value associated with the given key. The undefined value is // returned in case the key is not present. - Object* Lookup(Object* key); + Object* Lookup(JSReceiver* key); // Adds (or overwrites) the value associated with the given key. Mapping a // key to the undefined value causes removal of the whole entry. 
- MUST_USE_RESULT MaybeObject* Put(Object* key, Object* value); + MUST_USE_RESULT MaybeObject* Put(JSReceiver* key, Object* value); private: friend class MarkCompactCollector; - void AddEntry(int entry, Object* key, Object* value); + void AddEntry(int entry, JSReceiver* key, Object* value); void RemoveEntry(int entry, Heap* heap); inline void RemoveEntry(int entry); @@ -3107,9 +3020,6 @@ class SerializedScopeInfo : public FixedArray { return reinterpret_cast<SerializedScopeInfo*>(object); } - // Return the type of this scope. - ScopeType Type(); - // Does this scope call eval? bool CallsEval(); @@ -3125,9 +3035,6 @@ class SerializedScopeInfo : public FixedArray { // Return if this has context slots besides MIN_CONTEXT_SLOTS; bool HasHeapAllocatedLocals(); - // Return if contexts are allocated for this scope. - bool HasContext(); - // Lookup support for serialized scope info. Returns the // the stack slot index for a given slot name if the slot is // present; otherwise returns a value < 0. The name must be a symbol @@ -3150,7 +3057,7 @@ class SerializedScopeInfo : public FixedArray { // function context slot index if the function name is present (named // function expressions, only), otherwise returns a value < 0. The name // must be a symbol (canonicalized). - int FunctionContextSlotIndex(String* name, VariableMode* mode); + int FunctionContextSlotIndex(String* name); static Handle<SerializedScopeInfo> Create(Scope* scope); @@ -3839,11 +3746,6 @@ class Code: public HeapObject { inline bool has_debug_break_slots(); inline void set_has_debug_break_slots(bool value); - // [compiled_with_optimizing]: For FUNCTION kind, tells if it has - // been compiled with IsOptimizing set to true. - inline bool is_compiled_optimizable(); - inline void set_compiled_optimizable(bool value); - // [allow_osr_at_loop_nesting_level]: For FUNCTION kind, tells for // how long the function has been marked for OSR and therefore which // level of loop nesting we are willing to do on-stack replacement @@ -4039,7 +3941,6 @@ class Code: public HeapObject { class FullCodeFlagsHasDeoptimizationSupportField: public BitField<bool, 0, 1> {}; // NOLINT class FullCodeFlagsHasDebugBreakSlotsField: public BitField<bool, 1, 1> {}; - class FullCodeFlagsIsCompiledOptimizable: public BitField<bool, 2, 1> {}; static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1; @@ -4221,9 +4122,6 @@ class Map: public HeapObject { return elements_kind() == DICTIONARY_ELEMENTS; } - static bool IsValidElementsTransition(ElementsKind from_kind, - ElementsKind to_kind); - // Tells whether the map is attached to SharedFunctionInfo // (for inobject slack tracking). inline void set_attached_to_shared_function_info(bool value); @@ -4332,9 +4230,6 @@ class Map: public HeapObject { inline void ClearCodeCache(Heap* heap); // Update code cache. - static void UpdateCodeCache(Handle<Map> map, - Handle<String> name, - Handle<Code> code); MUST_USE_RESULT MaybeObject* UpdateCodeCache(String* name, Code* code); // Returns the found code or undefined if absent. @@ -4393,13 +4288,6 @@ class Map: public HeapObject { MaybeObject* AddElementsTransition(ElementsKind elements_kind, Map* transitioned_map); - // Returns the transitioned map for this map with the most generic - // elements_kind that's found in |candidates|, or null handle if no match is - // found at all. - Handle<Map> FindTransitionedMap(MapHandleList* candidates); - Map* FindTransitionedMap(MapList* candidates); - - // Dispatched behavior. 
#ifdef OBJECT_PRINT inline void MapPrint() { @@ -4908,11 +4796,7 @@ class SharedFunctionInfo: public HeapObject { DECL_BOOLEAN_ACCESSORS(optimization_disabled) // Indicates whether the function is a strict mode function. - inline bool strict_mode(); - - // Indicates the mode of the function. - inline StrictModeFlag strict_mode_flag(); - inline void set_strict_mode_flag(StrictModeFlag strict_mode_flag); + DECL_BOOLEAN_ACCESSORS(strict_mode) // False if the function definitely does not allocate an arguments object. DECL_BOOLEAN_ACCESSORS(uses_arguments) @@ -5004,13 +4888,6 @@ class SharedFunctionInfo: public HeapObject { void SharedFunctionInfoVerify(); #endif - // Helpers to compile the shared code. Returns true on success, false on - // failure (e.g., stack overflow during compilation). - static bool EnsureCompiled(Handle<SharedFunctionInfo> shared, - ClearExceptionFlag flag); - static bool CompileLazy(Handle<SharedFunctionInfo> shared, - ClearExceptionFlag flag); - // Casting. static inline SharedFunctionInfo* cast(Object* obj); @@ -5161,7 +5038,7 @@ class SharedFunctionInfo: public HeapObject { public: // Constants for optimizing codegen for strict mode function and // native tests. - // Allows to use byte-width instructions. + // Allows to use byte-widgh instructions. static const int kStrictModeBitWithinByte = (kStrictModeFunction + kCompilerHintsSmiTagSize) % kBitsPerByte; @@ -5232,14 +5109,6 @@ class JSFunction: public JSObject { // recompiled the next time it is executed. void MarkForLazyRecompilation(); - // Helpers to compile this function. Returns true on success, false on - // failure (e.g., stack overflow during compilation). - static bool CompileLazy(Handle<JSFunction> function, - ClearExceptionFlag flag); - static bool CompileOptimized(Handle<JSFunction> function, - int osr_ast_id, - ClearExceptionFlag flag); - // Tells whether or not the function is already marked for lazy // recompilation. inline bool IsMarkedForLazyRecompilation(); @@ -5247,8 +5116,7 @@ class JSFunction: public JSObject { // Check whether or not this function is inlineable. bool IsInlineable(); - // [literals_or_bindings]: Fixed array holding either - // the materialized literals or the bindings of a bound function. + // [literals]: Fixed array holding the materialized literals. // // If the function contains object, regexp or array literals, the // literals array prefix contains the object, regexp, and array @@ -5257,17 +5125,7 @@ class JSFunction: public JSObject { // or array functions. Performing a dynamic lookup, we might end up // using the functions from a new context that we should not have // access to. - // - // On bound functions, the array is a (copy-on-write) fixed-array containing - // the function that was bound, bound this-value and any bound - // arguments. Bound functions never contain literals. - DECL_ACCESSORS(literals_or_bindings, FixedArray) - - inline FixedArray* literals(); - inline void set_literals(FixedArray* literals); - - inline FixedArray* function_bindings(); - inline void set_function_bindings(FixedArray* bindings); + DECL_ACCESSORS(literals, FixedArray) // The initial map for an object created by this constructor. inline Map* initial_map(); @@ -5355,11 +5213,6 @@ class JSFunction: public JSObject { static const int kLiteralsPrefixSize = 1; static const int kLiteralGlobalContextIndex = 0; - // Layout of the bound-function binding array. 
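The kStrictModeBitWithinByte and kCompilerHintsSmiTagSize constants in the SharedFunctionInfo hunk above exist so generated code can test a single flag bit with a byte-width instruction instead of loading the whole compiler-hints word: the absolute bit index is split into a byte offset and a bit position inside that byte. A standalone sketch of that split (endianness handling, which the real offsets also account for, is ignored here):

#include <cstdint>

constexpr int kBitsPerByte = 8;

// Split an absolute bit index into (byte offset, bit within that byte) so a
// single one-byte test instruction can be emitted for it.
constexpr int ByteOffsetOfBit(int bit_index) { return bit_index / kBitsPerByte; }
constexpr int BitWithinByte(int bit_index) { return bit_index % kBitsPerByte; }

inline bool TestBit(const std::uint8_t* base, int bit_index) {
  return (base[ByteOffsetOfBit(bit_index)] >> BitWithinByte(bit_index)) & 1;
}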
- static const int kBoundFunctionIndex = 0; - static const int kBoundThisIndex = 1; - static const int kBoundArgumentsStartIndex = 2; - private: DISALLOW_IMPLICIT_CONSTRUCTORS(JSFunction); }; @@ -5432,11 +5285,6 @@ class GlobalObject: public JSObject { } // Ensure that the global object has a cell for the given property name. - static Handle<JSGlobalPropertyCell> EnsurePropertyCell( - Handle<GlobalObject> global, - Handle<String> name); - // TODO(kmillikin): This function can be eliminated once the stub cache is - // full handlified (and the static helper can be written directly). MUST_USE_RESULT MaybeObject* EnsurePropertyCell(String* name); // Casting. @@ -5909,17 +5757,10 @@ class PolymorphicCodeCache: public Struct { public: DECL_ACCESSORS(cache, Object) - static void Update(Handle<PolymorphicCodeCache> cache, - MapHandleList* maps, - Code::Flags flags, - Handle<Code> code); - - MUST_USE_RESULT MaybeObject* Update(MapHandleList* maps, + MUST_USE_RESULT MaybeObject* Update(MapList* maps, Code::Flags flags, Code* code); - - // Returns an undefined value if the entry is not found. - Handle<Object> Lookup(MapHandleList* maps, Code::Flags flags); + Object* Lookup(MapList* maps, Code::Flags flags); static inline PolymorphicCodeCache* cast(Object* obj); @@ -5944,11 +5785,8 @@ class PolymorphicCodeCache: public Struct { class PolymorphicCodeCacheHashTable : public HashTable<CodeCacheHashTableShape, HashTableKey*> { public: - Object* Lookup(MapHandleList* maps, int code_kind); - - MUST_USE_RESULT MaybeObject* Put(MapHandleList* maps, - int code_kind, - Code* code); + Object* Lookup(MapList* maps, int code_kind); + MUST_USE_RESULT MaybeObject* Put(MapList* maps, int code_kind, Code* code); static inline PolymorphicCodeCacheHashTable* cast(Object* obj); @@ -6219,8 +6057,7 @@ class String: public HeapObject { RobustnessFlag robustness_flag = FAST_STRING_TRAVERSAL, int* length_output = 0); - inline int Utf8Length() { return Utf8Length(this, 0, length()); } - static int Utf8Length(String* input, int from, int to); + int Utf8Length(); // Return a 16 bit Unicode representation of the string. // The string should be nearly flat, otherwise the performance of @@ -7080,60 +6917,6 @@ class JSFunctionProxy: public JSProxy { }; -// The JSSet describes EcmaScript Harmony maps -class JSSet: public JSObject { - public: - // [set]: the backing hash set containing keys. - DECL_ACCESSORS(table, Object) - - // Casting. - static inline JSSet* cast(Object* obj); - -#ifdef OBJECT_PRINT - inline void JSSetPrint() { - JSSetPrint(stdout); - } - void JSSetPrint(FILE* out); -#endif -#ifdef DEBUG - void JSSetVerify(); -#endif - - static const int kTableOffset = JSObject::kHeaderSize; - static const int kSize = kTableOffset + kPointerSize; - - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(JSSet); -}; - - -// The JSMap describes EcmaScript Harmony maps -class JSMap: public JSObject { - public: - // [table]: the backing hash table mapping keys to values. - DECL_ACCESSORS(table, Object) - - // Casting. 
- static inline JSMap* cast(Object* obj); - -#ifdef OBJECT_PRINT - inline void JSMapPrint() { - JSMapPrint(stdout); - } - void JSMapPrint(FILE* out); -#endif -#ifdef DEBUG - void JSMapVerify(); -#endif - - static const int kTableOffset = JSObject::kHeaderSize; - static const int kSize = kTableOffset + kPointerSize; - - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(JSMap); -}; - - // The JSWeakMap describes EcmaScript Harmony weak maps class JSWeakMap: public JSObject { public: diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc index 3c6c4ba1e..fb94a1a60 100644 --- a/deps/v8/src/parser.cc +++ b/deps/v8/src/parser.cc @@ -407,9 +407,9 @@ unsigned* ScriptDataImpl::ReadAddress(int position) { } -Scope* Parser::NewScope(Scope* parent, ScopeType type) { +Scope* Parser::NewScope(Scope* parent, Scope::Type type, bool inside_with) { Scope* result = new(zone()) Scope(parent, type); - result->Initialize(); + result->Initialize(inside_with); return result; } @@ -459,31 +459,13 @@ class TargetScope BASE_EMBEDDED { // ---------------------------------------------------------------------------- -// LexicalScope and SaveScope are stack allocated support classes to facilitate -// anipulation of the Parser's scope stack. The constructor sets the parser's -// top scope to the incoming scope, and the destructor resets it. Additionally, -// LexicalScope stores transient information used during parsing. - - -class SaveScope BASE_EMBEDDED { - public: - SaveScope(Parser* parser, Scope* scope) - : parser_(parser), - previous_top_scope_(parser->top_scope_) { - parser->top_scope_ = scope; - } - - ~SaveScope() { - parser_->top_scope_ = previous_top_scope_; - } - - private: - // Bookkeeping - Parser* parser_; - // Previous values - Scope* previous_top_scope_; -}; - +// LexicalScope is a support class to facilitate manipulation of the +// Parser's scope stack. The constructor sets the parser's top scope +// to the incoming scope, and the destructor resets it. +// +// Additionally, it stores transient information used during parsing. +// These scopes are not kept around after parsing or referenced by syntax +// trees so they can be stack-allocated and hence used by the pre-parser. 
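The comment block above describes a plain save/restore (RAII) pattern: the constructor points the parser at a new top scope, the destructor restores the previous one, so early returns and error paths cannot leave the scope stack inconsistent. A minimal standalone analogue of that guard (ParserLike and Scope are placeholders, not the parser's real types):

struct Scope;  // opaque here; only its address matters for the sketch

struct ParserLike {
  Scope* top_scope = nullptr;
};

// Constructor installs the new scope, destructor restores the previous one,
// mirroring what LexicalScope (and the removed SaveScope) do in this diff.
class ScopeGuard {
 public:
  ScopeGuard(ParserLike* parser, Scope* scope)
      : parser_(parser), previous_(parser->top_scope) {
    parser_->top_scope = scope;
  }
  ~ScopeGuard() { parser_->top_scope = previous_; }

  ScopeGuard(const ScopeGuard&) = delete;
  ScopeGuard& operator=(const ScopeGuard&) = delete;

 private:
  ParserLike* parser_;
  Scope* previous_;
};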
class LexicalScope BASE_EMBEDDED { public: @@ -534,6 +516,7 @@ class LexicalScope BASE_EMBEDDED { // Previous values LexicalScope* lexical_scope_parent_; Scope* previous_scope_; + int previous_with_nesting_level_; unsigned previous_ast_node_id_; }; @@ -546,9 +529,11 @@ LexicalScope::LexicalScope(Parser* parser, Scope* scope, Isolate* isolate) parser_(parser), lexical_scope_parent_(parser->lexical_scope_), previous_scope_(parser->top_scope_), + previous_with_nesting_level_(parser->with_nesting_level_), previous_ast_node_id_(isolate->ast_node_id()) { parser->top_scope_ = scope; parser->lexical_scope_ = this; + parser->with_nesting_level_ = 0; isolate->set_ast_node_id(AstNode::kDeclarationsId + 1); } @@ -556,6 +541,7 @@ LexicalScope::LexicalScope(Parser* parser, Scope* scope, Isolate* isolate) LexicalScope::~LexicalScope() { parser_->top_scope_ = previous_scope_; parser_->lexical_scope_ = lexical_scope_parent_; + parser_->with_nesting_level_ = previous_with_nesting_level_; parser_->isolate()->set_ast_node_id(previous_ast_node_id_); } @@ -592,6 +578,7 @@ Parser::Parser(Handle<Script> script, script_(script), scanner_(isolate_->unicode_cache()), top_scope_(NULL), + with_nesting_level_(0), lexical_scope_(NULL), target_stack_(NULL), allow_natives_syntax_(allow_natives_syntax), @@ -636,7 +623,6 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source, bool in_global_context, StrictModeFlag strict_mode, ZoneScope* zone_scope) { - ASSERT(top_scope_ == NULL); ASSERT(target_stack_ == NULL); if (pre_data_ != NULL) pre_data_->Initialize(); @@ -644,16 +630,18 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source, mode_ = FLAG_lazy ? PARSE_LAZILY : PARSE_EAGERLY; if (allow_natives_syntax_ || extension_ != NULL) mode_ = PARSE_EAGERLY; - ScopeType type = in_global_context ? GLOBAL_SCOPE : EVAL_SCOPE; + Scope::Type type = + in_global_context + ? Scope::GLOBAL_SCOPE + : Scope::EVAL_SCOPE; Handle<String> no_name = isolate()->factory()->empty_symbol(); FunctionLiteral* result = NULL; - { Scope* scope = NewScope(top_scope_, type); - scope->set_start_position(0); - scope->set_end_position(source->length()); + { Scope* scope = NewScope(top_scope_, type, inside_with()); LexicalScope lexical_scope(this, scope, isolate()); - ASSERT(top_scope_->strict_mode_flag() == kNonStrictMode); - top_scope_->SetStrictModeFlag(strict_mode); + if (strict_mode == kStrictMode) { + top_scope_->EnableStrictMode(); + } ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16); bool ok = true; int beg_loc = scanner().location().beg_pos; @@ -677,6 +665,8 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source, lexical_scope.only_simple_this_property_assignments(), lexical_scope.this_property_assignments(), 0, + 0, + source->length(), FunctionLiteral::ANONYMOUS_EXPRESSION, false); // Does not have duplicate parameters. } else if (stack_overflow_) { @@ -724,7 +714,6 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info, ZoneScope* zone_scope) { Handle<SharedFunctionInfo> shared_info = info->shared_info(); scanner_.Initialize(source); - ASSERT(top_scope_ == NULL); ASSERT(target_stack_ == NULL); Handle<String> name(String::cast(shared_info->name())); @@ -738,15 +727,16 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info, { // Parse the function literal. 
- Scope* scope = NewScope(top_scope_, GLOBAL_SCOPE); + Scope* scope = NewScope(top_scope_, Scope::GLOBAL_SCOPE, inside_with()); if (!info->closure().is_null()) { scope = Scope::DeserializeScopeChain(info, scope); } LexicalScope lexical_scope(this, scope, isolate()); - ASSERT(scope->strict_mode_flag() == kNonStrictMode || - scope->strict_mode_flag() == info->strict_mode_flag()); - ASSERT(info->strict_mode_flag() == shared_info->strict_mode_flag()); - scope->SetStrictModeFlag(shared_info->strict_mode_flag()); + + if (shared_info->strict_mode()) { + top_scope_->EnableStrictMode(); + } + FunctionLiteral::Type type = shared_info->is_expression() ? (shared_info->is_anonymous() ? FunctionLiteral::ANONYMOUS_EXPRESSION @@ -1138,14 +1128,14 @@ Statement* Parser::ParseSourceElement(ZoneStringList* labels, // In harmony mode we allow additionally the following productions // SourceElement: // LetDeclaration - // ConstDeclaration if (peek() == Token::FUNCTION) { return ParseFunctionDeclaration(ok); - } else if (peek() == Token::LET || peek() == Token::CONST) { + } else if (peek() == Token::LET) { return ParseVariableStatement(kSourceElement, ok); + } else { + return ParseStatement(labels, ok); } - return ParseStatement(labels, ok); } @@ -1193,7 +1183,7 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor, directive->Equals(isolate()->heap()->use_strict()) && token_loc.end_pos - token_loc.beg_pos == isolate()->heap()->use_strict()->length() + 2) { - top_scope_->SetStrictModeFlag(kStrictMode); + top_scope_->EnableStrictMode(); // "use strict" is the only directive for now. directive_prologue = false; } @@ -1331,7 +1321,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) { // FunctionDeclaration // Common language extension is to allow function declaration in place // of any statement. This language extension is disabled in strict mode. - if (top_scope_->is_strict_mode() || harmony_scoping_) { + if (top_scope_->is_strict_mode()) { ReportMessageAt(scanner().peek_location(), "strict_function", Vector<const char*>::empty()); *ok = false; @@ -1363,10 +1353,6 @@ VariableProxy* Parser::Declare(Handle<String> name, // If we are inside a function, a declaration of a var/const variable is a // truly local variable, and the scope of the variable is always the function // scope. - // Let/const variables in harmony mode are always added to the immediately - // enclosing scope. - Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY) - ? top_scope_ : top_scope_->DeclarationScope(); // If a function scope exists, then we can statically declare this // variable and also set its mode. In any case, a Declaration node @@ -1376,8 +1362,9 @@ VariableProxy* Parser::Declare(Handle<String> name, // to the calling function context. // Similarly, strict mode eval scope does not leak variable declarations to // the caller's scope so we declare all locals, too. - // Also for block scoped let/const bindings the variable can be - // statically declared. + + Scope* declaration_scope = mode == LET ? top_scope_ + : top_scope_->DeclarationScope(); if (declaration_scope->is_function_scope() || declaration_scope->is_strict_mode_eval_scope() || declaration_scope->is_block_scope()) { @@ -1402,7 +1389,6 @@ VariableProxy* Parser::Declare(Handle<String> name, // We only have vars, consts and lets in declarations. 
ASSERT(var->mode() == VAR || var->mode() == CONST || - var->mode() == CONST_HARMONY || var->mode() == LET); if (harmony_scoping_) { // In harmony mode we treat re-declarations as early errors. See @@ -1414,8 +1400,8 @@ VariableProxy* Parser::Declare(Handle<String> name, *ok = false; return NULL; } - const char* type = (var->mode() == VAR) - ? "var" : var->is_const_mode() ? "const" : "let"; + const char* type = (var->mode() == VAR) ? "var" : + (var->mode() == CONST) ? "const" : "let"; Handle<String> type_string = isolate()->factory()->NewStringFromUtf8(CStrVector(type), TENURED); Expression* expression = @@ -1443,13 +1429,12 @@ VariableProxy* Parser::Declare(Handle<String> name, // a performance issue since it may lead to repeated // Runtime::DeclareContextSlot() calls. VariableProxy* proxy = declaration_scope->NewUnresolved( - name, scanner().location().beg_pos); + name, false, scanner().location().beg_pos); declaration_scope->AddDeclaration( new(zone()) Declaration(proxy, mode, fun, top_scope_)); // For global const variables we bind the proxy to a variable. - if ((mode == CONST || mode == CONST_HARMONY) && - declaration_scope->is_global_scope()) { + if (mode == CONST && declaration_scope->is_global_scope()) { ASSERT(resolve); // should be set by all callers Variable::Kind kind = Variable::NORMAL; var = new(zone()) Variable(declaration_scope, name, CONST, true, kind); @@ -1597,14 +1582,20 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) { // Construct block expecting 16 statements. Block* body = new(zone()) Block(isolate(), labels, 16, false); - Scope* block_scope = NewScope(top_scope_, BLOCK_SCOPE); + Scope* saved_scope = top_scope_; + Scope* block_scope = NewScope(top_scope_, + Scope::BLOCK_SCOPE, + inside_with()); + if (top_scope_->is_strict_mode()) { + block_scope->EnableStrictMode(); + } + top_scope_ = block_scope; // Parse the statements and collect escaping labels. + TargetCollector collector; + Target target(&this->target_stack_, &collector); Expect(Token::LBRACE, CHECK_OK); - block_scope->set_start_position(scanner().location().beg_pos); - { SaveScope save_scope(this, block_scope); - TargetCollector collector; - Target target(&this->target_stack_, &collector); + { Target target_body(&this->target_stack_, body); InitializationBlockFinder block_finder(top_scope_, target_stack_); @@ -1617,7 +1608,8 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) { } } Expect(Token::RBRACE, CHECK_OK); - block_scope->set_end_position(scanner().location().end_pos); + top_scope_ = saved_scope; + block_scope = block_scope->FinalizeBlockScope(); body->set_block_scope(block_scope); return body; @@ -1631,7 +1623,6 @@ Block* Parser::ParseVariableStatement(VariableDeclarationContext var_context, Handle<String> ignore; Block* result = ParseVariableDeclarations(var_context, - NULL, &ignore, CHECK_OK); ExpectSemicolon(CHECK_OK); @@ -1650,24 +1641,12 @@ bool Parser::IsEvalOrArguments(Handle<String> string) { // *var is untouched; in particular, it is the caller's responsibility // to initialize it properly. This mechanism is used for the parsing // of 'for-in' loops. 
-Block* Parser::ParseVariableDeclarations( - VariableDeclarationContext var_context, - VariableDeclarationProperties* decl_props, - Handle<String>* out, - bool* ok) { +Block* Parser::ParseVariableDeclarations(VariableDeclarationContext var_context, + Handle<String>* out, + bool* ok) { // VariableDeclarations :: - // ('var' | 'const' | 'let') (Identifier ('=' AssignmentExpression)?)+[','] - // - // The ES6 Draft Rev3 specifies the following grammar for const declarations - // - // ConstDeclaration :: - // const ConstBinding (',' ConstBinding)* ';' - // ConstBinding :: - // Identifier '=' AssignmentExpression - // - // TODO(ES6): - // ConstBinding :: - // BindingPattern '=' AssignmentExpression + // ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[','] + VariableMode mode = VAR; // True if the binding needs initialization. 'let' and 'const' declared // bindings are created uninitialized by their declaration nodes and @@ -1680,32 +1659,19 @@ Block* Parser::ParseVariableDeclarations( Consume(Token::VAR); } else if (peek() == Token::CONST) { Consume(Token::CONST); - if (harmony_scoping_) { - if (var_context != kSourceElement && - var_context != kForStatement) { - // In harmony mode 'const' declarations are only allowed in source - // element positions. - ReportMessage("unprotected_const", Vector<const char*>::empty()); - *ok = false; - return NULL; - } - mode = CONST_HARMONY; - init_op = Token::INIT_CONST_HARMONY; - } else if (top_scope_->is_strict_mode()) { + if (top_scope_->is_strict_mode()) { ReportMessage("strict_const", Vector<const char*>::empty()); *ok = false; return NULL; - } else { - mode = CONST; - init_op = Token::INIT_CONST; } + mode = CONST; is_const = true; needs_init = true; + init_op = Token::INIT_CONST; } else if (peek() == Token::LET) { Consume(Token::LET); if (var_context != kSourceElement && var_context != kForStatement) { - // Let declarations are only allowed in source element positions. ASSERT(var_context == kStatement); ReportMessage("unprotected_let", Vector<const char*>::empty()); *ok = false; @@ -1718,7 +1684,7 @@ Block* Parser::ParseVariableDeclarations( UNREACHABLE(); // by current callers } - Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY) + Scope* declaration_scope = (mode == LET) ? top_scope_ : top_scope_->DeclarationScope(); // The scope of a var/const declared variable anywhere inside a function // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can @@ -1763,10 +1729,8 @@ Block* Parser::ParseVariableDeclarations( // If we have a const declaration, in an inner scope, the proxy is always // bound to the declared variable (independent of possibly surrounding with // statements). - // For let/const declarations in harmony mode, we can also immediately - // pre-resolve the proxy because it resides in the same scope as the - // declaration. - Declare(name, mode, NULL, mode != VAR, CHECK_OK); + Declare(name, mode, NULL, is_const /* always bound for CONST! */, + CHECK_OK); nvars++; if (declaration_scope->num_var_or_const() > kMaxNumFunctionLocals) { ReportMessageAt(scanner().location(), "too_many_variables", @@ -1805,8 +1769,7 @@ Block* Parser::ParseVariableDeclarations( Scope* initialization_scope = is_const ? declaration_scope : top_scope_; Expression* value = NULL; int position = -1; - // Harmony consts have non-optional initializers. 
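The grammar comment at the top of ParseVariableDeclarations above, ('var' | 'const' | 'let') (Identifier ('=' AssignmentExpression)?)+[','], is handled by a straightforward recursive-descent loop: consume the keyword, then repeatedly read an identifier plus optional initializer until no comma follows. A skeletal, parser-agnostic sketch of that loop (the token kinds, TokenStream interface, and ParseDeclarationList name are invented for illustration and are not V8's API):

#include <string>
#include <vector>

enum class Tok { kIdentifier, kAssign, kComma, kOther };

struct Declaration {
  std::string name;
  bool has_initializer = false;
};

// Minimal token source for the sketch; a real parser wraps its scanner.
struct TokenStream {
  virtual Tok Peek() const = 0;
  virtual Tok Next() = 0;  // consume and return one token
  virtual std::string LastIdentifier() const = 0;
  virtual ~TokenStream() = default;
};

// (Identifier ('=' AssignmentExpression)?)+[','] -- the leading keyword is
// assumed to have been consumed already; initializer expressions are skipped
// where the real parser would call ParseAssignmentExpression().
std::vector<Declaration> ParseDeclarationList(TokenStream& tokens) {
  std::vector<Declaration> decls;
  while (true) {
    Declaration d;
    tokens.Next();  // expect an Identifier (error handling omitted)
    d.name = tokens.LastIdentifier();
    if (tokens.Peek() == Tok::kAssign) {
      tokens.Next();              // consume '='
      d.has_initializer = true;   // real code parses the initializer here
    }
    decls.push_back(d);
    if (tokens.Peek() != Tok::kComma) break;
    tokens.Next();  // consume ',' and continue with the next binding
  }
  return decls;
}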
- if (peek() == Token::ASSIGN || mode == CONST_HARMONY) { + if (peek() == Token::ASSIGN) { Expect(Token::ASSIGN, CHECK_OK); position = scanner().location().beg_pos; value = ParseAssignmentExpression(var_context != kForStatement, CHECK_OK); @@ -1818,7 +1781,6 @@ Block* Parser::ParseVariableDeclarations( } else { fni_->RemoveLastFunction(); } - if (decl_props != NULL) *decl_props = kHasInitializers; } // Make sure that 'const x' and 'let x' initialize 'x' to undefined. @@ -1845,6 +1807,7 @@ Block* Parser::ParseVariableDeclarations( // declaration statement has been executed. This is important in // browsers where the global object (window) has lots of // properties defined in prototype objects. + if (initialization_scope->is_global_scope()) { // Compute the arguments for the runtime call. ZoneList<Expression*>* arguments = new(zone()) ZoneList<Expression*>(3); @@ -1869,7 +1832,9 @@ Block* Parser::ParseVariableDeclarations( } else { // Add strict mode. // We may want to pass singleton to avoid Literal allocations. - StrictModeFlag flag = initialization_scope->strict_mode_flag(); + StrictModeFlag flag = initialization_scope->is_strict_mode() + ? kStrictMode + : kNonStrictMode; arguments->Add(NewNumberLiteral(flag)); // Be careful not to assign a value to the global variable if @@ -1906,14 +1871,18 @@ Block* Parser::ParseVariableDeclarations( // dynamically looked-up variables and constants (the start context // for constant lookups is always the function context, while it is // the top context for var declared variables). Sigh... - // For 'let' and 'const' declared variables in harmony mode the - // initialization is in the same scope as the declaration. Thus dynamic - // lookups are unnecessary even if the block scope is inside a with. + // For 'let' declared variables the initialization is in the same scope + // as the declaration. Thus dynamic lookups are unnecessary even if the + // block scope is inside a with. if (value != NULL) { - VariableProxy* proxy = initialization_scope->NewUnresolved(name); + bool in_with = (mode == VAR) ? 
inside_with() : false; + VariableProxy* proxy = + initialization_scope->NewUnresolved(name, in_with); Assignment* assignment = new(zone()) Assignment(isolate(), init_op, proxy, value, position); - block->AddStatement(new(zone()) ExpressionStatement(assignment)); + if (block) { + block->AddStatement(new(zone()) ExpressionStatement(assignment)); + } } if (fni_ != NULL) fni_->Leave(); @@ -2136,14 +2105,10 @@ Statement* Parser::ParseWithStatement(ZoneStringList* labels, bool* ok) { Expression* expr = ParseExpression(true, CHECK_OK); Expect(Token::RPAREN, CHECK_OK); + ++with_nesting_level_; top_scope_->DeclarationScope()->RecordWithStatement(); - Scope* with_scope = NewScope(top_scope_, WITH_SCOPE); - Statement* stmt; - { SaveScope save_scope(this, with_scope); - with_scope->set_start_position(scanner().peek_location().beg_pos); - stmt = ParseStatement(labels, CHECK_OK); - with_scope->set_end_position(scanner().location().end_pos); - } + Statement* stmt = ParseStatement(labels, CHECK_OK); + --with_nesting_level_; return new(zone()) WithStatement(expr, stmt); } @@ -2268,8 +2233,6 @@ TryStatement* Parser::ParseTryStatement(bool* ok) { Consume(Token::CATCH); Expect(Token::LPAREN, CHECK_OK); - catch_scope = NewScope(top_scope_, CATCH_SCOPE); - catch_scope->set_start_position(scanner().location().beg_pos); name = ParseIdentifier(CHECK_OK); if (top_scope_->is_strict_mode() && IsEvalOrArguments(name)) { @@ -2282,15 +2245,21 @@ TryStatement* Parser::ParseTryStatement(bool* ok) { if (peek() == Token::LBRACE) { Target target(&this->target_stack_, &catch_collector); + catch_scope = NewScope(top_scope_, Scope::CATCH_SCOPE, inside_with()); + if (top_scope_->is_strict_mode()) { + catch_scope->EnableStrictMode(); + } VariableMode mode = harmony_scoping_ ? LET : VAR; catch_variable = catch_scope->DeclareLocal(name, mode); - SaveScope save_scope(this, catch_scope); + Scope* saved_scope = top_scope_; + top_scope_ = catch_scope; catch_block = ParseBlock(NULL, CHECK_OK); + top_scope_ = saved_scope; } else { Expect(Token::LBRACE, CHECK_OK); } - catch_scope->set_end_position(scanner().location().end_pos); + tok = peek(); } @@ -2396,22 +2365,16 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) { Statement* init = NULL; - // Create an in-between scope for let-bound iteration variables. 
- Scope* saved_scope = top_scope_; - Scope* for_scope = NewScope(top_scope_, BLOCK_SCOPE); - top_scope_ = for_scope; - Expect(Token::FOR, CHECK_OK); Expect(Token::LPAREN, CHECK_OK); - for_scope->set_start_position(scanner().location().beg_pos); if (peek() != Token::SEMICOLON) { if (peek() == Token::VAR || peek() == Token::CONST) { Handle<String> name; Block* variable_statement = - ParseVariableDeclarations(kForStatement, NULL, &name, CHECK_OK); + ParseVariableDeclarations(kForStatement, &name, CHECK_OK); if (peek() == Token::IN && !name.is_null()) { - VariableProxy* each = top_scope_->NewUnresolved(name); + VariableProxy* each = top_scope_->NewUnresolved(name, inside_with()); ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels); Target target(&this->target_stack_, loop); @@ -2424,73 +2387,12 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) { Block* result = new(zone()) Block(isolate(), NULL, 2, false); result->AddStatement(variable_statement); result->AddStatement(loop); - top_scope_ = saved_scope; - for_scope->set_end_position(scanner().location().end_pos); - for_scope = for_scope->FinalizeBlockScope(); - ASSERT(for_scope == NULL); // Parsed for-in loop w/ variable/const declaration. return result; } else { init = variable_statement; } - } else if (peek() == Token::LET) { - Handle<String> name; - VariableDeclarationProperties decl_props = kHasNoInitializers; - Block* variable_statement = - ParseVariableDeclarations(kForStatement, - &decl_props, - &name, - CHECK_OK); - bool accept_IN = !name.is_null() && decl_props != kHasInitializers; - if (peek() == Token::IN && accept_IN) { - // Rewrite a for-in statement of the form - // - // for (let x in e) b - // - // into - // - // <let x' be a temporary variable> - // for (x' in e) { - // let x; - // x = x'; - // b; - // } - - // TODO(keuchel): Move the temporary variable to the block scope, after - // implementing stack allocated block scoped variables. - Variable* temp = top_scope_->DeclarationScope()->NewTemporary(name); - VariableProxy* temp_proxy = new(zone()) VariableProxy(isolate(), temp); - VariableProxy* each = top_scope_->NewUnresolved(name, inside_with()); - ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels); - Target target(&this->target_stack_, loop); - - Expect(Token::IN, CHECK_OK); - Expression* enumerable = ParseExpression(true, CHECK_OK); - Expect(Token::RPAREN, CHECK_OK); - Statement* body = ParseStatement(NULL, CHECK_OK); - Block* body_block = new(zone()) Block(isolate(), NULL, 3, false); - Assignment* assignment = new(zone()) Assignment(isolate(), - Token::ASSIGN, - each, - temp_proxy, - RelocInfo::kNoPosition); - Statement* assignment_statement = - new(zone()) ExpressionStatement(assignment); - body_block->AddStatement(variable_statement); - body_block->AddStatement(assignment_statement); - body_block->AddStatement(body); - loop->Initialize(temp_proxy, enumerable, body_block); - top_scope_ = saved_scope; - for_scope->set_end_position(scanner().location().end_pos); - for_scope = for_scope->FinalizeBlockScope(); - body_block->set_block_scope(for_scope); - // Parsed for-in loop w/ let declaration. 
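The comment removed above documents how the 3.7.1 parser desugared a for-in loop over a 'let' binding. A hedged source-level sketch of that rewrite (the engine transformed the AST, not source text); the name x_prime stands in for the parser-generated temporary x':

  function forLetInSketch(e, body) {
    var x_prime;                 // <let x' be a temporary variable>
    for (x_prime in e) {
      let x;                     // fresh block-scoped binding, per the rewrite
      x = x_prime;
      body(x);
    }
  }
  forLetInSketch({a: 1, b: 2}, function(k) { /* sees 'a', then 'b' */ });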
- return loop; - - } else { - init = variable_statement; - } } else { Expression* expression = ParseExpression(false, CHECK_OK); if (peek() == Token::IN) { @@ -2512,10 +2414,6 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) { Statement* body = ParseStatement(NULL, CHECK_OK); if (loop) loop->Initialize(expression, enumerable, body); - top_scope_ = saved_scope; - for_scope->set_end_position(scanner().location().end_pos); - for_scope = for_scope->FinalizeBlockScope(); - ASSERT(for_scope == NULL); // Parsed for-in loop. return loop; @@ -2546,31 +2444,8 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) { Expect(Token::RPAREN, CHECK_OK); Statement* body = ParseStatement(NULL, CHECK_OK); - top_scope_ = saved_scope; - for_scope->set_end_position(scanner().location().end_pos); - for_scope = for_scope->FinalizeBlockScope(); - if (for_scope != NULL) { - // Rewrite a for statement of the form - // - // for (let x = i; c; n) b - // - // into - // - // { - // let x = i; - // for (; c; n) b - // } - ASSERT(init != NULL); - Block* result = new(zone()) Block(isolate(), NULL, 2, false); - result->AddStatement(init); - result->AddStatement(loop); - result->set_block_scope(for_scope); - if (loop) loop->Initialize(NULL, cond, next, body); - return result; - } else { - if (loop) loop->Initialize(init, cond, next, body); - return loop; - } + if (loop) loop->Initialize(init, cond, next, body); + return loop; } @@ -3190,7 +3065,9 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) { case Token::FUTURE_STRICT_RESERVED_WORD: { Handle<String> name = ParseIdentifier(CHECK_OK); if (fni_ != NULL) fni_->PushVariableName(name); - result = top_scope_->NewUnresolved(name, scanner().location().beg_pos); + result = top_scope_->NewUnresolved(name, + inside_with(), + scanner().location().beg_pos); break; } @@ -3307,11 +3184,9 @@ Expression* Parser::ParseArrayLiteral(bool* ok) { // Update the scope information before the pre-parsing bailout. int literal_index = lexical_scope_->NextMaterializedLiteralIndex(); - // Allocate a fixed array to hold all the object literals. - Handle<FixedArray> object_literals = + // Allocate a fixed array with all the literals. + Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(values->length(), TENURED); - Handle<FixedDoubleArray> double_literals; - ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS; // Fill in the literals. bool is_simple = true; @@ -3323,75 +3198,19 @@ Expression* Parser::ParseArrayLiteral(bool* ok) { } Handle<Object> boilerplate_value = GetBoilerplateValue(values->at(i)); if (boilerplate_value->IsUndefined()) { - object_literals->set_the_hole(i); - if (elements_kind == FAST_DOUBLE_ELEMENTS) { - double_literals->set_the_hole(i); - } + literals->set_the_hole(i); is_simple = false; } else { - // Examine each literal element, and adjust the ElementsKind if the - // literal element is not of a type that can be stored in the current - // ElementsKind. Start with FAST_SMI_ONLY_ELEMENTS, and transition to - // FAST_DOUBLE_ELEMENTS and FAST_ELEMENTS as necessary. Always remember - // the tagged value, no matter what the ElementsKind is in case we - // ultimately end up in FAST_ELEMENTS. - object_literals->set(i, *boilerplate_value); - if (elements_kind == FAST_SMI_ONLY_ELEMENTS) { - // Smi only elements. Notice if a transition to FAST_DOUBLE_ELEMENTS or - // FAST_ELEMENTS is required. 
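The ParseForStatement hunk above also drops the companion rewrite described in its removed comment: for (let x = i; c; n) b becomes { let x = i; for (; c; n) b }, so the block owns the binding. A hedged source-level sketch of the behavior that rewrite preserves:

  function forLetSketch() {
    var out = [];
    {
      let x = 0;                        // the enclosing block owns the 'let'
      for (; x < 3; x++) out.push(x);
    }
    return out;                         // [0, 1, 2]; x is not visible after the block
  }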
- if (!boilerplate_value->IsSmi()) { - if (boilerplate_value->IsNumber() && FLAG_smi_only_arrays) { - // Allocate a double array on the FAST_DOUBLE_ELEMENTS transition to - // avoid over-allocating in TENURED space. - double_literals = isolate()->factory()->NewFixedDoubleArray( - values->length(), TENURED); - // Copy the contents of the FAST_SMI_ONLY_ELEMENT array to the - // FAST_DOUBLE_ELEMENTS array so that they are in sync. - for (int j = 0; j < i; ++j) { - Object* smi_value = object_literals->get(j); - if (smi_value->IsTheHole()) { - double_literals->set_the_hole(j); - } else { - double_literals->set(j, Smi::cast(smi_value)->value()); - } - } - double_literals->set(i, boilerplate_value->Number()); - elements_kind = FAST_DOUBLE_ELEMENTS; - } else { - elements_kind = FAST_ELEMENTS; - } - } - } else if (elements_kind == FAST_DOUBLE_ELEMENTS) { - // Continue to store double values in to FAST_DOUBLE_ELEMENTS arrays - // until the first value is seen that can't be stored as a double. - if (boilerplate_value->IsNumber()) { - double_literals->set(i, boilerplate_value->Number()); - } else { - elements_kind = FAST_ELEMENTS; - } - } + literals->set(i, *boilerplate_value); } } // Simple and shallow arrays can be lazily copied, we transform the // elements array to a copy-on-write array. - if (is_simple && depth == 1 && values->length() > 0 && - elements_kind != FAST_DOUBLE_ELEMENTS) { - object_literals->set_map(isolate()->heap()->fixed_cow_array_map()); + if (is_simple && depth == 1 && values->length() > 0) { + literals->set_map(isolate()->heap()->fixed_cow_array_map()); } - Handle<FixedArrayBase> element_values = elements_kind == FAST_DOUBLE_ELEMENTS - ? Handle<FixedArrayBase>(double_literals) - : Handle<FixedArrayBase>(object_literals); - - // Remember both the literal's constant values as well as the ElementsKind - // in a 2-element FixedArray. - Handle<FixedArray> literals = - isolate()->factory()->NewFixedArray(2, TENURED); - - literals->set(0, Smi::FromInt(elements_kind)); - literals->set(1, *element_values); - return new(zone()) ArrayLiteral( isolate(), literals, values, literal_index, is_simple, depth); } @@ -3896,11 +3715,13 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, // hoisted. In harmony block scoping mode they are block scoped, so they // are not hoisted. Scope* scope = (type == FunctionLiteral::DECLARATION && !harmony_scoping_) - ? NewScope(top_scope_->DeclarationScope(), FUNCTION_SCOPE) - : NewScope(top_scope_, FUNCTION_SCOPE); + ? 
NewScope(top_scope_->DeclarationScope(), Scope::FUNCTION_SCOPE, false) + : NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with()); ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8); int materialized_literal_count; int expected_property_count; + int start_pos; + int end_pos; bool only_simple_this_property_assignments; Handle<FixedArray> this_property_assignments; bool has_duplicate_parameters = false; @@ -3911,7 +3732,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, // FormalParameterList :: // '(' (Identifier)*[','] ')' Expect(Token::LPAREN, CHECK_OK); - scope->set_start_position(scanner().location().beg_pos); + start_pos = scanner().location().beg_pos; Scanner::Location name_loc = Scanner::Location::invalid(); Scanner::Location dupe_loc = Scanner::Location::invalid(); Scanner::Location reserved_loc = Scanner::Location::invalid(); @@ -3957,21 +3778,13 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, // future we can change the AST to only refer to VariableProxies // instead of Variables and Proxis as is the case now. if (type == FunctionLiteral::NAMED_EXPRESSION) { - VariableMode fvar_mode; - Token::Value fvar_init_op; - if (harmony_scoping_) { - fvar_mode = CONST_HARMONY; - fvar_init_op = Token::INIT_CONST_HARMONY; - } else { - fvar_mode = CONST; - fvar_init_op = Token::INIT_CONST; - } - Variable* fvar = top_scope_->DeclareFunctionVar(function_name, fvar_mode); - VariableProxy* fproxy = top_scope_->NewUnresolved(function_name); + Variable* fvar = top_scope_->DeclareFunctionVar(function_name); + VariableProxy* fproxy = + top_scope_->NewUnresolved(function_name, inside_with()); fproxy->BindTo(fvar); body->Add(new(zone()) ExpressionStatement( new(zone()) Assignment(isolate(), - fvar_init_op, + Token::INIT_CONST, fproxy, new(zone()) ThisFunction(isolate()), RelocInfo::kNoPosition))); @@ -3995,18 +3808,18 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, // compile after all. is_lazily_compiled = false; } else { - scope->set_end_position(entry.end_pos()); - if (scope->end_position() <= function_block_pos) { + end_pos = entry.end_pos(); + if (end_pos <= function_block_pos) { // End position greater than end of stream is safe, and hard to check. ReportInvalidPreparseData(function_name, CHECK_OK); } isolate()->counters()->total_preparse_skipped()->Increment( - scope->end_position() - function_block_pos); + end_pos - function_block_pos); // Seek to position just before terminal '}'. - scanner().SeekForward(scope->end_position() - 1); + scanner().SeekForward(end_pos - 1); materialized_literal_count = entry.literal_count(); expected_property_count = entry.property_count(); - if (entry.strict_mode()) top_scope_->SetStrictModeFlag(kStrictMode); + if (entry.strict_mode()) top_scope_->EnableStrictMode(); only_simple_this_property_assignments = false; this_property_assignments = isolate()->factory()->empty_fixed_array(); Expect(Token::RBRACE, CHECK_OK); @@ -4023,13 +3836,12 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, this_property_assignments = lexical_scope.this_property_assignments(); Expect(Token::RBRACE, CHECK_OK); - scope->set_end_position(scanner().location().end_pos); + end_pos = scanner().location().end_pos; } // Validate strict mode. if (top_scope_->is_strict_mode()) { if (IsEvalOrArguments(function_name)) { - int start_pos = scope->start_position(); int position = function_token_position != RelocInfo::kNoPosition ? 
function_token_position : (start_pos > 0 ? start_pos - 1 : start_pos); @@ -4052,7 +3864,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, return NULL; } if (name_is_strict_reserved) { - int start_pos = scope->start_position(); int position = function_token_position != RelocInfo::kNoPosition ? function_token_position : (start_pos > 0 ? start_pos - 1 : start_pos); @@ -4068,9 +3879,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, *ok = false; return NULL; } - CheckOctalLiteral(scope->start_position(), - scope->end_position(), - CHECK_OK); + CheckOctalLiteral(start_pos, end_pos, CHECK_OK); } } @@ -4088,6 +3897,8 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name, only_simple_this_property_assignments, this_property_assignments, num_parameters, + start_pos, + end_pos, type, has_duplicate_parameters); function_literal->set_function_token_position(function_token_position); @@ -5308,16 +5119,17 @@ int ScriptDataImpl::ReadNumber(byte** source) { // Create a Scanner for the preparser to use as input, and preparse the source. static ScriptDataImpl* DoPreParse(UC16CharacterStream* source, - int flags, - ParserRecorder* recorder) { + bool allow_lazy, + ParserRecorder* recorder, + bool harmony_scoping) { Isolate* isolate = Isolate::Current(); JavaScriptScanner scanner(isolate->unicode_cache()); - scanner.SetHarmonyScoping((flags & kHarmonyScoping) != 0); + scanner.SetHarmonyScoping(harmony_scoping); scanner.Initialize(source); intptr_t stack_limit = isolate->stack_guard()->real_climit(); if (!preparser::PreParser::PreParseProgram(&scanner, recorder, - flags, + allow_lazy, stack_limit)) { isolate->StackOverflow(); return NULL; @@ -5334,28 +5146,25 @@ static ScriptDataImpl* DoPreParse(UC16CharacterStream* source, // even if the preparser data is only used once. ScriptDataImpl* ParserApi::PartialPreParse(UC16CharacterStream* source, v8::Extension* extension, - int flags) { + bool harmony_scoping) { bool allow_lazy = FLAG_lazy && (extension == NULL); if (!allow_lazy) { // Partial preparsing is only about lazily compiled functions. // If we don't allow lazy compilation, the log data will be empty. return NULL; } - flags |= kAllowLazy; PartialParserRecorder recorder; - return DoPreParse(source, flags, &recorder); + return DoPreParse(source, allow_lazy, &recorder, harmony_scoping); } ScriptDataImpl* ParserApi::PreParse(UC16CharacterStream* source, v8::Extension* extension, - int flags) { + bool harmony_scoping) { Handle<Script> no_script; - if (FLAG_lazy && (extension == NULL)) { - flags |= kAllowLazy; - } + bool allow_lazy = FLAG_lazy && (extension == NULL); CompleteParserRecorder recorder; - return DoPreParse(source, flags, &recorder); + return DoPreParse(source, allow_lazy, &recorder, harmony_scoping); } @@ -5387,16 +5196,13 @@ bool ParserApi::Parse(CompilationInfo* info) { Handle<Script> script = info->script(); bool harmony_scoping = !info->is_native() && FLAG_harmony_scoping; if (info->is_lazy()) { - bool allow_natives_syntax = - FLAG_allow_natives_syntax || - info->is_native(); - Parser parser(script, allow_natives_syntax, NULL, NULL); + Parser parser(script, true, NULL, NULL); parser.SetHarmonyScoping(harmony_scoping); result = parser.ParseLazy(info); } else { // Whether we allow %identifier(..) syntax. 
bool allow_natives_syntax = - info->is_native() || FLAG_allow_natives_syntax; + info->allows_natives_syntax() || FLAG_allow_natives_syntax; ScriptDataImpl* pre_data = info->pre_parse_data(); Parser parser(script, allow_natives_syntax, @@ -5418,7 +5224,7 @@ bool ParserApi::Parse(CompilationInfo* info) { Handle<String> source = Handle<String>(String::cast(script->source())); result = parser.ParseProgram(source, info->is_global(), - info->strict_mode_flag()); + info->StrictMode()); } } info->SetFunction(result); diff --git a/deps/v8/src/parser.h b/deps/v8/src/parser.h index 268b09474..359bb3848 100644 --- a/deps/v8/src/parser.h +++ b/deps/v8/src/parser.h @@ -33,7 +33,6 @@ #include "preparse-data-format.h" #include "preparse-data.h" #include "scopes.h" -#include "preparser.h" namespace v8 { namespace internal { @@ -44,7 +43,6 @@ class ParserLog; class PositionStack; class Target; class LexicalScope; -class SaveScope; template <typename T> class ZoneListWrapper; @@ -166,13 +164,13 @@ class ParserApi { // Generic preparser generating full preparse data. static ScriptDataImpl* PreParse(UC16CharacterStream* source, v8::Extension* extension, - int flags); + bool harmony_scoping); // Preparser that only does preprocessing that makes sense if only used // immediately after. static ScriptDataImpl* PartialPreParse(UC16CharacterStream* source, v8::Extension* extension, - int flags); + bool harmony_scoping); }; // ---------------------------------------------------------------------------- @@ -461,12 +459,6 @@ class Parser { kForStatement }; - // If a list of variable declarations includes any initializers. - enum VariableDeclarationProperties { - kHasInitializers, - kHasNoInitializers - }; - Isolate* isolate() { return isolate_; } Zone* zone() { return isolate_->zone(); } @@ -481,7 +473,7 @@ class Parser { void ReportInvalidPreparseData(Handle<String> name, bool* ok); void ReportMessage(const char* message, Vector<const char*> args); - bool inside_with() const { return top_scope_->inside_with(); } + bool inside_with() const { return with_nesting_level_ > 0; } JavaScriptScanner& scanner() { return scanner_; } Mode mode() const { return mode_; } ScriptDataImpl* pre_data() const { return pre_data_; } @@ -500,10 +492,10 @@ class Parser { Statement* ParseFunctionDeclaration(bool* ok); Statement* ParseNativeDeclaration(bool* ok); Block* ParseBlock(ZoneStringList* labels, bool* ok); + Block* ParseScopedBlock(ZoneStringList* labels, bool* ok); Block* ParseVariableStatement(VariableDeclarationContext var_context, bool* ok); Block* ParseVariableDeclarations(VariableDeclarationContext var_context, - VariableDeclarationProperties* decl_props, Handle<String>* out, bool* ok); Statement* ParseExpressionOrLabelledStatement(ZoneStringList* labels, @@ -523,9 +515,6 @@ class Parser { TryStatement* ParseTryStatement(bool* ok); DebuggerStatement* ParseDebuggerStatement(bool* ok); - // Support for hamony block scoped bindings. 
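In the parser.h hunk above, ParseScopedBlock moves out of the harmony block-scoping section and the VariableDeclarationProperties enum disappears. For reference, a hedged sketch of the source-level difference block scoping is about (standard JavaScript today, behind --harmony_scoping at the time):

  function g() {
    {
      let l = 1;                 // block scoped: visible only inside this block
      var v = 2;                 // hoisted to the function scope regardless
    }
    // 'l' is no longer in scope here; 'v' still is.
    return v;                    // 2
  }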
- Block* ParseScopedBlock(ZoneStringList* labels, bool* ok); - Expression* ParseExpression(bool accept_IN, bool* ok); Expression* ParseAssignmentExpression(bool accept_IN, bool* ok); Expression* ParseConditionalExpression(bool accept_IN, bool* ok); @@ -680,7 +669,7 @@ class Parser { return ∅ } - Scope* NewScope(Scope* parent, ScopeType type); + Scope* NewScope(Scope* parent, Scope::Type type, bool inside_with); Handle<String> LookupSymbol(int symbol_id); @@ -725,6 +714,7 @@ class Parser { JavaScriptScanner scanner_; Scope* top_scope_; + int with_nesting_level_; LexicalScope* lexical_scope_; Mode mode_; @@ -744,7 +734,6 @@ class Parser { bool harmony_scoping_; friend class LexicalScope; - friend class SaveScope; }; diff --git a/deps/v8/src/preparser-api.cc b/deps/v8/src/preparser-api.cc index 25c7a823c..899489e25 100644 --- a/deps/v8/src/preparser-api.cc +++ b/deps/v8/src/preparser-api.cc @@ -188,7 +188,7 @@ PreParserData Preparse(UnicodeInputStream* input, size_t max_stack) { preparser::PreParser::PreParseResult result = preparser::PreParser::PreParseProgram(&scanner, &recorder, - internal::kAllowLazy, + true, stack_limit); if (result == preparser::PreParser::kPreParseStackOverflow) { return PreParserData::StackOverflow(); diff --git a/deps/v8/src/preparser.cc b/deps/v8/src/preparser.cc index 3313658ef..9f8e1eecc 100644 --- a/deps/v8/src/preparser.cc +++ b/deps/v8/src/preparser.cc @@ -125,13 +125,11 @@ PreParser::Statement PreParser::ParseSourceElement(bool* ok) { // In harmony mode we allow additionally the following productions // SourceElement: // LetDeclaration - // ConstDeclaration switch (peek()) { case i::Token::FUNCTION: return ParseFunctionDeclaration(ok); case i::Token::LET: - case i::Token::CONST: return ParseVariableStatement(kSourceElement, ok); default: return ParseStatement(ok); @@ -242,7 +240,7 @@ PreParser::Statement PreParser::ParseStatement(bool* ok) { i::Scanner::Location start_location = scanner_->peek_location(); Statement statement = ParseFunctionDeclaration(CHECK_OK); i::Scanner::Location end_location = scanner_->location(); - if (strict_mode() || harmony_scoping_) { + if (strict_mode()) { ReportMessageAt(start_location.beg_pos, end_location.end_pos, "strict_function", NULL); *ok = false; @@ -314,7 +312,6 @@ PreParser::Statement PreParser::ParseVariableStatement( Statement result = ParseVariableDeclarations(var_context, NULL, - NULL, CHECK_OK); ExpectSemicolon(CHECK_OK); return result; @@ -328,37 +325,15 @@ PreParser::Statement PreParser::ParseVariableStatement( // of 'for-in' loops. 
PreParser::Statement PreParser::ParseVariableDeclarations( VariableDeclarationContext var_context, - VariableDeclarationProperties* decl_props, int* num_decl, bool* ok) { // VariableDeclarations :: // ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[','] - // - // The ES6 Draft Rev3 specifies the following grammar for const declarations - // - // ConstDeclaration :: - // const ConstBinding (',' ConstBinding)* ';' - // ConstBinding :: - // Identifier '=' AssignmentExpression - // - // TODO(ES6): - // ConstBinding :: - // BindingPattern '=' AssignmentExpression - bool require_initializer = false; + if (peek() == i::Token::VAR) { Consume(i::Token::VAR); } else if (peek() == i::Token::CONST) { - if (harmony_scoping_) { - if (var_context != kSourceElement && - var_context != kForStatement) { - i::Scanner::Location location = scanner_->peek_location(); - ReportMessageAt(location.beg_pos, location.end_pos, - "unprotected_const", NULL); - *ok = false; - return Statement::Default(); - } - require_initializer = true; - } else if (strict_mode()) { + if (strict_mode()) { i::Scanner::Location location = scanner_->peek_location(); ReportMessageAt(location, "strict_const", NULL); *ok = false; @@ -397,10 +372,9 @@ PreParser::Statement PreParser::ParseVariableDeclarations( return Statement::Default(); } nvars++; - if (peek() == i::Token::ASSIGN || require_initializer) { + if (peek() == i::Token::ASSIGN) { Expect(i::Token::ASSIGN, CHECK_OK); ParseAssignmentExpression(var_context != kForStatement, CHECK_OK); - if (decl_props != NULL) *decl_props = kHasInitializers; } } while (peek() == i::Token::COMMA); @@ -595,14 +569,9 @@ PreParser::Statement PreParser::ParseForStatement(bool* ok) { if (peek() != i::Token::SEMICOLON) { if (peek() == i::Token::VAR || peek() == i::Token::CONST || peek() == i::Token::LET) { - bool is_let = peek() == i::Token::LET; int decl_count; - VariableDeclarationProperties decl_props = kHasNoInitializers; - ParseVariableDeclarations( - kForStatement, &decl_props, &decl_count, CHECK_OK); - bool accept_IN = decl_count == 1 && - !(is_let && decl_props == kHasInitializers); - if (peek() == i::Token::IN && accept_IN) { + ParseVariableDeclarations(kForStatement, &decl_count, CHECK_OK); + if (peek() == i::Token::IN && decl_count == 1) { Expect(i::Token::IN, CHECK_OK); ParseExpression(true, CHECK_OK); Expect(i::Token::RPAREN, CHECK_OK); @@ -1384,11 +1353,8 @@ PreParser::Expression PreParser::ParseFunctionLiteral(bool* ok) { PreParser::Expression PreParser::ParseV8Intrinsic(bool* ok) { // CallRuntime :: // '%' Identifier Arguments + Expect(i::Token::MOD, CHECK_OK); - if (!allow_natives_syntax_) { - *ok = false; - return Expression::Default(); - } ParseIdentifier(CHECK_OK); ParseArguments(ok); diff --git a/deps/v8/src/preparser.h b/deps/v8/src/preparser.h index 6a0b97a56..cb1d5fb4e 100644 --- a/deps/v8/src/preparser.h +++ b/deps/v8/src/preparser.h @@ -118,12 +118,9 @@ class PreParser { // during parsing. static PreParseResult PreParseProgram(i::JavaScriptScanner* scanner, i::ParserRecorder* log, - int flags, + bool allow_lazy, uintptr_t stack_limit) { - bool allow_lazy = (flags & i::kAllowLazy) != 0; - bool allow_natives_syntax = (flags & i::kAllowNativesSyntax) != 0; - return PreParser(scanner, log, stack_limit, - allow_lazy, allow_natives_syntax).PreParse(); + return PreParser(scanner, log, stack_limit, allow_lazy).PreParse(); } private: @@ -182,12 +179,6 @@ class PreParser { kForStatement }; - // If a list of variable declarations includes any initializers. 
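The preparser.cc hunk above drops the ES6-draft 'const' handling that 3.7.1 added under harmony scoping: a ConstBinding had to carry an initializer and could only appear in source-element (or for-statement) position. A hedged sketch of what that preparser accepted and rejected; the rejected forms are kept as comments:

  const a = 1;             // accepted: initializer present, source-element position
  // const b;              // rejected: ConstBinding requires '= AssignmentExpression'
  // if (a) const c = 2;   // rejected as "unprotected_const": not a source element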
- enum VariableDeclarationProperties { - kHasInitializers, - kHasNoInitializers - }; - class Expression; class Identifier { @@ -408,16 +399,6 @@ class PreParser { typedef int Arguments; - // The Strict Mode (ECMA-262 5th edition, 4.2.2). - enum StrictModeFlag { - kNonStrictMode, - kStrictMode, - // This value is never used, but is needed to prevent GCC 4.5 from failing - // to compile when we assert that a flag is either kNonStrictMode or - // kStrictMode. - kInvalidStrictFlag - }; - class Scope { public: Scope(Scope** variable, ScopeType type) @@ -427,8 +408,7 @@ class PreParser { materialized_literal_count_(0), expected_properties_(0), with_nesting_count_(0), - strict_mode_flag_((prev_ != NULL) ? prev_->strict_mode_flag() - : kNonStrictMode) { + strict_((prev_ != NULL) && prev_->is_strict()) { *variable = this; } ~Scope() { *variable_ = prev_; } @@ -438,13 +418,8 @@ class PreParser { int expected_properties() { return expected_properties_; } int materialized_literal_count() { return materialized_literal_count_; } bool IsInsideWith() { return with_nesting_count_ != 0; } - bool is_strict_mode() { return strict_mode_flag_ == kStrictMode; } - StrictModeFlag strict_mode_flag() { - return strict_mode_flag_; - } - void set_strict_mode_flag(StrictModeFlag strict_mode_flag) { - strict_mode_flag_ = strict_mode_flag; - } + bool is_strict() { return strict_; } + void set_strict() { strict_ = true; } void EnterWith() { with_nesting_count_++; } void LeaveWith() { with_nesting_count_--; } @@ -455,15 +430,14 @@ class PreParser { int materialized_literal_count_; int expected_properties_; int with_nesting_count_; - StrictModeFlag strict_mode_flag_; + bool strict_; }; // Private constructor only used in PreParseProgram. PreParser(i::JavaScriptScanner* scanner, i::ParserRecorder* log, uintptr_t stack_limit, - bool allow_lazy, - bool allow_natives_syntax) + bool allow_lazy) : scanner_(scanner), log_(log), scope_(NULL), @@ -471,8 +445,7 @@ class PreParser { strict_mode_violation_location_(i::Scanner::Location::invalid()), strict_mode_violation_type_(NULL), stack_overflow_(false), - allow_lazy_(allow_lazy), - allow_natives_syntax_(allow_natives_syntax), + allow_lazy_(true), parenthesized_function_(false), harmony_scoping_(scanner->HarmonyScoping()) { } @@ -486,7 +459,7 @@ class PreParser { if (stack_overflow_) return kPreParseStackOverflow; if (!ok) { ReportUnexpectedToken(scanner_->current_token()); - } else if (scope_->is_strict_mode()) { + } else if (scope_->is_strict()) { CheckOctalLiteral(start_position, scanner_->location().end_pos, &ok); } return kPreParseSuccess; @@ -520,7 +493,6 @@ class PreParser { Statement ParseVariableStatement(VariableDeclarationContext var_context, bool* ok); Statement ParseVariableDeclarations(VariableDeclarationContext var_context, - VariableDeclarationProperties* decl_props, int* num_decl, bool* ok); Statement ParseExpressionOrLabelledStatement(bool* ok); @@ -591,10 +563,10 @@ class PreParser { bool peek_any_identifier(); void set_strict_mode() { - scope_->set_strict_mode_flag(kStrictMode); + scope_->set_strict(); } - bool strict_mode() { return scope_->strict_mode_flag() == kStrictMode; } + bool strict_mode() { return scope_->is_strict(); } void Consume(i::Token::Value token) { Next(); } @@ -635,7 +607,6 @@ class PreParser { const char* strict_mode_violation_type_; bool stack_overflow_; bool allow_lazy_; - bool allow_natives_syntax_; bool parenthesized_function_; bool harmony_scoping_; }; diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc index 
9812c26e9..bae35c89e 100644 --- a/deps/v8/src/profile-generator.cc +++ b/deps/v8/src/profile-generator.cc @@ -1930,11 +1930,9 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) { SetInternalReference(js_fun, entry, "context", js_fun->unchecked_context(), JSFunction::kContextOffset); - TagObject(js_fun->literals_or_bindings(), - "(function literals_or_bindings)"); + TagObject(js_fun->literals(), "(function literals)"); SetInternalReference(js_fun, entry, - "literals_or_bindings", - js_fun->literals_or_bindings(), + "literals", js_fun->literals(), JSFunction::kLiteralsOffset); } TagObject(js_obj->properties(), "(object properties)"); @@ -1951,10 +1949,6 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) { SetInternalReference(obj, entry, 1, cs->first()); SetInternalReference(obj, entry, 2, cs->second()); } - if (obj->IsSlicedString()) { - SlicedString* ss = SlicedString::cast(obj); - SetInternalReference(obj, entry, "parent", ss->parent()); - } extract_indexed_refs = false; } else if (obj->IsGlobalContext()) { Context* context = Context::cast(obj); @@ -2170,16 +2164,15 @@ void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, String* V8HeapExplorer::GetConstructorName(JSObject* object) { - Heap* heap = object->GetHeap(); - if (object->IsJSFunction()) return heap->closure_symbol(); + if (object->IsJSFunction()) return HEAP->closure_symbol(); String* constructor_name = object->constructor_name(); - if (constructor_name == heap->Object_symbol()) { + if (constructor_name == HEAP->Object_symbol()) { // Look up an immediate "constructor" property, if it is a function, // return its name. This is for instances of binding objects, which // have prototype constructor type "Object". Object* constructor_prop = NULL; - LookupResult result(heap->isolate()); - object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result); + LookupResult result; + object->LocalLookupRealNamedProperty(HEAP->constructor_symbol(), &result); if (result.IsProperty()) { constructor_prop = result.GetLazyValue(); } diff --git a/deps/v8/src/property.cc b/deps/v8/src/property.cc index 6e043e268..7cc2df5a3 100644 --- a/deps/v8/src/property.cc +++ b/deps/v8/src/property.cc @@ -31,15 +31,6 @@ namespace v8 { namespace internal { -void LookupResult::Iterate(ObjectVisitor* visitor) { - LookupResult* current = this; // Could be NULL. 
- while (current != NULL) { - visitor->VisitPointer(BitCast<Object**>(¤t->holder_)); - current = current->next_; - } -} - - #ifdef OBJECT_PRINT void LookupResult::Print(FILE* out) { if (!IsFound()) { diff --git a/deps/v8/src/property.h b/deps/v8/src/property.h index ffea41e66..ee2e8c844 100644 --- a/deps/v8/src/property.h +++ b/deps/v8/src/property.h @@ -164,20 +164,10 @@ class CallbacksDescriptor: public Descriptor { class LookupResult BASE_EMBEDDED { public: - explicit LookupResult(Isolate* isolate) - : isolate_(isolate), - next_(isolate->top_lookup_result()), - lookup_type_(NOT_FOUND), - holder_(NULL), + LookupResult() + : lookup_type_(NOT_FOUND), cacheable_(true), - details_(NONE, NORMAL) { - isolate->SetTopLookupResult(this); - } - - ~LookupResult() { - ASSERT(isolate_->top_lookup_result() == this); - isolate_->SetTopLookupResult(next_); - } + details_(NONE, NORMAL) {} void DescriptorResult(JSObject* holder, PropertyDetails details, int number) { lookup_type_ = DESCRIPTOR_TYPE; @@ -225,7 +215,6 @@ class LookupResult BASE_EMBEDDED { void NotFound() { lookup_type_ = NOT_FOUND; - holder_ = NULL; } JSObject* holder() { @@ -357,12 +346,7 @@ class LookupResult BASE_EMBEDDED { return holder()->GetNormalizedProperty(this); } - void Iterate(ObjectVisitor* visitor); - private: - Isolate* isolate_; - LookupResult* next_; - // Where did we find the result; enum { NOT_FOUND, diff --git a/deps/v8/src/proxy.js b/deps/v8/src/proxy.js index 3cd467faf..a51f09ae5 100644 --- a/deps/v8/src/proxy.js +++ b/deps/v8/src/proxy.js @@ -32,10 +32,7 @@ var $Proxy = global.Proxy $Proxy.create = function(handler, proto) { if (!IS_SPEC_OBJECT(handler)) throw MakeTypeError("handler_non_object", ["create"]) - if (IS_UNDEFINED(proto)) - proto = null - else if (!(IS_SPEC_OBJECT(proto) || proto === null)) - throw MakeTypeError("proto_non_object", ["create"]) + if (!IS_SPEC_OBJECT(proto)) proto = null // Mozilla does this... return %CreateJSProxy(handler, proto) } @@ -44,20 +41,20 @@ $Proxy.createFunction = function(handler, callTrap, constructTrap) { throw MakeTypeError("handler_non_object", ["create"]) if (!IS_SPEC_FUNCTION(callTrap)) throw MakeTypeError("trap_function_expected", ["createFunction", "call"]) + var construct if (IS_UNDEFINED(constructTrap)) { - constructTrap = DerivedConstructTrap(callTrap) + construct = DerivedConstructTrap(callTrap) } else if (IS_SPEC_FUNCTION(constructTrap)) { - // Make sure the trap receives 'undefined' as this. - var construct = constructTrap - constructTrap = function() { - return %Apply(construct, void 0, arguments, 0, %_ArgumentsLength()); + construct = function() { + // Make sure the trap receives 'undefined' as this. 
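The proxy.js hunks here touch V8's old experimental proxy API of this era (Proxy.create / Proxy.createFunction with a handler of derived traps, behind --harmony_proxies), not the standard ES6 Proxy constructor. A hedged sketch of the call/construct trap routing that the createFunction change concerns:

  function callTrap()      { return 'called'; }
  function constructTrap() { return { built: true }; }
  var f = Proxy.createFunction({}, callTrap, constructTrap);
  f();       // 'called'        -> routed through callTrap
  new f();   // { built: true } -> routed through constructTrap, which both
             //                    variants wrap so it sees 'this' === undefined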
+ return %Apply(constructTrap, void 0, arguments, 0, %_ArgumentsLength()); } } else { throw MakeTypeError("trap_function_expected", ["createFunction", "construct"]) } return %CreateJSFunctionProxy( - handler, callTrap, constructTrap, $Function.prototype) + handler, callTrap, construct, $Function.prototype) } @@ -156,32 +153,9 @@ function DerivedKeysTrap() { var enumerableNames = [] for (var i = 0, count = 0; i < names.length; ++i) { var name = names[i] - var desc = this.getOwnPropertyDescriptor(TO_STRING_INLINE(name)) - if (!IS_UNDEFINED(desc) && desc.enumerable) { + if (this.getOwnPropertyDescriptor(TO_STRING_INLINE(name)).enumerable) { enumerableNames[count++] = names[i] } } return enumerableNames } - -function DerivedEnumerateTrap() { - var names = this.getPropertyNames() - var enumerableNames = [] - for (var i = 0, count = 0; i < names.length; ++i) { - var name = names[i] - var desc = this.getPropertyDescriptor(TO_STRING_INLINE(name)) - if (!IS_UNDEFINED(desc) && desc.enumerable) { - enumerableNames[count++] = names[i] - } - } - return enumerableNames -} - -function ProxyEnumerate(proxy) { - var handler = %GetHandler(proxy) - if (IS_UNDEFINED(handler.enumerate)) { - return %Apply(DerivedEnumerateTrap, handler, [], 0, 0) - } else { - return ToStringArray(handler.enumerate(), "enumerate") - } -} diff --git a/deps/v8/src/regexp.js b/deps/v8/src/regexp.js index f373ceb67..0ab86f333 100644 --- a/deps/v8/src/regexp.js +++ b/deps/v8/src/regexp.js @@ -174,6 +174,13 @@ function RegExpExec(string) { ['RegExp.prototype.exec', this]); } + if (%_ArgumentsLength() === 0) { + var regExpInput = LAST_INPUT(lastMatchInfo); + if (IS_UNDEFINED(regExpInput)) { + throw MakeError('no_input_to_regexp', [this]); + } + string = regExpInput; + } string = TO_STRING_INLINE(string); var lastIndex = this.lastIndex; @@ -222,6 +229,14 @@ function RegExpTest(string) { throw MakeTypeError('incompatible_method_receiver', ['RegExp.prototype.test', this]); } + if (%_ArgumentsLength() == 0) { + var regExpInput = LAST_INPUT(lastMatchInfo); + if (IS_UNDEFINED(regExpInput)) { + throw MakeError('no_input_to_regexp', [this]); + } + string = regExpInput; + } + string = TO_STRING_INLINE(string); var lastIndex = this.lastIndex; diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc index 9c23c2c96..e0f507e17 100644 --- a/deps/v8/src/runtime.cc +++ b/deps/v8/src/runtime.cc @@ -432,77 +432,64 @@ static Handle<Object> CreateArrayLiteralBoilerplate( // Create the JSArray. Handle<JSFunction> constructor( JSFunction::GlobalContextFromLiterals(*literals)->array_function()); - Handle<JSArray> object = - Handle<JSArray>::cast(isolate->factory()->NewJSObject(constructor)); - - ElementsKind constant_elements_kind = - static_cast<ElementsKind>(Smi::cast(elements->get(0))->value()); - Handle<FixedArrayBase> constant_elements_values( - FixedArrayBase::cast(elements->get(1))); - - ASSERT(FLAG_smi_only_arrays || constant_elements_kind == FAST_ELEMENTS || - constant_elements_kind == FAST_SMI_ONLY_ELEMENTS); - bool allow_literal_kind_transition = FLAG_smi_only_arrays && - constant_elements_kind > object->GetElementsKind(); - - if (!FLAG_smi_only_arrays && - constant_elements_values->length() > kSmiOnlyLiteralMinimumLength && - constant_elements_kind != object->GetElementsKind()) { - allow_literal_kind_transition = true; - } - - // If the ElementsKind of the constant values of the array literal are less - // specific than the ElementsKind of the boilerplate array object, change the - // boilerplate array object's map to reflect that kind. 
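The regexp.js hunk above restores the old, non-standard behavior where RegExp.prototype.exec and .test called with no argument fall back to the last regexp input, throwing 'no_input_to_regexp' if there has never been one. A hedged sketch of what the restored code does; note that modern engines instead coerce the missing argument to the string "undefined":

  var re = /b(c)/;
  re.exec('abcd');   // a successful match records 'abcd' as the last input
  re.test();         // no argument: the restored code reuses 'abcd' -> true
  // On a fresh context with no prior match, re.test() with no argument
  // would throw the 'no_input_to_regexp' error added back above.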
- if (allow_literal_kind_transition) { - Handle<Map> transitioned_array_map = - isolate->factory()->GetElementsTransitionMap(object, - constant_elements_kind); - object->set_map(*transitioned_array_map); - } - - Handle<FixedArrayBase> copied_elements_values; - if (constant_elements_kind == FAST_DOUBLE_ELEMENTS) { - ASSERT(FLAG_smi_only_arrays); - copied_elements_values = isolate->factory()->CopyFixedDoubleArray( - Handle<FixedDoubleArray>::cast(constant_elements_values)); - } else { - ASSERT(constant_elements_kind == FAST_SMI_ONLY_ELEMENTS || - constant_elements_kind == FAST_ELEMENTS); - const bool is_cow = - (constant_elements_values->map() == - isolate->heap()->fixed_cow_array_map()); - if (is_cow) { - copied_elements_values = constant_elements_values; -#if DEBUG - Handle<FixedArray> fixed_array_values = - Handle<FixedArray>::cast(copied_elements_values); - for (int i = 0; i < fixed_array_values->length(); i++) { - ASSERT(!fixed_array_values->get(i)->IsFixedArray()); + Handle<Object> object = isolate->factory()->NewJSObject(constructor); + + if (elements->length() > kSmiOnlyLiteralMinimumLength) { + Handle<Map> smi_array_map = isolate->factory()->GetElementsTransitionMap( + Handle<JSObject>::cast(object), + FAST_SMI_ONLY_ELEMENTS); + HeapObject::cast(*object)->set_map(*smi_array_map); + } + + const bool is_cow = + (elements->map() == isolate->heap()->fixed_cow_array_map()); + Handle<FixedArray> copied_elements = + is_cow ? elements : isolate->factory()->CopyFixedArray(elements); + + Handle<FixedArray> content = Handle<FixedArray>::cast(copied_elements); + bool has_non_smi = false; + if (is_cow) { + // Copy-on-write arrays must be shallow (and simple). + for (int i = 0; i < content->length(); i++) { + Object* current = content->get(i); + ASSERT(!current->IsFixedArray()); + if (!current->IsSmi() && !current->IsTheHole()) { + has_non_smi = true; } + } +#if DEBUG + for (int i = 0; i < content->length(); i++) { + ASSERT(!content->get(i)->IsFixedArray()); + } #endif - } else { - Handle<FixedArray> fixed_array_values = - Handle<FixedArray>::cast(constant_elements_values); - Handle<FixedArray> fixed_array_values_copy = - isolate->factory()->CopyFixedArray(fixed_array_values); - copied_elements_values = fixed_array_values_copy; - for (int i = 0; i < fixed_array_values->length(); i++) { - Object* current = fixed_array_values->get(i); - if (current->IsFixedArray()) { - // The value contains the constant_properties of a - // simple object or array literal. - Handle<FixedArray> fa(FixedArray::cast(fixed_array_values->get(i))); - Handle<Object> result = - CreateLiteralBoilerplate(isolate, literals, fa); - if (result.is_null()) return result; - fixed_array_values_copy->set(i, *result); + } else { + for (int i = 0; i < content->length(); i++) { + Object* current = content->get(i); + if (current->IsFixedArray()) { + // The value contains the constant_properties of a + // simple object or array literal. + Handle<FixedArray> fa(FixedArray::cast(content->get(i))); + Handle<Object> result = + CreateLiteralBoilerplate(isolate, literals, fa); + if (result.is_null()) return result; + content->set(i, *result); + has_non_smi = true; + } else { + if (!current->IsSmi() && !current->IsTheHole()) { + has_non_smi = true; } } } } - object->set_elements(*copied_elements_values); - object->set_length(Smi::FromInt(copied_elements_values->length())); + + // Set the elements. 
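The CreateArrayLiteralBoilerplate hunk here, together with the ParseArrayLiteral hunk earlier, removes the elements-kind bookkeeping 3.7.1 did for array literals: the parser classified the constant values and the runtime built the boilerplate in a matching representation, while the restored 3.7.0 code always stores a plain FixedArray (with a smi-only map heuristic for long literals). A hedged illustration of the classification the removed code performed:

  var smiOnly = [1, 2, 3];        // only small integers      -> FAST_SMI_ONLY_ELEMENTS
  var doubles = [1, 2.5, 3];      // a non-Smi number appears -> FAST_DOUBLE_ELEMENTS
  var generic = [1, 'two', {}];   // a non-number appears     -> FAST_ELEMENTS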
+ Handle<JSArray> js_object(Handle<JSArray>::cast(object)); + isolate->factory()->SetContent(js_object, content); + + if (has_non_smi && js_object->HasFastSmiOnlyElements()) { + isolate->factory()->EnsureCanContainNonSmiElements(js_object); + } + return object; } @@ -717,82 +704,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Fix) { } -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInitialize) { - HandleScope scope(isolate); - ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSSet, holder, 0); - Handle<ObjectHashSet> table = isolate->factory()->NewObjectHashSet(0); - holder->set_table(*table); - return *holder; -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAdd) { - HandleScope scope(isolate); - ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(JSSet, holder, 0); - Handle<Object> key(args[1]); - Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table())); - table = ObjectHashSetAdd(table, key); - holder->set_table(*table); - return isolate->heap()->undefined_symbol(); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHas) { - HandleScope scope(isolate); - ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(JSSet, holder, 0); - Handle<Object> key(args[1]); - Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table())); - return isolate->heap()->ToBoolean(table->Contains(*key)); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDelete) { - HandleScope scope(isolate); - ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(JSSet, holder, 0); - Handle<Object> key(args[1]); - Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table())); - table = ObjectHashSetRemove(table, key); - holder->set_table(*table); - return isolate->heap()->undefined_symbol(); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapInitialize) { - HandleScope scope(isolate); - ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSMap, holder, 0); - Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0); - holder->set_table(*table); - return *holder; -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGet) { - HandleScope scope(isolate); - ASSERT(args.length() == 2); - CONVERT_ARG_CHECKED(JSMap, holder, 0); - Handle<Object> key(args[1]); - return ObjectHashTable::cast(holder->table())->Lookup(*key); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_MapSet) { - HandleScope scope(isolate); - ASSERT(args.length() == 3); - CONVERT_ARG_CHECKED(JSMap, holder, 0); - Handle<Object> key(args[1]); - Handle<Object> value(args[2]); - Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table())); - Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value); - holder->set_table(*new_table); - return *value; -} - - RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) { HandleScope scope(isolate); ASSERT(args.length() == 1); @@ -1050,7 +961,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) { HandleScope scope(isolate); Handle<FixedArray> elms = isolate->factory()->NewFixedArray(DESCRIPTOR_SIZE); Handle<JSArray> desc = isolate->factory()->NewJSArrayWithElements(elms); - LookupResult result(isolate); + LookupResult result; CONVERT_ARG_CHECKED(JSObject, obj, 0); CONVERT_ARG_CHECKED(String, name, 1); @@ -1081,7 +992,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) { case JSObject::INTERCEPTED_ELEMENT: case JSObject::FAST_ELEMENT: { elms->set(IS_ACCESSOR_INDEX, heap->false_value()); - Handle<Object> value = Object::GetElement(obj, index); + Handle<Object> value = GetElement(obj, index); RETURN_IF_EMPTY_HANDLE(isolate, value); elms->set(VALUE_INDEX, *value); 
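The block above deletes the Runtime_Set*/Runtime_Map* entry points while the WeakMap ones stay. They back the harmony collections surface of this era (behind a harmony flag at the time), roughly:

  var s = new Set();       // Runtime_SetInitialize
  s.add('x');              // Runtime_SetAdd
  s.has('x');              // Runtime_SetHas   -> true
  s.delete('x');           // Runtime_SetDelete
  var m = new Map();       // Runtime_MapInitialize
  m.set('k', 1);           // Runtime_MapSet
  m.get('k');              // Runtime_MapGet   -> 1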
elms->set(WRITABLE_INDEX, heap->true_value()); @@ -1125,7 +1036,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) { case NORMAL: { // This is a data property. elms->set(IS_ACCESSOR_INDEX, heap->false_value()); - Handle<Object> value = Object::GetElement(obj, index); + Handle<Object> value = GetElement(obj, index); ASSERT(!value.is_null()); elms->set(VALUE_INDEX, *value); elms->set(WRITABLE_INDEX, heap->ToBoolean(!details.IsReadOnly())); @@ -1329,7 +1240,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) { if (value->IsUndefined() || is_const_property) { // Lookup the property in the global object, and don't set the // value of the variable if the property is already there. - LookupResult lookup(isolate); + LookupResult lookup; global->Lookup(*name, &lookup); if (lookup.IsProperty()) { // We found an existing property. Unless it was an interceptor @@ -1356,7 +1267,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) { value = function; } - LookupResult lookup(isolate); + LookupResult lookup; global->LocalLookup(*name, &lookup); // Compute the property attributes. According to ECMA-262, section @@ -1364,10 +1275,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) { // non-deletable. However, neither SpiderMonkey nor KJS creates the // property as read-only, so we don't either. int attr = NONE; - if (!DeclareGlobalsEvalFlag::decode(flags)) { + if ((flags & kDeclareGlobalsEvalFlag) == 0) { attr |= DONT_DELETE; } - bool is_native = DeclareGlobalsNativeFlag::decode(flags); + bool is_native = (flags & kDeclareGlobalsNativeFlag) != 0; if (is_const_property || (is_native && is_function_declaration)) { attr |= READ_ONLY; } @@ -1392,7 +1303,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) { value, attributes)); } else { - StrictModeFlag strict_mode = DeclareGlobalsStrictModeFlag::decode(flags); + StrictModeFlag strict_mode = + ((flags & kDeclareGlobalsStrictModeFlag) != 0) ? kStrictMode + : kNonStrictMode; RETURN_IF_EMPTY_HANDLE(isolate, SetProperty(global, name, @@ -1486,7 +1399,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) { // not real JSObjects. if (initial_value->IsTheHole() && !object->IsJSContextExtensionObject()) { - LookupResult lookup(isolate); + LookupResult lookup; object->Lookup(*name, &lookup); if (lookup.IsProperty() && (lookup.type() == CALLBACKS)) { return ThrowRedeclarationError(isolate, "const", name); @@ -1530,7 +1443,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) { // Note that objects can have hidden prototypes, so we need to traverse // the whole chain of hidden prototypes to do a 'local' lookup. Object* object = global; - LookupResult lookup(isolate); + LookupResult lookup; while (object->IsJSObject() && JSObject::cast(object)->map()->is_hidden_prototype()) { JSObject* raw_holder = JSObject::cast(object); @@ -1584,7 +1497,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) { // add it as a local property even in case of callbacks in the // prototype chain (this rules out using SetProperty). // We use SetLocalPropertyIgnoreAttributes instead - LookupResult lookup(isolate); + LookupResult lookup; global->LocalLookup(*name, &lookup); if (!lookup.IsProperty()) { return global->SetLocalPropertyIgnoreAttributes(*name, @@ -1701,7 +1614,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstContextSlot) { // This is the property that was introduced by the const declaration. // Set it if it hasn't been set before. 
NOTE: We cannot use // GetProperty() to get the current value as it 'unholes' the value. - LookupResult lookup(isolate); + LookupResult lookup; object->LocalLookupRealNamedProperty(*name, &lookup); ASSERT(lookup.IsProperty()); // the property was declared ASSERT(lookup.IsReadOnly()); // and it was declared as read-only @@ -1750,6 +1663,19 @@ RUNTIME_FUNCTION(MaybeObject*, } +RUNTIME_FUNCTION(MaybeObject*, Runtime_NonSmiElementStored) { + ASSERT(args.length() == 1); + CONVERT_ARG_CHECKED(JSObject, object, 0); + if (object->HasFastSmiOnlyElements()) { + MaybeObject* maybe_map = object->GetElementsTransitionMap(FAST_ELEMENTS); + Map* map; + if (!maybe_map->To<Map>(&map)) return maybe_map; + object->set_map(Map::cast(map)); + } + return *object; +} + + RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExec) { HandleScope scope(isolate); ASSERT(args.length() == 4); @@ -2004,6 +1930,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionMarkNameShouldPrintAsAnonymous) { } +RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetBound) { + HandleScope scope(isolate); + ASSERT(args.length() == 1); + + CONVERT_CHECKED(JSFunction, fun, args[0]); + fun->shared()->set_bound(true); + return isolate->heap()->undefined_value(); +} + RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionRemovePrototype) { NoHandleAllocation ha; ASSERT(args.length() == 1); @@ -2082,6 +2017,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetLength) { } +// Creates a local, readonly, property called length with the correct +// length (when read by the user). This effectively overwrites the +// interceptor used to normally provide the length. +RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionSetLength) { + NoHandleAllocation ha; + ASSERT(args.length() == 2); + CONVERT_CHECKED(JSFunction, fun, args[0]); + CONVERT_CHECKED(Smi, length, args[1]); + MaybeObject* maybe_name = + isolate->heap()->AllocateStringFromAscii(CStrVector("length")); + String* name; + if (!maybe_name->To(&name)) return maybe_name; + PropertyAttributes attr = + static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM | READ_ONLY); + return fun->AddProperty(name, length, attr, kNonStrictMode); +} + + RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) { NoHandleAllocation ha; ASSERT(args.length() == 2); @@ -2184,12 +2137,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) { Handle<JSFunction> fun = Handle<JSFunction>::cast(code); Handle<SharedFunctionInfo> shared(fun->shared()); - if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) { + if (!EnsureCompiled(shared, KEEP_EXCEPTION)) { return Failure::Exception(); } // Since we don't store the source for this we should never // optimize this. shared->code()->set_optimizable(false); + // Set the code, scope info, formal parameter count, // and the length of the target function. target->shared()->set_code(shared->code()); @@ -4115,6 +4069,11 @@ MaybeObject* Runtime::GetElementOrCharAt(Isolate* isolate, return prototype->GetElement(index); } + return GetElement(object, index); +} + + +MaybeObject* Runtime::GetElement(Handle<Object> object, uint32_t index) { return object->GetElement(index); } @@ -4203,7 +4162,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) { return value->IsTheHole() ? isolate->heap()->undefined_value() : value; } // Lookup cache miss. Perform lookup and update the cache if appropriate. 
- LookupResult result(isolate); + LookupResult result; receiver->LocalLookup(key, &result); if (result.IsProperty() && result.type() == FIELD) { int offset = result.GetFieldIndex(); @@ -4258,7 +4217,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) { int unchecked = flag_attr->value(); RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0); RUNTIME_ASSERT(!obj->IsNull()); - LookupResult result(isolate); + LookupResult result; obj->LocalLookupRealNamedProperty(name, &result); PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked); @@ -4300,11 +4259,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) { uint32_t index; bool is_element = name->AsArrayIndex(&index); - // Special case for elements if any of the flags might be involved. + // Special case for elements if any of the flags are true. // If elements are in fast case we always implicitly assume that: // DONT_DELETE: false, DONT_ENUM: false, READ_ONLY: false. - if (is_element && (attr != NONE || - js_object->HasLocalElement(index) == JSObject::DICTIONARY_ELEMENT)) { + if (((unchecked & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) && + is_element) { // Normalize the elements to enable attributes on the property. if (js_object->IsJSGlobalProxy()) { // We do not need to do access checks here since these has already @@ -4342,7 +4301,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) { return *obj_value; } - LookupResult result(isolate); + LookupResult result; js_object->LocalLookupRealNamedProperty(*name, &result); // To be compatible with safari we do not change the value on API objects @@ -4609,39 +4568,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetProperty) { } -MaybeObject* TransitionElements(Handle<Object> object, - ElementsKind to_kind, - Isolate* isolate) { - HandleScope scope(isolate); - if (!object->IsJSObject()) return isolate->ThrowIllegalOperation(); - ElementsKind from_kind = - Handle<JSObject>::cast(object)->map()->elements_kind(); - if (Map::IsValidElementsTransition(from_kind, to_kind)) { - Handle<Object> result = - TransitionElementsKind(Handle<JSObject>::cast(object), to_kind); - if (result.is_null()) return isolate->ThrowIllegalOperation(); - return *result; - } - return isolate->ThrowIllegalOperation(); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsSmiToDouble) { - NoHandleAllocation ha; - RUNTIME_ASSERT(args.length() == 1); - Handle<Object> object = args.at<Object>(0); - return TransitionElements(object, FAST_DOUBLE_ELEMENTS, isolate); -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsDoubleToObject) { - NoHandleAllocation ha; - RUNTIME_ASSERT(args.length() == 1); - Handle<Object> object = args.at<Object>(0); - return TransitionElements(object, FAST_ELEMENTS, isolate); -} - - // Set the native flag on the function. // This is used to decide if we should transform null and undefined // into the global object when doing call and apply. 
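The hunk above removes TransitionElements and the Runtime_TransitionElementsSmiToDouble / Runtime_TransitionElementsDoubleToObject wrappers, while the Runtime_NonSmiElementStored function added back earlier only knows the smi-only to generic step. A hedged sketch of the store patterns these transitions are about:

  var a = [1, 2, 3];   // starts out with smi-only elements
  a[0] = 1.5;          // first non-Smi number: 3.7.1 could widen to double elements
  a[1] = 'x';          // first non-number: widens to generic FAST_ELEMENTS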
@@ -4825,11 +4751,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) { RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNames) { HandleScope scope(isolate); ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSReceiver, object, 0); - bool threw = false; - Handle<JSArray> result = GetKeysFor(object, &threw); - if (threw) return Failure::Exception(); - return *result; + CONVERT_ARG_CHECKED(JSObject, object, 0); + return *GetKeysFor(object); } @@ -4841,16 +4764,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNames) { RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNamesFast) { ASSERT(args.length() == 1); - CONVERT_CHECKED(JSReceiver, raw_object, args[0]); + CONVERT_CHECKED(JSObject, raw_object, args[0]); if (raw_object->IsSimpleEnum()) return raw_object->map(); HandleScope scope(isolate); - Handle<JSReceiver> object(raw_object); - bool threw = false; - Handle<FixedArray> content = - GetKeysInFixedArrayFor(object, INCLUDE_PROTOS, &threw); - if (threw) return Failure::Exception(); + Handle<JSObject> object(raw_object); + Handle<FixedArray> content = GetKeysInFixedArrayFor(object, + INCLUDE_PROTOS); // Test again, since cache may have been built by preceding call. if (object->IsSimpleEnum()) return object->map(); @@ -5047,11 +4968,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LocalKeys) { object = Handle<JSObject>::cast(proto); } - bool threw = false; - Handle<FixedArray> contents = - GetKeysInFixedArrayFor(object, LOCAL_ONLY, &threw); - if (threw) return Failure::Exception(); - + Handle<FixedArray> contents = GetKeysInFixedArrayFor(object, + LOCAL_ONLY); // Some fast paths through GetKeysInFixedArrayFor reuse a cached // property array and since the result is mutable we have to create // a fresh clone on each invocation. @@ -7844,21 +7762,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateYMDFromTime) { int year, month, day; DateYMDFromTime(static_cast<int>(floor(t / 86400000)), year, month, day); - FixedArrayBase* elms_base = FixedArrayBase::cast(res_array->elements()); - RUNTIME_ASSERT(elms_base->length() == 3); - RUNTIME_ASSERT(res_array->GetElementsKind() <= FAST_DOUBLE_ELEMENTS); + RUNTIME_ASSERT(res_array->elements()->map() == + isolate->heap()->fixed_array_map()); + FixedArray* elms = FixedArray::cast(res_array->elements()); + RUNTIME_ASSERT(elms->length() == 3); - if (res_array->HasFastDoubleElements()) { - FixedDoubleArray* elms = FixedDoubleArray::cast(res_array->elements()); - elms->set(0, year); - elms->set(1, month); - elms->set(2, day); - } else { - FixedArray* elms = FixedArray::cast(res_array->elements()); - elms->set(0, Smi::FromInt(year)); - elms->set(1, Smi::FromInt(month)); - elms->set(2, Smi::FromInt(day)); - } + elms->set(0, Smi::FromInt(year)); + elms->set(1, Smi::FromInt(month)); + elms->set(2, Smi::FromInt(day)); return isolate->heap()->undefined_value(); } @@ -8015,11 +7926,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewClosure) { } -// Find the arguments of the JavaScript function invocation that called -// into C++ code. Collect these in a newly allocated array of handles (possibly -// prefixed by a number of empty handles). -static SmartArrayPointer<Handle<Object> > GetCallerArguments( - int prefix_argc, +static SmartArrayPointer<Handle<Object> > GetNonBoundArguments( + int bound_argc, int* total_argc) { // Find frame containing arguments passed to the caller. 
JavaScriptFrameIterator it; @@ -8035,12 +7943,12 @@ static SmartArrayPointer<Handle<Object> > GetCallerArguments( inlined_frame_index, &args_slots); - *total_argc = prefix_argc + args_count; + *total_argc = bound_argc + args_count; SmartArrayPointer<Handle<Object> > param_data( NewArray<Handle<Object> >(*total_argc)); for (int i = 0; i < args_count; i++) { Handle<Object> val = args_slots[i].GetValue(); - param_data[prefix_argc + i] = val; + param_data[bound_argc + i] = val; } return param_data; } else { @@ -8048,131 +7956,49 @@ static SmartArrayPointer<Handle<Object> > GetCallerArguments( frame = it.frame(); int args_count = frame->ComputeParametersCount(); - *total_argc = prefix_argc + args_count; + *total_argc = bound_argc + args_count; SmartArrayPointer<Handle<Object> > param_data( NewArray<Handle<Object> >(*total_argc)); for (int i = 0; i < args_count; i++) { Handle<Object> val = Handle<Object>(frame->GetParameter(i)); - param_data[prefix_argc + i] = val; + param_data[bound_argc + i] = val; } return param_data; } } -RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) { - HandleScope scope(isolate); - ASSERT(args.length() == 4); - CONVERT_ARG_CHECKED(JSFunction, bound_function, 0); - RUNTIME_ASSERT(args[3]->IsNumber()); - Handle<Object> bindee = args.at<Object>(1); - - // TODO(lrn): Create bound function in C++ code from premade shared info. - bound_function->shared()->set_bound(true); - // Get all arguments of calling function (Function.prototype.bind). - int argc = 0; - SmartArrayPointer<Handle<Object> > arguments = GetCallerArguments(0, &argc); - // Don't count the this-arg. - if (argc > 0) { - ASSERT(*arguments[0] == args[2]); - argc--; - } else { - ASSERT(args[2]->IsUndefined()); - } - // Initialize array of bindings (function, this, and any existing arguments - // if the function was already bound). - Handle<FixedArray> new_bindings; - int i; - if (bindee->IsJSFunction() && JSFunction::cast(*bindee)->shared()->bound()) { - Handle<FixedArray> old_bindings( - JSFunction::cast(*bindee)->function_bindings()); - new_bindings = - isolate->factory()->NewFixedArray(old_bindings->length() + argc); - bindee = Handle<Object>(old_bindings->get(JSFunction::kBoundFunctionIndex)); - i = 0; - for (int n = old_bindings->length(); i < n; i++) { - new_bindings->set(i, old_bindings->get(i)); - } - } else { - int array_size = JSFunction::kBoundArgumentsStartIndex + argc; - new_bindings = isolate->factory()->NewFixedArray(array_size); - new_bindings->set(JSFunction::kBoundFunctionIndex, *bindee); - new_bindings->set(JSFunction::kBoundThisIndex, args[2]); - i = 2; - } - // Copy arguments, skipping the first which is "this_arg". - for (int j = 0; j < argc; j++, i++) { - new_bindings->set(i, *arguments[j + 1]); - } - new_bindings->set_map(isolate->heap()->fixed_cow_array_map()); - bound_function->set_function_bindings(*new_bindings); - - // Update length. 
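Editorial note: the Runtime_FunctionBindArguments removal around this point packs a bound function's state into one flat fixed array: slot 0 holds the target callable, slot 1 the bound receiver, and the remaining slots the pre-bound arguments; binding an already-bound function copies the old array first so chains never nest. A rough standalone model of that layout, using a plain std::vector and a toy Value type instead of V8 handles:

#include <string>
#include <vector>

struct Value { std::string repr; };   // toy stand-in for a JS value

// The index constants document the same layout as kBoundFunctionIndex,
// kBoundThisIndex and kBoundArgumentsStartIndex in the removed code.
struct Bindings {
  static const int kFunctionIndex = 0;
  static const int kThisIndex = 1;
  static const int kArgsStart = 2;
  std::vector<Value> slots;
  bool IsBound() const { return !slots.empty(); }
};

// Bind `args` (and `receiver`) onto `target`, flattening an existing binding.
Bindings Bind(const Bindings& target_bindings, const Value& target,
              const Value& receiver, const std::vector<Value>& args) {
  Bindings result;
  if (target_bindings.IsBound()) {
    // Reuse the old target/this/args; only append the new arguments.
    result.slots = target_bindings.slots;
  } else {
    result.slots.push_back(target);    // kFunctionIndex
    result.slots.push_back(receiver);  // kThisIndex
  }
  result.slots.insert(result.slots.end(), args.begin(), args.end());
  return result;
}

int main() {
  Bindings once = Bind(Bindings{}, {"f"}, {"thisArg"}, {{"1"}});
  Bindings twice = Bind(once, {"ignored"}, {"ignored"}, {{"2"}});
  return twice.slots.size() == 4 ? 0 : 1;   // f, thisArg, 1, 2
}

The removed Runtime_NewObjectFromBound (just below) then copies exactly these argument slots in front of the caller's own arguments before handing the combined array to Execution::New.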
- Handle<String> length_symbol = isolate->factory()->length_symbol(); - Handle<Object> new_length(args.at<Object>(3)); - PropertyAttributes attr = - static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM | READ_ONLY); - ForceSetProperty(bound_function, length_symbol, new_length, attr); - return *bound_function; -} - - -RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionGetBindings) { - HandleScope handles(isolate); - ASSERT(args.length() == 1); - CONVERT_ARG_CHECKED(JSObject, callable, 0); - if (callable->IsJSFunction()) { - Handle<JSFunction> function = Handle<JSFunction>::cast(callable); - if (function->shared()->bound()) { - Handle<FixedArray> bindings(function->function_bindings()); - ASSERT(bindings->map() == isolate->heap()->fixed_cow_array_map()); - return *isolate->factory()->NewJSArrayWithElements(bindings); - } - } - return isolate->heap()->undefined_value(); -} - - RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectFromBound) { HandleScope scope(isolate); - ASSERT(args.length() == 1); + ASSERT(args.length() == 2); // First argument is a function to use as a constructor. CONVERT_ARG_CHECKED(JSFunction, function, 0); - RUNTIME_ASSERT(function->shared()->bound()); - - // The argument is a bound function. Extract its bound arguments - // and callable. - Handle<FixedArray> bound_args = - Handle<FixedArray>(FixedArray::cast(function->function_bindings())); - int bound_argc = bound_args->length() - JSFunction::kBoundArgumentsStartIndex; - Handle<Object> bound_function( - JSReceiver::cast(bound_args->get(JSFunction::kBoundFunctionIndex))); - ASSERT(!bound_function->IsJSFunction() || - !Handle<JSFunction>::cast(bound_function)->shared()->bound()); + + // Second argument is either null or an array of bound arguments. + Handle<FixedArray> bound_args; + int bound_argc = 0; + if (!args[1]->IsNull()) { + CONVERT_ARG_CHECKED(JSArray, params, 1); + RUNTIME_ASSERT(params->HasFastTypeElements()); + bound_args = Handle<FixedArray>(FixedArray::cast(params->elements())); + bound_argc = Smi::cast(params->length())->value(); + } int total_argc = 0; SmartArrayPointer<Handle<Object> > param_data = - GetCallerArguments(bound_argc, &total_argc); + GetNonBoundArguments(bound_argc, &total_argc); for (int i = 0; i < bound_argc; i++) { - param_data[i] = Handle<Object>(bound_args->get( - JSFunction::kBoundArgumentsStartIndex + i)); + Handle<Object> val = Handle<Object>(bound_args->get(i)); + param_data[i] = val; } - if (!bound_function->IsJSFunction()) { - bool exception_thrown; - bound_function = Execution::TryGetConstructorDelegate(bound_function, - &exception_thrown); - if (exception_thrown) return Failure::Exception(); - } - ASSERT(bound_function->IsJSFunction()); - bool exception = false; Handle<Object> result = - Execution::New(Handle<JSFunction>::cast(bound_function), - total_argc, *param_data, &exception); + Execution::New(function, total_argc, *param_data, &exception); if (exception) { - return Failure::Exception(); + return Failure::Exception(); } + ASSERT(!result.is_null()); return *result; } @@ -8185,8 +8011,7 @@ static void TrySettingInlineConstructStub(Isolate* isolate, prototype = Handle<Object>(function->instance_prototype(), isolate); } if (function->shared()->CanGenerateInlineConstructor(*prototype)) { - HandleScope scope(isolate); - ConstructStubCompiler compiler(isolate); + ConstructStubCompiler compiler; MaybeObject* code = compiler.CompileConstructStub(*function); if (!code->IsFailure()) { function->shared()->set_construct_stub( @@ -8250,11 +8075,9 @@ RUNTIME_FUNCTION(MaybeObject*, 
Runtime_NewObject) { // available. We cannot use EnsureCompiled because that forces a // compilation through the shared function info which makes it // impossible for us to optimize. - if (!function->is_compiled()) { - JSFunction::CompileLazy(function, CLEAR_EXCEPTION); - } - Handle<SharedFunctionInfo> shared(function->shared(), isolate); + if (!function->is_compiled()) CompileLazy(function, CLEAR_EXCEPTION); + if (!function->has_initial_map() && shared->IsInobjectSlackTrackingInProgress()) { // The tracking is already in progress for another function. We can only @@ -8305,7 +8128,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyCompile) { // Compile the target function. ASSERT(!function->is_compiled()); - if (!JSFunction::CompileLazy(function, KEEP_EXCEPTION)) { + if (!CompileLazy(function, KEEP_EXCEPTION)) { return Failure::Exception(); } @@ -8342,9 +8165,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) { function->ReplaceCode(function->shared()->code()); return function->code(); } - if (JSFunction::CompileOptimized(function, - AstNode::kNoNumber, - CLEAR_EXCEPTION)) { + if (CompileOptimized(function, AstNode::kNoNumber, CLEAR_EXCEPTION)) { return function->code(); } if (FLAG_trace_opt) { @@ -8585,7 +8406,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) { // Try to compile the optimized code. A true return value from // CompileOptimized means that compilation succeeded, not necessarily // that optimization succeeded. - if (JSFunction::CompileOptimized(function, ast_id, CLEAR_EXCEPTION) && + if (CompileOptimized(function, ast_id, CLEAR_EXCEPTION) && function->IsOptimized()) { DeoptimizationInputData* data = DeoptimizationInputData::cast( function->code()->deoptimization_data()); @@ -8941,26 +8762,13 @@ static ObjectPair LoadContextSlotHelper(Arguments args, Handle<Object> receiver = isolate->factory()->the_hole_value(); Object* value = Context::cast(*holder)->get(index); // Check for uninitialized bindings. - switch (binding_flags) { - case MUTABLE_CHECK_INITIALIZED: - case IMMUTABLE_CHECK_INITIALIZED_HARMONY: - if (value->IsTheHole()) { - Handle<Object> reference_error = - isolate->factory()->NewReferenceError("not_defined", - HandleVector(&name, 1)); - return MakePair(isolate->Throw(*reference_error), NULL); - } - // FALLTHROUGH - case MUTABLE_IS_INITIALIZED: - case IMMUTABLE_IS_INITIALIZED: - case IMMUTABLE_IS_INITIALIZED_HARMONY: - ASSERT(!value->IsTheHole()); - return MakePair(value, *receiver); - case IMMUTABLE_CHECK_INITIALIZED: - return MakePair(Unhole(isolate->heap(), value, attributes), *receiver); - case MISSING_BINDING: - UNREACHABLE(); - return MakePair(NULL, NULL); + if (binding_flags == MUTABLE_CHECK_INITIALIZED && value->IsTheHole()) { + Handle<Object> reference_error = + isolate->factory()->NewReferenceError("not_defined", + HandleVector(&name, 1)); + return MakePair(isolate->Throw(*reference_error), NULL); + } else { + return MakePair(Unhole(isolate->heap(), value, attributes), *receiver); } } @@ -9139,6 +8947,42 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) { } +// NOTE: These PrintXXX functions are defined for all builds (not just +// DEBUG builds) because we may want to be able to trace function +// calls in all modes. 
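Editorial note: a few hunks up, LoadContextSlotHelper collapses a switch over binding flags back to a single test: if the slot still holds the hole sentinel and the binding must be initialized before use, a ReferenceError is thrown; otherwise the hole is laundered into undefined. A compact sketch of that sentinel pattern, where std::optional plays the role of the hole and the enum is an illustrative subset rather than V8's full BindingFlags:

#include <optional>
#include <stdexcept>
#include <string>

enum BindingFlags {               // illustrative subset
  MUTABLE_IS_INITIALIZED,
  MUTABLE_CHECK_INITIALIZED,      // e.g. a binding that must not be read early
  IMMUTABLE_CHECK_INITIALIZED
};

// A context slot: std::nullopt models "the hole" (not yet initialized).
using Slot = std::optional<std::string>;

std::string LoadContextSlot(const Slot& slot, BindingFlags flags,
                            const std::string& name) {
  if (flags == MUTABLE_CHECK_INITIALIZED && !slot.has_value()) {
    // Reading an uninitialized checked binding is a ReferenceError.
    throw std::runtime_error(name + " is not defined");
  }
  // "Unhole": treat a leftover hole as undefined in the remaining cases.
  return slot.value_or("undefined");
}

int main() {
  Slot uninitialized;                     // the hole
  Slot ready = std::string("42");
  LoadContextSlot(ready, MUTABLE_CHECK_INITIALIZED, "x");
  try {
    LoadContextSlot(uninitialized, MUTABLE_CHECK_INITIALIZED, "y");
  } catch (const std::runtime_error&) {
    return 0;                             // expected path
  }
  return 1;
}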
+static void PrintString(String* str) { + // not uncommon to have empty strings + if (str->length() > 0) { + SmartArrayPointer<char> s = + str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL); + PrintF("%s", *s); + } +} + + +static void PrintObject(Object* obj) { + if (obj->IsSmi()) { + PrintF("%d", Smi::cast(obj)->value()); + } else if (obj->IsString() || obj->IsSymbol()) { + PrintString(String::cast(obj)); + } else if (obj->IsNumber()) { + PrintF("%g", obj->Number()); + } else if (obj->IsFailure()) { + PrintF("<failure>"); + } else if (obj->IsUndefined()) { + PrintF("<undefined>"); + } else if (obj->IsNull()) { + PrintF("<null>"); + } else if (obj->IsTrue()) { + PrintF("<true>"); + } else if (obj->IsFalse()) { + PrintF("<false>"); + } else { + PrintF("%p", reinterpret_cast<void*>(obj)); + } +} + + static int StackSize() { int n = 0; for (JavaScriptFrameIterator it; !it.done(); it.Advance()) n++; @@ -9157,33 +9001,38 @@ static void PrintTransition(Object* result) { } if (result == NULL) { - JavaScriptFrame::PrintTop(stdout, true, false); - PrintF(" {\n"); + // constructor calls + JavaScriptFrameIterator it; + JavaScriptFrame* frame = it.frame(); + if (frame->IsConstructor()) PrintF("new "); + // function name + Object* fun = frame->function(); + if (fun->IsJSFunction()) { + PrintObject(JSFunction::cast(fun)->shared()->name()); + } else { + PrintObject(fun); + } + // function arguments + // (we are intentionally only printing the actually + // supplied parameters, not all parameters required) + PrintF("(this="); + PrintObject(frame->receiver()); + const int length = frame->ComputeParametersCount(); + for (int i = 0; i < length; i++) { + PrintF(", "); + PrintObject(frame->GetParameter(i)); + } + PrintF(") {\n"); + } else { // function result PrintF("} -> "); - result->ShortPrint(); + PrintObject(result); PrintF("\n"); } } -RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceElementsKindTransition) { - ASSERT(args.length() == 5); - CONVERT_ARG_CHECKED(JSObject, obj, 0); - CONVERT_SMI_ARG_CHECKED(from_kind, 1); - CONVERT_ARG_CHECKED(FixedArrayBase, from_elements, 2); - CONVERT_SMI_ARG_CHECKED(to_kind, 3); - CONVERT_ARG_CHECKED(FixedArrayBase, to_elements, 4); - NoHandleAllocation ha; - PrintF("*"); - obj->PrintElementsTransition(stdout, - static_cast<ElementsKind>(from_kind), *from_elements, - static_cast<ElementsKind>(to_kind), *to_elements); - return isolate->heap()->undefined_value(); -} - - RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceEnter) { ASSERT(args.length() == 0); NoHandleAllocation ha; @@ -9932,8 +9781,8 @@ static bool IterateElements(Isolate* isolate, } else if (receiver->HasElement(j)) { // Call GetElement on receiver, not its prototype, or getters won't // have the correct receiver. - element_value = Object::GetElement(receiver, j); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, element_value, false); + element_value = GetElement(receiver, j); + if (element_value.is_null()) return false; visitor->visit(j, element_value); } } @@ -9951,8 +9800,8 @@ static bool IterateElements(Isolate* isolate, while (j < n) { HandleScope loop_scope; uint32_t index = indices[j]; - Handle<Object> element = Object::GetElement(receiver, index); - RETURN_IF_EMPTY_HANDLE_VALUE(isolate, element, false); + Handle<Object> element = GetElement(receiver, index); + if (element.is_null()) return false; visitor->visit(index, element); // Skip to next different index (i.e., omit duplicates). 
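Editorial note: the PrintString/PrintObject helpers and the PrintTransition tracer restored above render a call as name(this=receiver, arg0, arg1) { on entry and } -> result on exit, dispatching on the runtime type of each value. A self-contained imitation of that dispatch-and-trace idea, with a small tagged struct instead of V8's Object hierarchy:

#include <cstdio>
#include <string>
#include <vector>

// Tiny tagged value standing in for Smi / String / undefined.
struct Val {
  enum Kind { kInt, kStr, kUndefined } kind;
  int i = 0;
  std::string s;
};

void PrintVal(const Val& v) {
  switch (v.kind) {
    case Val::kInt:       std::printf("%d", v.i); break;
    case Val::kStr:       std::printf("%s", v.s.c_str()); break;
    case Val::kUndefined: std::printf("<undefined>"); break;
  }
}

// Entry half of the trace: "name(this=receiver, arg0, arg1) {"
void TraceEnter(const std::string& name, const Val& receiver,
                const std::vector<Val>& args) {
  std::printf("%s(this=", name.c_str());
  PrintVal(receiver);
  for (const Val& a : args) { std::printf(", "); PrintVal(a); }
  std::printf(") {\n");
}

// Exit half: "} -> result"
void TraceExit(const Val& result) {
  std::printf("} -> ");
  PrintVal(result);
  std::printf("\n");
}

int main() {
  TraceEnter("add", {Val::kUndefined}, {{Val::kInt, 1}, {Val::kInt, 2}});
  TraceExit({Val::kInt, 3, ""});
  return 0;
}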
do { @@ -10202,9 +10051,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SwapElements) { } Handle<JSObject> jsobject = Handle<JSObject>::cast(object); - Handle<Object> tmp1 = Object::GetElement(jsobject, index1); + Handle<Object> tmp1 = GetElement(jsobject, index1); RETURN_IF_EMPTY_HANDLE(isolate, tmp1); - Handle<Object> tmp2 = Object::GetElement(jsobject, index2); + Handle<Object> tmp2 = GetElement(jsobject, index2); RETURN_IF_EMPTY_HANDLE(isolate, tmp2); RETURN_IF_EMPTY_HANDLE(isolate, @@ -10229,11 +10078,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) { if (array->elements()->IsDictionary()) { // Create an array and get all the keys into it, then remove all the // keys that are not integers in the range 0 to length-1. - bool threw = false; - Handle<FixedArray> keys = - GetKeysInFixedArrayFor(array, INCLUDE_PROTOS, &threw); - if (threw) return Failure::Exception(); - + Handle<FixedArray> keys = GetKeysInFixedArrayFor(array, INCLUDE_PROTOS); int keys_length = keys->length(); for (int i = 0; i < keys_length; i++) { Object* key = keys->get(i); @@ -10458,7 +10303,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) { // Try local lookup on each of the objects. Handle<JSObject> jsproto = obj; for (int i = 0; i < length; i++) { - LookupResult result(isolate); + LookupResult result; jsproto->LocalLookup(*name, &result); if (result.IsProperty()) { // LookupResult is not GC safe as it holds raw object pointers. @@ -10515,7 +10360,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetProperty) { CONVERT_ARG_CHECKED(JSObject, obj, 0); CONVERT_ARG_CHECKED(String, name, 1); - LookupResult result(isolate); + LookupResult result; obj->Lookup(*name, &result); if (result.IsProperty()) { return DebugLookupResultValue(isolate->heap(), *obj, *name, &result, NULL); @@ -11052,11 +10897,7 @@ static Handle<JSObject> MaterializeLocalScope( if (function_context->has_extension() && !function_context->IsGlobalContext()) { Handle<JSObject> ext(JSObject::cast(function_context->extension())); - bool threw = false; - Handle<FixedArray> keys = - GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS, &threw); - if (threw) return Handle<JSObject>(); - + Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS); for (int i = 0; i < keys->length(); i++) { // Names of variables introduced by eval are strings. ASSERT(keys->get(i)->IsString()); @@ -11104,11 +10945,7 @@ static Handle<JSObject> MaterializeClosure(Isolate* isolate, // be variables introduced by eval. if (context->has_extension()) { Handle<JSObject> ext(JSObject::cast(context->extension())); - bool threw = false; - Handle<FixedArray> keys = - GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS, &threw); - if (threw) return Handle<JSObject>(); - + Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS); for (int i = 0; i < keys->length(); i++) { // Names of variables introduced by eval are strings. ASSERT(keys->get(i)->IsString()); @@ -11173,10 +11010,9 @@ static Handle<JSObject> MaterializeBlockScope( } -// Iterate over the actual scopes visible from a stack frame. The iteration -// proceeds from the innermost visible nested scope outwards. All scopes are +// Iterate over the actual scopes visible from a stack frame. All scopes are // backed by an actual context except the local scope, which is inserted -// "artificially" in the context chain. +// "artifically" in the context chain. 
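Editorial note: the MaterializeLocalScope and MaterializeClosure hunks above share one recipe: enumerate the keys of a context's extension object (where eval-introduced variables live) and copy each name/value pair into a fresh object the debugger can inspect. A minimal sketch of that materialization step using ordinary maps; the shapes here are assumptions and nothing is debugger-specific:

#include <map>
#include <string>

// Stand-ins: a "context extension" holding eval-introduced variables, and the
// plain object the debugger materializes them into.
using Extension = std::map<std::string, std::string>;
using JSObjectModel = std::map<std::string, std::string>;

// Copy every property of the extension into a fresh object, mirroring the
// GetKeysInFixedArrayFor + SetProperty loop in the hunks above.
JSObjectModel MaterializeScope(const Extension& extension) {
  JSObjectModel scope_object;
  for (const auto& entry : extension) {
    // Names of variables introduced by eval are strings, so keys copy as-is.
    scope_object[entry.first] = entry.second;
  }
  return scope_object;
}

int main() {
  Extension ext{{"x", "1"}, {"y", "'hi'"}};
  return MaterializeScope(ext).size() == 2 ? 0 : 1;
}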
class ScopeIterator { public: enum ScopeType { @@ -11196,52 +11032,28 @@ class ScopeIterator { inlined_frame_index_(inlined_frame_index), function_(JSFunction::cast(frame->function())), context_(Context::cast(frame->context())), - nested_scope_chain_(4) { + local_done_(false), + at_local_(false) { - // Catch the case when the debugger stops in an internal function. - Handle<SharedFunctionInfo> shared_info(function_->shared()); - if (shared_info->script() == isolate->heap()->undefined_value()) { - while (context_->closure() == *function_) { - context_ = Handle<Context>(context_->previous(), isolate_); - } - return; - } - - // Check whether we are in global code or function code. If there is a stack - // slot for .result then this function has been created for evaluating - // global code and it is not a real function. + // Check whether the first scope is actually a local scope. + // If there is a stack slot for .result then this local scope has been + // created for evaluating top level code and it is not a real local scope. // Checking for the existence of .result seems fragile, but the scope info // saved with the code object does not otherwise have that information. - int index = shared_info->scope_info()-> + int index = function_->shared()->scope_info()-> StackSlotIndex(isolate_->heap()->result_symbol()); - - // Reparse the code and analyze the scopes. - ZoneScope zone_scope(isolate, DELETE_ON_EXIT); - Handle<Script> script(Script::cast(shared_info->script())); - Scope* scope; if (index >= 0) { - // Global code - CompilationInfo info(script); - info.MarkAsGlobal(); - bool result = ParserApi::Parse(&info); - ASSERT(result); - result = Scope::Analyze(&info); - ASSERT(result); - scope = info.function()->scope(); - } else { - // Function code - CompilationInfo info(shared_info); - bool result = ParserApi::Parse(&info); - ASSERT(result); - result = Scope::Analyze(&info); - ASSERT(result); - scope = info.function()->scope(); + local_done_ = true; + } else if (context_->IsGlobalContext() || + context_->IsFunctionContext()) { + at_local_ = true; + } else if (context_->closure() != *function_) { + // The context_ is a block or with or catch block from the outer function. + ASSERT(context_->IsWithContext() || + context_->IsCatchContext() || + context_->IsBlockContext()); + at_local_ = true; } - - // Retrieve the scope chain for the current position. - int statement_position = - shared_info->code()->SourceStatementPosition(frame_->pc()); - scope->GetNestedScopeChain(&nested_scope_chain_, statement_position); } // More scopes? @@ -11249,48 +11061,40 @@ class ScopeIterator { // Move to the next scope. void Next() { - ScopeType scope_type = Type(); - if (scope_type == ScopeTypeGlobal) { - // The global scope is always the last in the chain. - ASSERT(context_->IsGlobalContext()); + // If at a local scope mark the local scope as passed. + if (at_local_) { + at_local_ = false; + local_done_ = true; + + // If the current context is not associated with the local scope the + // current context is the next real scope, so don't move to the next + // context in this case. + if (context_->closure() != *function_) { + return; + } + } + + // The global scope is always the last in the chain. 
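Editorial note: the ScopeIterator being restored here relies on two booleans, at_local_ and local_done_, to splice a synthetic local scope into its walk over the actual context chain, which only contains global, function, with, catch and block contexts. The toy iterator below reproduces that splice over a plain linked list; the kinds and the closure checks are simplified stand-ins.

#include <cstdio>

enum ContextKind { kGlobal, kFunction, kWith, kCatch, kBlock };

struct Context {                    // simplified context chain node
  ContextKind kind;
  const Context* previous;          // outer context, null past global
};

enum ScopeType { ScopeTypeGlobal, ScopeTypeLocal, ScopeTypeWith,
                 ScopeTypeCatch, ScopeTypeBlock };

class ScopeIterator {
 public:
  explicit ScopeIterator(const Context* context)
      : context_(context), local_done_(false),
        at_local_(context->kind == kFunction || context->kind == kGlobal) {}

  bool Done() const { return context_ == nullptr; }

  ScopeType Type() const {
    if (at_local_) return ScopeTypeLocal;
    switch (context_->kind) {
      case kGlobal:   return ScopeTypeGlobal;
      case kWith:     return ScopeTypeWith;
      case kCatch:    return ScopeTypeCatch;
      case kBlock:    return ScopeTypeBlock;
      case kFunction: return ScopeTypeLocal;
    }
    return ScopeTypeGlobal;
  }

  void Next() {
    if (at_local_) {                       // leave the synthetic local scope
      at_local_ = false;
      local_done_ = true;
      // If the local scope was spliced in before an unrelated context (a
      // catch/block from the outer function), that context is the next scope.
      if (context_->kind != kFunction && context_->kind != kGlobal) return;
    }
    if (context_->kind == kGlobal) {       // global is always last
      context_ = nullptr;
      return;
    }
    context_ = context_->previous;
    if (!local_done_ &&
        (context_->kind == kGlobal || context_->kind == kFunction)) {
      at_local_ = true;                    // next report is the local scope
    }
  }

 private:
  const Context* context_;
  bool local_done_;
  bool at_local_;
};

int main() {
  Context global{kGlobal, nullptr};
  Context function{kFunction, &global};
  Context with{kWith, &function};
  // Expect ScopeTypeWith, then the synthetic ScopeTypeLocal, then ScopeTypeGlobal.
  for (ScopeIterator it(&with); !it.Done(); it.Next()) {
    std::printf("scope type %d\n", static_cast<int>(it.Type()));
  }
  return 0;
}

Walking with -> function -> global yields With, Local, Global, the same order the restored iterator reports scopes in for that shape of chain.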
+ if (context_->IsGlobalContext()) { context_ = Handle<Context>(); return; } - if (nested_scope_chain_.is_empty()) { - context_ = Handle<Context>(context_->previous(), isolate_); - } else { - if (nested_scope_chain_.last()->HasContext()) { - context_ = Handle<Context>(context_->previous(), isolate_); - } - nested_scope_chain_.RemoveLast(); + + // Move to the next context. + context_ = Handle<Context>(context_->previous(), isolate_); + + // If passing the local scope indicate that the current scope is now the + // local scope. + if (!local_done_ && + (context_->IsGlobalContext() || context_->IsFunctionContext())) { + at_local_ = true; } } // Return the type of the current scope. ScopeType Type() { - if (!nested_scope_chain_.is_empty()) { - Handle<SerializedScopeInfo> scope_info = nested_scope_chain_.last(); - switch (scope_info->Type()) { - case FUNCTION_SCOPE: - ASSERT(context_->IsFunctionContext() || - !scope_info->HasContext()); - return ScopeTypeLocal; - case GLOBAL_SCOPE: - ASSERT(context_->IsGlobalContext()); - return ScopeTypeGlobal; - case WITH_SCOPE: - ASSERT(context_->IsWithContext()); - return ScopeTypeWith; - case CATCH_SCOPE: - ASSERT(context_->IsCatchContext()); - return ScopeTypeCatch; - case BLOCK_SCOPE: - ASSERT(!scope_info->HasContext() || - context_->IsBlockContext()); - return ScopeTypeBlock; - case EVAL_SCOPE: - UNREACHABLE(); - } + if (at_local_) { + return ScopeTypeLocal; } if (context_->IsGlobalContext()) { ASSERT(context_->global()->IsGlobalObject()); @@ -11316,7 +11120,6 @@ class ScopeIterator { return Handle<JSObject>(CurrentContext()->global()); case ScopeIterator::ScopeTypeLocal: // Materialize the content of the local scope into a JSObject. - ASSERT(nested_scope_chain_.length() == 1); return MaterializeLocalScope(isolate_, frame_, inlined_frame_index_); case ScopeIterator::ScopeTypeWith: // Return the with object. @@ -11333,30 +11136,13 @@ class ScopeIterator { return Handle<JSObject>(); } - Handle<SerializedScopeInfo> CurrentScopeInfo() { - if (!nested_scope_chain_.is_empty()) { - return nested_scope_chain_.last(); - } else if (context_->IsBlockContext()) { - return Handle<SerializedScopeInfo>( - SerializedScopeInfo::cast(context_->extension())); - } else if (context_->IsFunctionContext()) { - return Handle<SerializedScopeInfo>( - context_->closure()->shared()->scope_info()); - } - return Handle<SerializedScopeInfo>::null(); - } - // Return the context for this scope. For the local context there might not // be an actual context. Handle<Context> CurrentContext() { - if (Type() == ScopeTypeGlobal || - nested_scope_chain_.is_empty()) { - return context_; - } else if (nested_scope_chain_.last()->HasContext()) { - return context_; - } else { + if (at_local_ && context_->closure() != *function_) { return Handle<Context>(); } + return context_; } #ifdef DEBUG @@ -11419,7 +11205,8 @@ class ScopeIterator { int inlined_frame_index_; Handle<JSFunction> function_; Handle<Context> context_; - List<Handle<SerializedScopeInfo> > nested_scope_chain_; + bool local_done_; + bool at_local_; DISALLOW_IMPLICIT_CONSTRUCTORS(ScopeIterator); }; @@ -11734,7 +11521,7 @@ Object* Runtime::FindSharedFunctionInfoInScript(Isolate* isolate, if (!done) { // If the candidate is not compiled compile it to reveal any inner // functions which might contain the requested source position. - SharedFunctionInfo::CompileLazy(target, KEEP_EXCEPTION); + CompileLazyShared(target, KEEP_EXCEPTION); } } // End while loop. 
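Editorial note: several call sites in this region (Runtime_NewObject, Runtime_LazyCompile, FindSharedFunctionInfoInScript) go through the same lazy-compilation gate: check is_compiled(), compile on demand, and decide whether a failure keeps or clears the pending exception. A small standalone sketch of that gate with a memoized compile step and an explicit keep/clear policy; the types and the "empty source is a syntax error" rule are invented for illustration.

#include <optional>
#include <string>

enum ExceptionPolicy { CLEAR_EXCEPTION, KEEP_EXCEPTION };

struct Function {
  std::string source;
  std::optional<std::string> code;          // empty until compiled
  bool is_compiled() const { return code.has_value(); }
};

struct VM {
  std::optional<std::string> pending_exception;

  // Compile on demand; report success and honour the exception policy.
  bool CompileLazy(Function* f, ExceptionPolicy policy) {
    if (f->is_compiled()) return true;       // fast path: already compiled
    if (f->source.empty()) {                 // pretend this is a syntax error
      pending_exception = "SyntaxError";
      if (policy == CLEAR_EXCEPTION) pending_exception.reset();
      return false;
    }
    f->code = "code(" + f->source + ")";     // the "compilation"
    return true;
  }
};

int main() {
  VM vm;
  Function ok{"x + 1", std::nullopt};
  Function broken{"", std::nullopt};
  bool a = vm.CompileLazy(&ok, KEEP_EXCEPTION);       // compiles once
  bool b = vm.CompileLazy(&ok, KEEP_EXCEPTION);       // memoized fast path
  bool c = vm.CompileLazy(&broken, CLEAR_EXCEPTION);  // fails, exception cleared
  return (a && b && !c && !vm.pending_exception) ? 0 : 1;
}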
@@ -11882,65 +11669,46 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearStepping) { // Creates a copy of the with context chain. The copy of the context chain is // is linked to the function context supplied. -static Handle<Context> CopyNestedScopeContextChain(Isolate* isolate, - Handle<JSFunction> function, - Handle<Context> base, - JavaScriptFrame* frame, - int inlined_frame_index) { - HandleScope scope(isolate); - List<Handle<SerializedScopeInfo> > scope_chain; - List<Handle<Context> > context_chain; - - ScopeIterator it(isolate, frame, inlined_frame_index); - for (; it.Type() != ScopeIterator::ScopeTypeGlobal && - it.Type() != ScopeIterator::ScopeTypeLocal ; it.Next()) { - ASSERT(!it.Done()); - scope_chain.Add(it.CurrentScopeInfo()); - context_chain.Add(it.CurrentContext()); +static Handle<Context> CopyWithContextChain(Isolate* isolate, + Handle<JSFunction> function, + Handle<Context> current, + Handle<Context> base) { + // At the end of the chain. Return the base context to link to. + if (current->IsFunctionContext() || current->IsGlobalContext()) { + return base; } - // At the end of the chain. Return the base context to link to. - Handle<Context> context = base; - - // Iteratively copy and or materialize the nested contexts. - while (!scope_chain.is_empty()) { - Handle<SerializedScopeInfo> scope_info = scope_chain.RemoveLast(); - Handle<Context> current = context_chain.RemoveLast(); - ASSERT(!(scope_info->HasContext() & current.is_null())); - - if (scope_info->Type() == CATCH_SCOPE) { - Handle<String> name(String::cast(current->extension())); - Handle<Object> thrown_object(current->get(Context::THROWN_OBJECT_INDEX)); - context = - isolate->factory()->NewCatchContext(function, - context, - name, - thrown_object); - } else if (scope_info->Type() == BLOCK_SCOPE) { - // Materialize the contents of the block scope into a JSObject. - Handle<JSObject> block_scope_object = - MaterializeBlockScope(isolate, current); - if (block_scope_object.is_null()) { - return Handle<Context>::null(); - } - // Allocate a new function context for the debug evaluation and set the - // extension object. - Handle<Context> new_context = - isolate->factory()->NewFunctionContext(Context::MIN_CONTEXT_SLOTS, - function); - new_context->set_extension(*block_scope_object); - new_context->set_previous(*context); - context = new_context; - } else { - ASSERT(scope_info->Type() == WITH_SCOPE); - ASSERT(current->IsWithContext()); - Handle<JSObject> extension(JSObject::cast(current->extension())); - context = - isolate->factory()->NewWithContext(function, context, extension); + // Recursively copy the with and catch contexts. + HandleScope scope(isolate); + Handle<Context> previous(current->previous()); + Handle<Context> new_previous = + CopyWithContextChain(isolate, function, previous, base); + Handle<Context> new_current; + if (current->IsCatchContext()) { + Handle<String> name(String::cast(current->extension())); + Handle<Object> thrown_object(current->get(Context::THROWN_OBJECT_INDEX)); + new_current = + isolate->factory()->NewCatchContext(function, + new_previous, + name, + thrown_object); + } else if (current->IsBlockContext()) { + Handle<SerializedScopeInfo> scope_info( + SerializedScopeInfo::cast(current->extension())); + new_current = + isolate->factory()->NewBlockContext(function, new_previous, scope_info); + // Copy context slots. 
+ int num_context_slots = scope_info->NumberOfContextSlots(); + for (int i = Context::MIN_CONTEXT_SLOTS; i < num_context_slots; ++i) { + new_current->set(i, current->get(i)); } + } else { + ASSERT(current->IsWithContext()); + Handle<JSObject> extension(JSObject::cast(current->extension())); + new_current = + isolate->factory()->NewWithContext(function, new_previous, extension); } - - return scope.CloseAndEscape(context); + return scope.CloseAndEscape(new_current); } @@ -12078,11 +11846,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) { if (scope_info->HasHeapAllocatedLocals()) { function_context = Handle<Context>(frame_context->declaration_context()); } - context = CopyNestedScopeContextChain(isolate, - go_between, - context, - frame, - inlined_frame_index); + context = CopyWithContextChain(isolate, go_between, frame_context, context); if (additional_context->IsJSObject()) { Handle<JSObject> extension = Handle<JSObject>::cast(additional_context); @@ -12481,7 +12245,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) { // Get the function and make sure it is compiled. CONVERT_ARG_CHECKED(JSFunction, func, 0); Handle<SharedFunctionInfo> shared(func->shared()); - if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) { + if (!EnsureCompiled(shared, KEEP_EXCEPTION)) { return Failure::Exception(); } func->code()->PrintLn(); @@ -12497,7 +12261,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) { // Get the function and make sure it is compiled. CONVERT_ARG_CHECKED(JSFunction, func, 0); Handle<SharedFunctionInfo> shared(func->shared()); - if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) { + if (!EnsureCompiled(shared, KEEP_EXCEPTION)) { return Failure::Exception(); } shared->construct_stub()->PrintLn(); @@ -13103,32 +12867,34 @@ static bool ShowFrameInStackTrace(StackFrame* raw_frame, Object* caller, bool* seen_caller) { // Only display JS frames. - if (!raw_frame->is_java_script()) { + if (!raw_frame->is_java_script()) return false; - } JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame); Object* raw_fun = frame->function(); // Not sure when this can happen but skip it just in case. - if (!raw_fun->IsJSFunction()) { + if (!raw_fun->IsJSFunction()) return false; - } if ((raw_fun == caller) && !(*seen_caller)) { *seen_caller = true; return false; } // Skip all frames until we've seen the caller. if (!(*seen_caller)) return false; - // Also, skip non-visible built-in functions and any call with the builtins - // object as receiver, so as to not reveal either the builtins object or - // an internal function. - // The --builtins-in-stack-traces command line flag allows including - // internal call sites in the stack trace for debugging purposes. - if (!FLAG_builtins_in_stack_traces) { - JSFunction* fun = JSFunction::cast(raw_fun); - if (frame->receiver()->IsJSBuiltinsObject() || - (fun->IsBuiltin() && !fun->shared()->native())) { - return false; - } + // Also, skip the most obvious builtin calls. We recognize builtins + // as (1) functions called with the builtins object as the receiver and + // as (2) functions from native scripts called with undefined as the + // receiver (direct calls to helper functions in the builtins + // code). Some builtin calls (such as Number.ADD which is invoked + // using 'call') are very difficult to recognize so we're leaving + // them in for now. 
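Editorial note: the ShowFrameInStackTrace heuristic being restored in the hunk above recognizes builtin frames in two ways: the receiver is the builtins object, or the receiver is undefined and the function's script is a native script. A compact model of that frame filter, with boolean fields standing in for the real frame, receiver and script checks (the seen_caller bookkeeping is omitted):

#include <cstddef>
#include <vector>

struct Frame {                       // simplified JavaScript frame
  bool is_java_script;
  bool receiver_is_builtins_object;
  bool receiver_is_undefined;
  bool script_is_native;
};

// Mirrors the restored heuristic: hide builtins-object calls and direct calls
// into native-script helpers; keep everything else.
bool ShowFrameInStackTrace(const Frame& f) {
  if (!f.is_java_script) return false;
  if (f.receiver_is_builtins_object) return false;
  if (f.receiver_is_undefined && f.script_is_native) return false;
  return true;
}

int main() {
  std::vector<Frame> frames = {
      {true, false, false, false},   // ordinary user frame: shown
      {true, true,  false, false},   // called on the builtins object: hidden
      {true, false, true,  true},    // native-script helper: hidden
  };
  std::size_t shown = 0;
  for (const Frame& f : frames) shown += ShowFrameInStackTrace(f);
  return shown == 1 ? 0 : 1;
}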
+ if (frame->receiver()->IsJSBuiltinsObject()) { + return false; + } + JSFunction* fun = JSFunction::cast(raw_fun); + Object* raw_script = fun->shared()->script(); + if (frame->receiver()->IsUndefined() && raw_script->IsScript()) { + int script_type = Script::cast(raw_script)->type()->value(); + return script_type != Script::TYPE_NATIVE; } return true; } @@ -13275,9 +13041,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) { } #ifdef DEBUG - if (FLAG_verify_heap) { - cache_handle->JSFunctionResultCacheVerify(); - } + cache_handle->JSFunctionResultCacheVerify(); #endif // Function invocation may have cleared the cache. Reread all the data. @@ -13306,9 +13070,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) { cache_handle->set_finger_index(index); #ifdef DEBUG - if (FLAG_verify_heap) { - cache_handle->JSFunctionResultCacheVerify(); - } + cache_handle->JSFunctionResultCacheVerify(); #endif return *value; diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h index 67fc6282a..ed9c2b889 100644 --- a/deps/v8/src/runtime.h +++ b/deps/v8/src/runtime.h @@ -211,14 +211,14 @@ namespace internal { /* Reflection */ \ F(FunctionSetInstanceClassName, 2, 1) \ F(FunctionSetLength, 2, 1) \ + F(BoundFunctionSetLength, 2, 1) \ F(FunctionSetPrototype, 2, 1) \ F(FunctionSetReadOnlyPrototype, 1, 1) \ F(FunctionGetName, 1, 1) \ F(FunctionSetName, 2, 1) \ F(FunctionNameShouldPrintAsAnonymous, 1, 1) \ F(FunctionMarkNameShouldPrintAsAnonymous, 1, 1) \ - F(FunctionBindArguments, 4, 1) \ - F(BoundFunctionGetBindings, 1, 1) \ + F(FunctionSetBound, 1, 1) \ F(FunctionRemovePrototype, 1, 1) \ F(FunctionGetSourceCode, 1, 1) \ F(FunctionGetScript, 1, 1) \ @@ -278,7 +278,7 @@ namespace internal { \ /* Literals */ \ F(MaterializeRegExpLiteral, 4, 1)\ - F(CreateArrayLiteralBoilerplate, 4, 1) \ + F(CreateArrayLiteralBoilerplate, 3, 1) \ F(CloneLiteralBoilerplate, 1, 1) \ F(CloneShallowLiteralBoilerplate, 1, 1) \ F(CreateObjectLiteral, 4, 1) \ @@ -296,17 +296,6 @@ namespace internal { F(GetConstructTrap, 1, 1) \ F(Fix, 1, 1) \ \ - /* Harmony sets */ \ - F(SetInitialize, 1, 1) \ - F(SetAdd, 2, 1) \ - F(SetHas, 2, 1) \ - F(SetDelete, 2, 1) \ - \ - /* Harmony maps */ \ - F(MapInitialize, 1, 1) \ - F(MapGet, 2, 1) \ - F(MapSet, 3, 1) \ - \ /* Harmony weakmaps */ \ F(WeakMapInitialize, 1, 1) \ F(WeakMapGet, 2, 1) \ @@ -315,7 +304,7 @@ namespace internal { /* Statements */ \ F(NewClosure, 3, 1) \ F(NewObject, 1, 1) \ - F(NewObjectFromBound, 1, 1) \ + F(NewObjectFromBound, 2, 1) \ F(FinalizeInstanceSize, 1, 1) \ F(Throw, 1, 1) \ F(ReThrow, 1, 1) \ @@ -341,10 +330,11 @@ namespace internal { F(InitializeConstContextSlot, 3, 1) \ F(OptimizeObjectForAddingMultipleProperties, 2, 1) \ \ + /* Arrays */ \ + F(NonSmiElementStored, 1, 1) \ /* Debugging */ \ F(DebugPrint, 1, 1) \ F(DebugTrace, 0, 1) \ - F(TraceElementsKindTransition, 5, 1) \ F(TraceEnter, 0, 1) \ F(TraceExit, 1, 1) \ F(Abort, 2, 1) \ @@ -380,8 +370,6 @@ namespace internal { F(HasExternalUnsignedIntElements, 1, 1) \ F(HasExternalFloatElements, 1, 1) \ F(HasExternalDoubleElements, 1, 1) \ - F(TransitionElementsSmiToDouble, 1, 1) \ - F(TransitionElementsDoubleToObject, 1, 1) \ F(HaveSameMap, 2, 1) \ /* profiler */ \ F(ProfilerResume, 0, 1) \ @@ -640,14 +628,16 @@ class Runtime : public AllStatic { static bool IsUpperCaseChar(RuntimeState* runtime_state, uint16_t ch); - // TODO(1240886): Some of the following methods are *not* handle safe, but - // accept handle arguments. This seems fragile. 
+ // TODO(1240886): The following three methods are *not* handle safe, + // but accept handle arguments. This seems fragile. // Support getting the characters in a string using [] notation as // in Firefox/SpiderMonkey, Safari and Opera. MUST_USE_RESULT static MaybeObject* GetElementOrCharAt(Isolate* isolate, Handle<Object> object, uint32_t index); + MUST_USE_RESULT static MaybeObject* GetElement(Handle<Object> object, + uint32_t index); MUST_USE_RESULT static MaybeObject* SetObjectProperty( Isolate* isolate, @@ -687,9 +677,11 @@ class Runtime : public AllStatic { //--------------------------------------------------------------------------- // Constants used by interface to runtime functions. -class DeclareGlobalsEvalFlag: public BitField<bool, 0, 1> {}; -class DeclareGlobalsStrictModeFlag: public BitField<StrictModeFlag, 1, 1> {}; -class DeclareGlobalsNativeFlag: public BitField<bool, 2, 1> {}; +enum kDeclareGlobalsFlags { + kDeclareGlobalsEvalFlag = 1 << 0, + kDeclareGlobalsStrictModeFlag = 1 << 1, + kDeclareGlobalsNativeFlag = 1 << 2 +}; } } // namespace v8::internal diff --git a/deps/v8/src/runtime.js b/deps/v8/src/runtime.js index 108b928ea..a12f6c7b0 100644 --- a/deps/v8/src/runtime.js +++ b/deps/v8/src/runtime.js @@ -375,12 +375,6 @@ function INSTANCE_OF(F) { return 1; } - // Check if function is bound, if so, get [[BoundFunction]] from it - // and use that instead of F. - var bindings = %BoundFunctionGetBindings(F); - if (bindings) { - F = bindings[kBoundFunctionIndex]; // Always a non-bound function. - } // Get the prototype of F; if it is not an object, throw an error. var O = F.prototype; if (!IS_SPEC_OBJECT(O)) { @@ -392,6 +386,13 @@ function INSTANCE_OF(F) { } +// Get an array of property keys for the given object. Used in +// for-in statements. +function GET_KEYS() { + return %GetPropertyNames(this); +} + + // Filter a given key against an object by checking if the object // has a property with the given key; return the key as a string if // it has. Otherwise returns 0 (smi). Used in for-in statements. @@ -462,7 +463,7 @@ function APPLY_PREPARE(args) { } // Make sure the arguments list has the right type. - if (args != null && !IS_SPEC_OBJECT(args)) { + if (args != null && !IS_ARRAY(args) && !IS_ARGUMENTS(args)) { throw %MakeTypeError('apply_wrong_args', []); } diff --git a/deps/v8/src/scanner.h b/deps/v8/src/scanner.h index a2e64a9d2..6651c3875 100644 --- a/deps/v8/src/scanner.h +++ b/deps/v8/src/scanner.h @@ -41,17 +41,6 @@ namespace v8 { namespace internal { - -// General collection of bit-flags that can be passed to scanners and -// parsers to signify their (initial) mode of operation. -enum ParsingFlags { - kNoParsingFlags = 0, - kAllowLazy = 1, - kAllowNativesSyntax = 2, - kHarmonyScoping = 4 -}; - - // Returns the value (0 .. 15) of a hexadecimal character c. // If c is not a legal hexadecimal character, returns a value < 0. 
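Editorial note: the runtime.h hunk above trades three BitField-based flag classes (DeclareGlobalsEvalFlag and friends) for plain power-of-two enum constants, and the scanner.h hunk drops the similar ParsingFlags bit-flag enum. The sketch below shows the two equivalent encodings side by side: a hand-rolled BitField template that packs a value at a given shift and width, and the OR-able constants the reverted code keeps. The template is a simplified illustration, not V8's actual BitField.

#include <cstdint>

// Minimal BitField: packs a value of WIDTH bits at position SHIFT in a word.
template <typename T, int SHIFT, int WIDTH>
struct BitField {
  static constexpr uint32_t kMask = ((1u << WIDTH) - 1u) << SHIFT;
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) << SHIFT) & kMask;
  }
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> SHIFT);
  }
};

using EvalFlag = BitField<bool, 0, 1>;
using StrictFlag = BitField<bool, 1, 1>;
using NativeFlag = BitField<bool, 2, 1>;

// The plain-enum alternative used by the reverted code: same bits, no template.
enum DeclareGlobalsFlags {
  kDeclareGlobalsEvalFlag = 1 << 0,
  kDeclareGlobalsStrictModeFlag = 1 << 1,
  kDeclareGlobalsNativeFlag = 1 << 2
};

int main() {
  uint32_t packed = EvalFlag::encode(true) | NativeFlag::encode(true);
  uint32_t flags = kDeclareGlobalsEvalFlag | kDeclareGlobalsNativeFlag;
  bool same = (packed == flags) &&
              EvalFlag::decode(packed) && !StrictFlag::decode(packed);
  return same ? 0 : 1;
}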
inline int HexValue(uc32 c) { diff --git a/deps/v8/src/scopeinfo.cc b/deps/v8/src/scopeinfo.cc index 8ea5f1e73..1aa51603d 100644 --- a/deps/v8/src/scopeinfo.cc +++ b/deps/v8/src/scopeinfo.cc @@ -51,7 +51,6 @@ ScopeInfo<Allocator>::ScopeInfo(Scope* scope) : function_name_(FACTORY->empty_symbol()), calls_eval_(scope->calls_eval()), is_strict_mode_(scope->is_strict_mode()), - type_(scope->type()), parameters_(scope->num_parameters()), stack_slots_(scope->num_stack_slots()), context_slots_(scope->num_heap_slots()), @@ -139,7 +138,7 @@ ScopeInfo<Allocator>::ScopeInfo(Scope* scope) ASSERT(proxy->var()->index() - Context::MIN_CONTEXT_SLOTS == context_modes_.length()); context_slots_.Add(FACTORY->empty_symbol()); - context_modes_.Add(proxy->var()->mode()); + context_modes_.Add(INTERNAL); } } } @@ -151,10 +150,6 @@ ScopeInfo<Allocator>::ScopeInfo(Scope* scope) // // - calls eval boolean flag // -// - is strict mode scope -// -// - scope type -// // - number of variables in the context object (smi) (= function context // slot index + 1) // - list of pairs (name, Var mode) of context-allocated variables (starting @@ -186,9 +181,8 @@ ScopeInfo<Allocator>::ScopeInfo(Scope* scope) // present) -template <class T> -static inline Object** ReadInt(Object** p, T* x) { - *x = static_cast<T>((reinterpret_cast<Smi*>(*p++))->value()); +static inline Object** ReadInt(Object** p, int* x) { + *x = (reinterpret_cast<Smi*>(*p++))->value(); return p; } @@ -199,21 +193,20 @@ static inline Object** ReadBool(Object** p, bool* x) { } -template <class T> -static inline Object** ReadObject(Object** p, Handle<T>* s) { - *s = Handle<T>::cast(Handle<Object>(*p++)); +static inline Object** ReadSymbol(Object** p, Handle<String>* s) { + *s = Handle<String>(reinterpret_cast<String*>(*p++)); return p; } -template <class Allocator, class T> -static Object** ReadList(Object** p, List<Handle<T>, Allocator >* list) { +template <class Allocator> +static Object** ReadList(Object** p, List<Handle<String>, Allocator >* list) { ASSERT(list->is_empty()); int n; p = ReadInt(p, &n); while (n-- > 0) { - Handle<T> s; - p = ReadObject(p, &s); + Handle<String> s; + p = ReadSymbol(p, &s); list->Add(s); } return p; @@ -230,7 +223,7 @@ static Object** ReadList(Object** p, while (n-- > 0) { Handle<String> s; int m; - p = ReadObject(p, &s); + p = ReadSymbol(p, &s); p = ReadInt(p, &m); list->Add(s); modes->Add(static_cast<VariableMode>(m)); @@ -249,10 +242,9 @@ ScopeInfo<Allocator>::ScopeInfo(SerializedScopeInfo* data) if (data->length() > 0) { Object** p0 = data->data_start(); Object** p = p0; - p = ReadObject(p, &function_name_); + p = ReadSymbol(p, &function_name_); p = ReadBool(p, &calls_eval_); p = ReadBool(p, &is_strict_mode_); - p = ReadInt(p, &type_); p = ReadList<Allocator>(p, &context_slots_, &context_modes_); p = ReadList<Allocator>(p, ¶meters_); p = ReadList<Allocator>(p, &stack_slots_); @@ -273,19 +265,18 @@ static inline Object** WriteBool(Object** p, bool b) { } -template <class T> -static inline Object** WriteObject(Object** p, Handle<T> s) { +static inline Object** WriteSymbol(Object** p, Handle<String> s) { *p++ = *s; return p; } -template <class Allocator, class T> -static Object** WriteList(Object** p, List<Handle<T>, Allocator >* list) { +template <class Allocator> +static Object** WriteList(Object** p, List<Handle<String>, Allocator >* list) { const int n = list->length(); p = WriteInt(p, n); for (int i = 0; i < n; i++) { - p = WriteObject(p, list->at(i)); + p = WriteSymbol(p, list->at(i)); } return p; } @@ -298,7 +289,7 @@ static 
Object** WriteList(Object** p, const int n = list->length(); p = WriteInt(p, n); for (int i = 0; i < n; i++) { - p = WriteObject(p, list->at(i)); + p = WriteSymbol(p, list->at(i)); p = WriteInt(p, modes->at(i)); } return p; @@ -307,9 +298,8 @@ static Object** WriteList(Object** p, template<class Allocator> Handle<SerializedScopeInfo> ScopeInfo<Allocator>::Serialize() { - // function name, calls eval, is_strict_mode, scope type, - // length for 3 tables: - const int extra_slots = 1 + 1 + 1 + 1 + 3; + // function name, calls eval, is_strict_mode, length for 3 tables: + const int extra_slots = 1 + 1 + 1 + 3; int length = extra_slots + context_slots_.length() * 2 + parameters_.length() + @@ -321,10 +311,9 @@ Handle<SerializedScopeInfo> ScopeInfo<Allocator>::Serialize() { Object** p0 = data->data_start(); Object** p = p0; - p = WriteObject(p, function_name_); + p = WriteSymbol(p, function_name_); p = WriteBool(p, calls_eval_); p = WriteBool(p, is_strict_mode_); - p = WriteInt(p, type_); p = WriteList(p, &context_slots_, &context_modes_); p = WriteList(p, ¶meters_); p = WriteList(p, &stack_slots_); @@ -372,8 +361,8 @@ SerializedScopeInfo* SerializedScopeInfo::Empty() { Object** SerializedScopeInfo::ContextEntriesAddr() { ASSERT(length() > 0); - // +4 for function name, calls eval, strict mode, scope type. - return data_start() + 4; + // +3 for function name, calls eval, strict mode. + return data_start() + 3; } @@ -417,16 +406,6 @@ bool SerializedScopeInfo::IsStrictMode() { } -ScopeType SerializedScopeInfo::Type() { - ASSERT(length() > 0); - // +3 for function name, calls eval, strict mode. - Object** p = data_start() + 3; - ScopeType type; - p = ReadInt(p, &type); - return type; -} - - int SerializedScopeInfo::NumberOfStackSlots() { if (length() > 0) { Object** p = StackSlotEntriesAddr(); @@ -460,12 +439,6 @@ bool SerializedScopeInfo::HasHeapAllocatedLocals() { } -bool SerializedScopeInfo::HasContext() { - return HasHeapAllocatedLocals() || - Type() == WITH_SCOPE; -} - - int SerializedScopeInfo::StackSlotIndex(String* name) { ASSERT(name->IsSymbol()); if (length() > 0) { @@ -540,24 +513,16 @@ int SerializedScopeInfo::ParameterIndex(String* name) { } -int SerializedScopeInfo::FunctionContextSlotIndex(String* name, - VariableMode* mode) { +int SerializedScopeInfo::FunctionContextSlotIndex(String* name) { ASSERT(name->IsSymbol()); if (length() > 0) { Object** p = data_start(); if (*p == name) { p = ContextEntriesAddr(); int number_of_context_slots; - p = ReadInt(p, &number_of_context_slots); + ReadInt(p, &number_of_context_slots); ASSERT(number_of_context_slots != 0); // The function context slot is the last entry. - if (mode != NULL) { - // Seek to context slot entry. - p += (number_of_context_slots - 1) * 2; - // Seek to mode. - ++p; - ReadInt(p, mode); - } return number_of_context_slots + Context::MIN_CONTEXT_SLOTS - 1; } } diff --git a/deps/v8/src/scopeinfo.h b/deps/v8/src/scopeinfo.h index eeb30475f..03f321be7 100644 --- a/deps/v8/src/scopeinfo.h +++ b/deps/v8/src/scopeinfo.h @@ -35,10 +35,17 @@ namespace v8 { namespace internal { -// ScopeInfo represents information about different scopes of a source -// program and the allocation of the scope's variables. Scope information -// is stored in a compressed form in SerializedScopeInfo objects and is used +// Scope information represents information about a functions's +// scopes (currently only one, because we don't do any inlining) +// and the allocation of the scope's variables. 
Scope information +// is stored in a compressed form in FixedArray objects and is used // at runtime (stack dumps, deoptimization, etc.). +// +// Historical note: In other VMs built by this team, ScopeInfo was +// usually called DebugInfo since the information was used (among +// other things) for on-demand debugging (Self, Smalltalk). However, +// DebugInfo seems misleading, since this information is primarily used +// in debugging-unrelated contexts. // Forward defined as // template <class Allocator = FreeStoreAllocationPolicy> class ScopeInfo; @@ -76,7 +83,6 @@ class ScopeInfo BASE_EMBEDDED { Handle<String> LocalName(int i) const; int NumberOfLocals() const; - ScopeType type() const { return type_; } // -------------------------------------------------------------------------- // Debugging support @@ -88,7 +94,6 @@ class ScopeInfo BASE_EMBEDDED { Handle<String> function_name_; bool calls_eval_; bool is_strict_mode_; - ScopeType type_; List<Handle<String>, Allocator > parameters_; List<Handle<String>, Allocator > stack_slots_; List<Handle<String>, Allocator > context_slots_; diff --git a/deps/v8/src/scopes.cc b/deps/v8/src/scopes.cc index 3167c4d09..e67b7f826 100644 --- a/deps/v8/src/scopes.cc +++ b/deps/v8/src/scopes.cc @@ -114,7 +114,7 @@ Variable* VariableMap::Lookup(Handle<String> name) { // Dummy constructor -Scope::Scope(ScopeType type) +Scope::Scope(Type type) : isolate_(Isolate::Current()), inner_scopes_(0), variables_(false), @@ -127,7 +127,7 @@ Scope::Scope(ScopeType type) } -Scope::Scope(Scope* outer_scope, ScopeType type) +Scope::Scope(Scope* outer_scope, Type type) : isolate_(Isolate::Current()), inner_scopes_(4), variables_(), @@ -146,7 +146,7 @@ Scope::Scope(Scope* outer_scope, ScopeType type) Scope::Scope(Scope* inner_scope, - ScopeType type, + Type type, Handle<SerializedScopeInfo> scope_info) : isolate_(Isolate::Current()), inner_scopes_(4), @@ -156,8 +156,9 @@ Scope::Scope(Scope* inner_scope, unresolved_(16), decls_(4), already_resolved_(true) { + ASSERT(!scope_info.is_null()); SetDefaults(type, NULL, scope_info); - if (!scope_info.is_null() && scope_info->HasHeapAllocatedLocals()) { + if (scope_info->HasHeapAllocatedLocals()) { num_heap_slots_ = scope_info_->NumberOfContextSlots(); } AddInnerScope(inner_scope); @@ -185,7 +186,7 @@ Scope::Scope(Scope* inner_scope, Handle<String> catch_variable_name) } -void Scope::SetDefaults(ScopeType type, +void Scope::SetDefaults(Type type, Scope* outer_scope, Handle<SerializedScopeInfo> scope_info) { outer_scope_ = outer_scope; @@ -200,17 +201,16 @@ void Scope::SetDefaults(ScopeType type, scope_contains_with_ = false; scope_calls_eval_ = false; // Inherit the strict mode from the parent scope. - strict_mode_flag_ = (outer_scope != NULL) - ? 
outer_scope->strict_mode_flag_ : kNonStrictMode; + strict_mode_ = (outer_scope != NULL) && outer_scope->strict_mode_; + outer_scope_calls_eval_ = false; outer_scope_calls_non_strict_eval_ = false; inner_scope_calls_eval_ = false; + outer_scope_is_eval_scope_ = false; force_eager_compilation_ = false; num_var_or_const_ = 0; num_stack_slots_ = 0; num_heap_slots_ = 0; scope_info_ = scope_info; - start_position_ = RelocInfo::kNoPosition; - end_position_ = RelocInfo::kNoPosition; } @@ -224,31 +224,30 @@ Scope* Scope::DeserializeScopeChain(CompilationInfo* info, bool contains_with = false; while (!context->IsGlobalContext()) { if (context->IsWithContext()) { - Scope* with_scope = new Scope(current_scope, WITH_SCOPE, - Handle<SerializedScopeInfo>::null()); - current_scope = with_scope; // All the inner scopes are inside a with. contains_with = true; for (Scope* s = innermost_scope; s != NULL; s = s->outer_scope()) { s->scope_inside_with_ = true; } - } else if (context->IsFunctionContext()) { - SerializedScopeInfo* scope_info = - context->closure()->shared()->scope_info(); - current_scope = new Scope(current_scope, FUNCTION_SCOPE, - Handle<SerializedScopeInfo>(scope_info)); - } else if (context->IsBlockContext()) { - SerializedScopeInfo* scope_info = - SerializedScopeInfo::cast(context->extension()); - current_scope = new Scope(current_scope, BLOCK_SCOPE, - Handle<SerializedScopeInfo>(scope_info)); } else { - ASSERT(context->IsCatchContext()); - String* name = String::cast(context->extension()); - current_scope = new Scope(current_scope, Handle<String>(name)); + if (context->IsFunctionContext()) { + SerializedScopeInfo* scope_info = + context->closure()->shared()->scope_info(); + current_scope = new Scope(current_scope, FUNCTION_SCOPE, + Handle<SerializedScopeInfo>(scope_info)); + } else if (context->IsBlockContext()) { + SerializedScopeInfo* scope_info = + SerializedScopeInfo::cast(context->extension()); + current_scope = new Scope(current_scope, BLOCK_SCOPE, + Handle<SerializedScopeInfo>(scope_info)); + } else { + ASSERT(context->IsCatchContext()); + String* name = String::cast(context->extension()); + current_scope = new Scope(current_scope, Handle<String>(name)); + } + if (contains_with) current_scope->RecordWithStatement(); + if (innermost_scope == NULL) innermost_scope = current_scope; } - if (contains_with) current_scope->RecordWithStatement(); - if (innermost_scope == NULL) innermost_scope = current_scope; // Forget about a with when we move to a context for a different function. if (context->previous()->closure() != context->closure()) { @@ -282,15 +281,15 @@ bool Scope::Analyze(CompilationInfo* info) { } -void Scope::Initialize() { +void Scope::Initialize(bool inside_with) { ASSERT(!already_resolved()); // Add this scope as a new inner scope of the outer scope. if (outer_scope_ != NULL) { outer_scope_->inner_scopes_.Add(this); - scope_inside_with_ = outer_scope_->scope_inside_with_ || is_with_scope(); + scope_inside_with_ = outer_scope_->scope_inside_with_ || inside_with; } else { - scope_inside_with_ = is_with_scope(); + scope_inside_with_ = inside_with; } // Declare convenience variables. @@ -301,7 +300,13 @@ void Scope::Initialize() { // instead load them directly from the stack. 
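Editorial note: Scope::DeserializeScopeChain, in the version being restored above, rebuilds a compile-time scope chain by walking the runtime context chain outward: a with context only flags the scopes already collected as sitting inside a with, while function, block and catch contexts each materialize a Scope around the current inner scope. A trimmed-down model of that walk, with a plain linked context list and string tags instead of SerializedScopeInfo:

#include <string>
#include <vector>

enum ContextKind { kGlobalCtx, kFunctionCtx, kWithCtx, kCatchCtx, kBlockCtx };

struct Context {
  ContextKind kind;
  const Context* previous;              // outer context; global ends the chain
};

struct ScopeNode {
  std::string type;                     // "function", "block", "catch"
  bool inside_with = false;
};

// Walk outward from the innermost context, materializing one ScopeNode per
// function/block/catch context and remembering any enclosing with statement.
std::vector<ScopeNode> DeserializeScopeChain(const Context* context) {
  std::vector<ScopeNode> chain;          // innermost first
  bool contains_with = false;
  while (context->kind != kGlobalCtx) {
    if (context->kind == kWithCtx) {
      // Every scope collected so far sits inside this with statement.
      contains_with = true;
      for (ScopeNode& s : chain) s.inside_with = true;
    } else {
      ScopeNode scope;
      scope.type = context->kind == kFunctionCtx ? "function"
                 : context->kind == kBlockCtx    ? "block"
                                                 : "catch";
      scope.inside_with = contains_with;
      chain.push_back(scope);
    }
    context = context->previous;
  }
  return chain;
}

int main() {
  Context global{kGlobalCtx, nullptr};
  Context function{kFunctionCtx, &global};
  Context with{kWithCtx, &function};
  Context block{kBlockCtx, &with};
  std::vector<ScopeNode> chain = DeserializeScopeChain(&block);
  // Expect: block (marked as inside the with), then function.
  return (chain.size() == 2 && chain[0].inside_with) ? 0 : 1;
}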
Currently, the only // such parameter is 'this' which is passed on the stack when // invoking scripts - if (is_declaration_scope()) { + if (is_catch_scope() || is_block_scope()) { + ASSERT(outer_scope() != NULL); + receiver_ = outer_scope()->receiver(); + } else { + ASSERT(is_function_scope() || + is_global_scope() || + is_eval_scope()); Variable* var = variables_.Declare(this, isolate_->factory()->this_symbol(), @@ -310,9 +315,6 @@ void Scope::Initialize() { Variable::THIS); var->AllocateTo(Variable::PARAMETER, -1); receiver_ = var; - } else { - ASSERT(outer_scope() != NULL); - receiver_ = outer_scope()->receiver(); } if (is_function_scope()) { @@ -379,7 +381,7 @@ Variable* Scope::LocalLookup(Handle<String> name) { index = scope_info_->ParameterIndex(*name); if (index < 0) { // Check the function name. - index = scope_info_->FunctionContextSlotIndex(*name, NULL); + index = scope_info_->FunctionContextSlotIndex(*name); if (index < 0) return NULL; } } @@ -402,10 +404,10 @@ Variable* Scope::Lookup(Handle<String> name) { } -Variable* Scope::DeclareFunctionVar(Handle<String> name, VariableMode mode) { +Variable* Scope::DeclareFunctionVar(Handle<String> name) { ASSERT(is_function_scope() && function_ == NULL); Variable* function_var = - new Variable(this, name, mode, true, Variable::NORMAL); + new Variable(this, name, CONST, true, Variable::NORMAL); function_ = new(isolate_->zone()) VariableProxy(isolate_, function_var); return function_var; } @@ -425,10 +427,7 @@ Variable* Scope::DeclareLocal(Handle<String> name, VariableMode mode) { // This function handles VAR and CONST modes. DYNAMIC variables are // introduces during variable allocation, INTERNAL variables are allocated // explicitly, and TEMPORARY variables are allocated via NewTemporary(). - ASSERT(mode == VAR || - mode == CONST || - mode == CONST_HARMONY || - mode == LET); + ASSERT(mode == VAR || mode == CONST || mode == LET); ++num_var_or_const_; return variables_.Declare(this, name, mode, true, Variable::NORMAL); } @@ -442,13 +441,15 @@ Variable* Scope::DeclareGlobal(Handle<String> name) { } -VariableProxy* Scope::NewUnresolved(Handle<String> name, int position) { +VariableProxy* Scope::NewUnresolved(Handle<String> name, + bool inside_with, + int position) { // Note that we must not share the unresolved variables with // the same name because they may be removed selectively via // RemoveUnresolved(). ASSERT(!already_resolved()); VariableProxy* proxy = new(isolate_->zone()) VariableProxy( - isolate_, name, false, position); + isolate_, name, false, inside_with, position); unresolved_.Add(proxy); return proxy; } @@ -504,19 +505,17 @@ Declaration* Scope::CheckConflictingVarDeclarations() { Declaration* decl = decls_[i]; if (decl->mode() != VAR) continue; Handle<String> name = decl->proxy()->name(); - - // Iterate through all scopes until and including the declaration scope. - Scope* previous = NULL; - Scope* current = decl->scope(); - do { + bool cond = true; + for (Scope* scope = decl->scope(); cond ; scope = scope->outer_scope_) { // There is a conflict if there exists a non-VAR binding. - Variable* other_var = current->variables_.Lookup(name); + Variable* other_var = scope->variables_.Lookup(name); if (other_var != NULL && other_var->mode() != VAR) { return decl; } - previous = current; - current = current->outer_scope_; - } while (!previous->is_declaration_scope()); + + // Include declaration scope in the iteration but stop after. 
+ if (!scope->is_block_scope() && !scope->is_catch_scope()) cond = false; + } } return NULL; } @@ -564,11 +563,16 @@ void Scope::AllocateVariables(Handle<Context> context) { // this information in the ScopeInfo and then use it here (by traversing // the call chain stack, at compile time). + bool eval_scope = is_eval_scope(); + bool outer_scope_calls_eval = false; bool outer_scope_calls_non_strict_eval = false; if (!is_global_scope()) { - context->ComputeEvalScopeInfo(&outer_scope_calls_non_strict_eval); + context->ComputeEvalScopeInfo(&outer_scope_calls_eval, + &outer_scope_calls_non_strict_eval); } - PropagateScopeInfo(outer_scope_calls_non_strict_eval); + PropagateScopeInfo(outer_scope_calls_eval, + outer_scope_calls_non_strict_eval, + eval_scope); // 2) Resolve variables. Scope* global_scope = NULL; @@ -621,7 +625,8 @@ int Scope::ContextChainLength(Scope* scope) { Scope* Scope::DeclarationScope() { Scope* scope = this; - while (!scope->is_declaration_scope()) { + while (scope->is_catch_scope() || + scope->is_block_scope()) { scope = scope->outer_scope(); } return scope; @@ -636,33 +641,14 @@ Handle<SerializedScopeInfo> Scope::GetSerializedScopeInfo() { } -void Scope::GetNestedScopeChain( - List<Handle<SerializedScopeInfo> >* chain, - int position) { - chain->Add(Handle<SerializedScopeInfo>(GetSerializedScopeInfo())); - - for (int i = 0; i < inner_scopes_.length(); i++) { - Scope* scope = inner_scopes_[i]; - int beg_pos = scope->start_position(); - int end_pos = scope->end_position(); - ASSERT(beg_pos >= 0 && end_pos >= 0); - if (beg_pos <= position && position <= end_pos) { - scope->GetNestedScopeChain(chain, position); - return; - } - } -} - - #ifdef DEBUG -static const char* Header(ScopeType type) { +static const char* Header(Scope::Type type) { switch (type) { - case EVAL_SCOPE: return "eval"; - case FUNCTION_SCOPE: return "function"; - case GLOBAL_SCOPE: return "global"; - case CATCH_SCOPE: return "catch"; - case BLOCK_SCOPE: return "block"; - case WITH_SCOPE: return "with"; + case Scope::EVAL_SCOPE: return "eval"; + case Scope::FUNCTION_SCOPE: return "function"; + case Scope::GLOBAL_SCOPE: return "global"; + case Scope::CATCH_SCOPE: return "catch"; + case Scope::BLOCK_SCOPE: return "block"; } UNREACHABLE(); return NULL; @@ -762,10 +748,14 @@ void Scope::Print(int n) { if (scope_inside_with_) Indent(n1, "// scope inside 'with'\n"); if (scope_contains_with_) Indent(n1, "// scope contains 'with'\n"); if (scope_calls_eval_) Indent(n1, "// scope calls 'eval'\n"); + if (outer_scope_calls_eval_) Indent(n1, "// outer scope calls 'eval'\n"); if (outer_scope_calls_non_strict_eval_) { Indent(n1, "// outer scope calls 'eval' in non-strict context\n"); } if (inner_scope_calls_eval_) Indent(n1, "// inner scope calls 'eval'\n"); + if (outer_scope_is_eval_scope_) { + Indent(n1, "// outer scope is 'eval' scope\n"); + } if (num_stack_slots_ > 0) { Indent(n1, "// "); PrintF("%d stack slots\n", num_stack_slots_); } if (num_heap_slots_ > 0) { Indent(n1, "// "); @@ -819,68 +809,74 @@ Variable* Scope::NonLocal(Handle<String> name, VariableMode mode) { } +// Lookup a variable starting with this scope. The result is either +// the statically resolved variable belonging to an outer scope, or +// NULL. It may be NULL because a) we couldn't find a variable, or b) +// because the variable is just a guess (and may be shadowed by +// another variable that is introduced dynamically via an 'eval' call +// or a 'with' statement). 
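Editorial note: the comment above introduces the restored LookupRecursive, whose central trick is the guess flag: a hit found while this scope (or an enclosing with) might be shadowed by eval-introduced bindings is invalidated, and only remembered through the invalidated_local out-parameter so the resolver can still emit a fast path for it. A toy version of that walk, with string-keyed scopes and booleans for the eval and with conditions:

#include <map>
#include <string>

struct Variable { std::string name; bool is_global = false; };

struct Scope {
  Scope* outer = nullptr;
  bool calls_eval = false;       // an eval() in this scope may add bindings
  bool inside_with = false;      // a with statement may shadow outer bindings
  std::map<std::string, Variable> locals;
};

// Returns the statically resolved variable, or nullptr if the binding can only
// be decided at runtime; *invalidated_local remembers a shadowable local hit.
Variable* LookupRecursive(Scope* scope, const std::string& name,
                          bool from_inner_scope, Variable** invalidated_local) {
  bool guess = scope->calls_eval;                 // hit may be shadowed by eval
  Variable* var = nullptr;
  auto it = scope->locals.find(name);
  if (it != scope->locals.end()) {
    // A direct local hit stays valid even if this scope calls eval: a
    // redeclaration by eval resolves to the same variable.
    if (!from_inner_scope) return &it->second;
    var = &it->second;
  } else if (scope->outer != nullptr) {
    var = LookupRecursive(scope->outer, name, true, invalidated_local);
    if (scope->inside_with) guess = true;         // with may shadow the outer hit
  }
  if (var == nullptr) return nullptr;
  if (guess) {                                    // invalidate the shadowable hit
    if (!var->is_global) *invalidated_local = var;
    return nullptr;
  }
  return var;
}

int main() {
  Scope outer;
  outer.locals["x"] = {"x", false};
  Scope inner;
  inner.outer = &outer;
  inner.calls_eval = true;                        // eval could introduce its own x
  Variable* invalidated = nullptr;
  Variable* var = LookupRecursive(&inner, "x", false, &invalidated);
  return (var == nullptr && invalidated != nullptr) ? 0 : 1;
}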
Variable* Scope::LookupRecursive(Handle<String> name, - Handle<Context> context, - BindingKind* binding_kind) { - ASSERT(binding_kind != NULL); + bool from_inner_scope, + Variable** invalidated_local) { + // If we find a variable, but the current scope calls 'eval', the found + // variable may not be the correct one (the 'eval' may introduce a + // property with the same name). In that case, remember that the variable + // found is just a guess. + bool guess = scope_calls_eval_; + // Try to find the variable in this scope. Variable* var = LocalLookup(name); - // We found a variable and we are done. (Even if there is an 'eval' in - // this scope which introduces the same variable again, the resulting - // variable remains the same.) if (var != NULL) { - *binding_kind = BOUND; - return var; + // We found a variable. If this is not an inner lookup, we are done. + // (Even if there is an 'eval' in this scope which introduces the + // same variable again, the resulting variable remains the same. + // Note that enclosing 'with' statements are handled at the call site.) + if (!from_inner_scope) + return var; + + } else { + // We did not find a variable locally. Check against the function variable, + // if any. We can do this for all scopes, since the function variable is + // only present - if at all - for function scopes. + // + // This lookup corresponds to a lookup in the "intermediate" scope sitting + // between this scope and the outer scope. (ECMA-262, 3rd., requires that + // the name of named function literal is kept in an intermediate scope + // in between this scope and the next outer scope.) + if (function_ != NULL && function_->name().is_identical_to(name)) { + var = function_->var(); + + } else if (outer_scope_ != NULL) { + var = outer_scope_->LookupRecursive(name, true, invalidated_local); + // We may have found a variable in an outer scope. However, if + // the current scope is inside a 'with', the actual variable may + // be a property introduced via the 'with' statement. Then, the + // variable we may have found is just a guess. + if (scope_inside_with_) + guess = true; + } + + // If we did not find a variable, we are done. + if (var == NULL) + return NULL; } - // We did not find a variable locally. Check against the function variable, - // if any. We can do this for all scopes, since the function variable is - // only present - if at all - for function scopes. - // - // This lookup corresponds to a lookup in the "intermediate" scope sitting - // between this scope and the outer scope. (ECMA-262, 3rd., requires that - // the name of named function literal is kept in an intermediate scope - // in between this scope and the next outer scope.) - *binding_kind = UNBOUND; - if (function_ != NULL && function_->name().is_identical_to(name)) { - var = function_->var(); - *binding_kind = BOUND; - } else if (outer_scope_ != NULL) { - var = outer_scope_->LookupRecursive(name, context, binding_kind); - if (*binding_kind == BOUND) var->MarkAsAccessedFromInnerScope(); + ASSERT(var != NULL); + + // If this is a lookup from an inner scope, mark the variable. + if (from_inner_scope) { + var->MarkAsAccessedFromInnerScope(); } - if (is_with_scope()) { - // The current scope is a with scope, so the variable binding can not be - // statically resolved. 
However, note that it was necessary to do a lookup - // in the outer scope anyway, because if a binding exists in an outer scope, - // the associated variable has to be marked as potentially being accessed - // from inside of an inner with scope (the property may not be in the 'with' - // object). - *binding_kind = DYNAMIC_LOOKUP; - return NULL; - } else if (is_eval_scope()) { - // No local binding was found, no 'with' statements have been encountered - // and the code is executed as part of a call to 'eval'. The calling context - // contains scope information that we can use to determine if the variable - // is global, i.e. the calling context chain does not contain a binding and - // no 'with' contexts. - ASSERT(*binding_kind == UNBOUND); - *binding_kind = context->GlobalIfNotShadowedByEval(name) - ? UNBOUND_EVAL_SHADOWED : DYNAMIC_LOOKUP; - return NULL; - } else if (calls_non_strict_eval()) { - // A variable binding may have been found in an outer scope, but the current - // scope makes a non-strict 'eval' call, so the found variable may not be - // the correct one (the 'eval' may introduce a binding with the same name). - // In that case, change the lookup result to reflect this situation. - if (*binding_kind == BOUND) { - *binding_kind = BOUND_EVAL_SHADOWED; - } else if (*binding_kind == UNBOUND) { - *binding_kind = UNBOUND_EVAL_SHADOWED; - } + // If the variable we have found is just a guess, invalidate the + // result. If the found variable is local, record that fact so we + // can generate fast code to get it if it is not shadowed by eval. + if (guess) { + if (!var->is_global()) *invalidated_local = var; + var = NULL; } + return var; } @@ -895,44 +891,71 @@ void Scope::ResolveVariable(Scope* global_scope, if (proxy->var() != NULL) return; // Otherwise, try to resolve the variable. - BindingKind binding_kind; - Variable* var = LookupRecursive(proxy->name(), context, &binding_kind); - switch (binding_kind) { - case BOUND: - // We found a variable binding. - break; + Variable* invalidated_local = NULL; + Variable* var = LookupRecursive(proxy->name(), false, &invalidated_local); + + if (proxy->inside_with()) { + // If we are inside a local 'with' statement, all bets are off + // and we cannot resolve the proxy to a local variable even if + // we found an outer matching variable. + // Note that we must do a lookup anyway, because if we find one, + // we must mark that variable as potentially accessed from this + // inner scope (the property may not be in the 'with' object). + var = NonLocal(proxy->name(), DYNAMIC); - case BOUND_EVAL_SHADOWED: - // We found a variable variable binding that might be shadowed - // by 'eval' introduced variable bindings. - if (var->is_global()) { - var = NonLocal(proxy->name(), DYNAMIC_GLOBAL); - } else { - Variable* invalidated = var; + } else { + // We are not inside a local 'with' statement. + + if (var == NULL) { + // We did not find the variable. We have a global variable + // if we are in the global scope (we know already that we + // are outside a 'with' statement) or if there is no way + // that the variable might be introduced dynamically (through + // a local or outer eval() call, or an outer 'with' statement), + // or we don't know about the outer scope (because we are + // in an eval scope). + if (is_global_scope() || + !(scope_inside_with_ || outer_scope_is_eval_scope_ || + scope_calls_eval_ || outer_scope_calls_eval_)) { + // We must have a global variable. 
+ ASSERT(global_scope != NULL); + var = global_scope->DeclareGlobal(proxy->name()); + + } else if (scope_inside_with_) { + // If we are inside a with statement we give up and look up + // the variable at runtime. + var = NonLocal(proxy->name(), DYNAMIC); + + } else if (invalidated_local != NULL) { + // No with statements are involved and we found a local + // variable that might be shadowed by eval introduced + // variables. var = NonLocal(proxy->name(), DYNAMIC_LOCAL); - var->set_local_if_not_shadowed(invalidated); - } - break; - - case UNBOUND: - // No binding has been found. Declare a variable in global scope. - ASSERT(global_scope != NULL); - var = global_scope->DeclareGlobal(proxy->name()); - break; - - case UNBOUND_EVAL_SHADOWED: - // No binding has been found. But some scope makes a - // non-strict 'eval' call. - var = NonLocal(proxy->name(), DYNAMIC_GLOBAL); - break; + var->set_local_if_not_shadowed(invalidated_local); + + } else if (outer_scope_is_eval_scope_) { + // No with statements and we did not find a local and the code + // is executed with a call to eval. The context contains + // scope information that we can use to determine if the + // variable is global if it is not shadowed by eval-introduced + // variables. + if (context->GlobalIfNotShadowedByEval(proxy->name())) { + var = NonLocal(proxy->name(), DYNAMIC_GLOBAL); + + } else { + var = NonLocal(proxy->name(), DYNAMIC); + } - case DYNAMIC_LOOKUP: - // The variable could not be resolved statically. - var = NonLocal(proxy->name(), DYNAMIC); - break; + } else { + // No with statements and we did not find a local and the code + // is not executed with a call to eval. We know that this + // variable is global unless it is shadowed by eval-introduced + // variables. + var = NonLocal(proxy->name(), DYNAMIC_GLOBAL); + } + } } - ASSERT(var != NULL); proxy->BindTo(var); } @@ -953,17 +976,31 @@ void Scope::ResolveVariablesRecursively(Scope* global_scope, } -bool Scope::PropagateScopeInfo(bool outer_scope_calls_non_strict_eval ) { +bool Scope::PropagateScopeInfo(bool outer_scope_calls_eval, + bool outer_scope_calls_non_strict_eval, + bool outer_scope_is_eval_scope) { + if (outer_scope_calls_eval) { + outer_scope_calls_eval_ = true; + } + if (outer_scope_calls_non_strict_eval) { outer_scope_calls_non_strict_eval_ = true; } + if (outer_scope_is_eval_scope) { + outer_scope_is_eval_scope_ = true; + } + + bool calls_eval = scope_calls_eval_ || outer_scope_calls_eval_; + bool is_eval = is_eval_scope() || outer_scope_is_eval_scope_; bool calls_non_strict_eval = (scope_calls_eval_ && !is_strict_mode()) || outer_scope_calls_non_strict_eval_; for (int i = 0; i < inner_scopes_.length(); i++) { Scope* inner_scope = inner_scopes_[i]; - if (inner_scope->PropagateScopeInfo(calls_non_strict_eval)) { + if (inner_scope->PropagateScopeInfo(calls_eval, + calls_non_strict_eval, + is_eval)) { inner_scope_calls_eval_ = true; } if (inner_scope->force_eager_compilation_) { diff --git a/deps/v8/src/scopes.h b/deps/v8/src/scopes.h index a1418874e..7e789b8bd 100644 --- a/deps/v8/src/scopes.h +++ b/deps/v8/src/scopes.h @@ -89,7 +89,15 @@ class Scope: public ZoneObject { // --------------------------------------------------------------------------- // Construction - Scope(Scope* outer_scope, ScopeType type); + enum Type { + EVAL_SCOPE, // The top-level scope for an eval source. + FUNCTION_SCOPE, // The top-level scope for a function. + GLOBAL_SCOPE, // The top-level scope for a program or a top-level eval. + CATCH_SCOPE, // The scope introduced by catch. 
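The restored PropagateScopeInfo pushes three facts down the scope tree -- does any outer scope call eval, does any outer scope call non-strict eval, and is some outer scope an eval scope -- and reports back up, via its return value, whether this scope or one of its inner scopes calls eval. A minimal stand-alone model of that traversal (a toy ScopeNode, not the V8 class; the return value is inferred from how the call sites in the hunk above use it):

    // Not V8 code: a toy model of the three-flag propagation restored above.
    #include <vector>

    struct ScopeNode {
      bool calls_eval = false;
      bool is_strict = false;
      bool is_eval_scope = false;
      bool outer_calls_eval = false;
      bool outer_calls_non_strict_eval = false;
      bool outer_is_eval_scope = false;
      bool inner_calls_eval = false;
      std::vector<ScopeNode*> inner;

      // Returns true if this scope or any inner scope calls eval.
      bool Propagate(bool o_calls_eval, bool o_calls_non_strict, bool o_is_eval) {
        outer_calls_eval |= o_calls_eval;
        outer_calls_non_strict_eval |= o_calls_non_strict;
        outer_is_eval_scope |= o_is_eval;

        bool calls = calls_eval || outer_calls_eval;
        bool non_strict = (calls_eval && !is_strict) || outer_calls_non_strict_eval;
        bool in_eval = is_eval_scope || outer_is_eval_scope;

        for (ScopeNode* s : inner) {
          if (s->Propagate(calls, non_strict, in_eval)) inner_calls_eval = true;
        }
        return calls_eval || inner_calls_eval;
      }
    };

    int main() {
      ScopeNode function_scope, block_scope;
      block_scope.calls_eval = true;
      function_scope.inner.push_back(&block_scope);
      bool any_eval = function_scope.Propagate(false, false, false);
      return any_eval ? 0 : 1;   // the inner eval call is reported upward
    }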
+ BLOCK_SCOPE // The scope introduced by a new block. + }; + + Scope(Scope* outer_scope, Type type); // Compute top scope and allocate variables. For lazy compilation the top // scope only contains the single lazily compiled function, so this @@ -102,7 +110,7 @@ class Scope: public ZoneObject { // The scope name is only used for printing/debugging. void SetScopeName(Handle<String> scope_name) { scope_name_ = scope_name; } - void Initialize(); + void Initialize(bool inside_with); // Checks if the block scope is redundant, i.e. it does not contain any // block scoped declarations. In that case it is removed from the scope @@ -122,7 +130,7 @@ class Scope: public ZoneObject { // Declare the function variable for a function literal. This variable // is in an intermediate scope between this function scope and the the // outer scope. Only possible for function scopes; at most one variable. - Variable* DeclareFunctionVar(Handle<String> name, VariableMode mode); + Variable* DeclareFunctionVar(Handle<String> name); // Declare a parameter in this scope. When there are duplicated // parameters the rightmost one 'wins'. However, the implementation @@ -141,6 +149,7 @@ class Scope: public ZoneObject { // Create a new unresolved variable. VariableProxy* NewUnresolved(Handle<String> name, + bool inside_with, int position = RelocInfo::kNoPosition); // Remove a unresolved variable. During parsing, an unresolved variable @@ -190,42 +199,11 @@ class Scope: public ZoneObject { void RecordWithStatement() { scope_contains_with_ = true; } // Inform the scope that the corresponding code contains an eval call. - void RecordEvalCall() { if (!is_global_scope()) scope_calls_eval_ = true; } + void RecordEvalCall() { scope_calls_eval_ = true; } - // Set the strict mode flag (unless disabled by a global flag). - void SetStrictModeFlag(StrictModeFlag strict_mode_flag) { - strict_mode_flag_ = FLAG_strict_mode ? strict_mode_flag : kNonStrictMode; - } - - // Position in the source where this scope begins and ends. - // - // * For the scope of a with statement - // with (obj) stmt - // start position: start position of first token of 'stmt' - // end position: end position of last token of 'stmt' - // * For the scope of a block - // { stmts } - // start position: start position of '{' - // end position: end position of '}' - // * For the scope of a function literal or decalaration - // function fun(a,b) { stmts } - // start position: start position of '(' - // end position: end position of '}' - // * For the scope of a catch block - // try { stms } catch(e) { stmts } - // start position: start position of '(' - // end position: end position of ')' - // * For the scope of a for-statement - // for (let x ...) stmt - // start position: start position of '(' - // end position: end position of last token of 'stmt' - int start_position() const { return start_position_; } - void set_start_position(int statement_pos) { - start_position_ = statement_pos; - } - int end_position() const { return end_position_; } - void set_end_position(int statement_pos) { - end_position_ = statement_pos; + // Enable strict mode for the scope (unless disabled by a global flag). 
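With WITH_SCOPE gone from the restored Type enum, "inside a with statement" is no longer a property of the scope chain; the parser threads it explicitly through Initialize(bool inside_with) and NewUnresolved(name, inside_with, position), and ResolveVariable later consults proxy->inside_with(). A toy illustration of that calling convention (stand-in VariableProxy/ParserScope types, not the parser itself; -1 stands in for RelocInfo::kNoPosition):

    // Not V8 code: a stand-in for the restored NewUnresolved calling convention.
    #include <deque>
    #include <string>

    struct VariableProxy {
      std::string name;
      bool inside_with;   // recorded at creation; consulted later by resolution
      int position;
    };

    struct ParserScope {
      std::deque<VariableProxy> unresolved;
      // Restored convention: the caller says whether the use site sits inside
      // a 'with' statement, rather than the scope chain encoding it as a
      // dedicated with scope.
      VariableProxy* NewUnresolved(const std::string& name, bool inside_with,
                                   int position = -1) {
        unresolved.push_back(VariableProxy{name, inside_with, position});
        return &unresolved.back();
      }
    };

    int main() {
      ParserScope scope;
      VariableProxy* p = scope.NewUnresolved("x", /*inside_with=*/true);
      return p->inside_with ? 0 : 1;
    }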
+ void EnableStrictMode() { + strict_mode_ = FLAG_strict_mode; } // --------------------------------------------------------------------------- @@ -237,20 +215,14 @@ class Scope: public ZoneObject { bool is_global_scope() const { return type_ == GLOBAL_SCOPE; } bool is_catch_scope() const { return type_ == CATCH_SCOPE; } bool is_block_scope() const { return type_ == BLOCK_SCOPE; } - bool is_with_scope() const { return type_ == WITH_SCOPE; } - bool is_declaration_scope() const { - return is_eval_scope() || is_function_scope() || is_global_scope(); - } - bool is_strict_mode() const { return strict_mode_flag() == kStrictMode; } + bool is_strict_mode() const { return strict_mode_; } bool is_strict_mode_eval_scope() const { return is_eval_scope() && is_strict_mode(); } // Information about which scopes calls eval. bool calls_eval() const { return scope_calls_eval_; } - bool calls_non_strict_eval() { - return scope_calls_eval_ && !is_strict_mode(); - } + bool outer_scope_calls_eval() const { return outer_scope_calls_eval_; } bool outer_scope_calls_non_strict_eval() const { return outer_scope_calls_non_strict_eval_; } @@ -266,12 +238,6 @@ class Scope: public ZoneObject { // --------------------------------------------------------------------------- // Accessors. - // The type of this scope. - ScopeType type() const { return type_; } - - // The strict mode of this scope. - StrictModeFlag strict_mode_flag() const { return strict_mode_flag_; } - // The variable corresponding the 'this' value. Variable* receiver() { return receiver_; } @@ -298,8 +264,6 @@ class Scope: public ZoneObject { // Declarations list. ZoneList<Declaration*>* declarations() { return &decls_; } - // Inner scope list. - ZoneList<Scope*>* inner_scopes() { return &inner_scopes_; } // --------------------------------------------------------------------------- // Variable allocation. @@ -343,13 +307,6 @@ class Scope: public ZoneObject { Handle<SerializedScopeInfo> GetSerializedScopeInfo(); - // Get the chain of nested scopes within this scope for the source statement - // position. The scopes will be added to the list from the outermost scope to - // the innermost scope. Only nested block, catch or with scopes are tracked - // and will be returned, but no inner function scopes. - void GetNestedScopeChain(List<Handle<SerializedScopeInfo> >* chain, - int statement_position); - // --------------------------------------------------------------------------- // Strict mode support. bool IsDeclared(Handle<String> name) { @@ -373,7 +330,7 @@ class Scope: public ZoneObject { protected: friend class ParserFactory; - explicit Scope(ScopeType type); + explicit Scope(Type type); Isolate* const isolate_; @@ -382,7 +339,7 @@ class Scope: public ZoneObject { ZoneList<Scope*> inner_scopes_; // the immediately enclosed inner scopes // The scope type. - ScopeType type_; + Type type_; // Debugging support. Handle<String> scope_name_; @@ -423,14 +380,13 @@ class Scope: public ZoneObject { // the 'eval' call site this scope is the declaration scope. bool scope_calls_eval_; // This scope is a strict mode scope. - StrictModeFlag strict_mode_flag_; - // Source positions. - int start_position_; - int end_position_; + bool strict_mode_; // Computed via PropagateScopeInfo. 
+ bool outer_scope_calls_eval_; bool outer_scope_calls_non_strict_eval_; bool inner_scope_calls_eval_; + bool outer_scope_is_eval_scope_; bool force_eager_compilation_; // True if it doesn't need scope resolution (e.g., if the scope was @@ -440,7 +396,7 @@ class Scope: public ZoneObject { // Computed as variables are declared. int num_var_or_const_; - // Computed via AllocateVariables; function, block and catch scopes only. + // Computed via AllocateVariables; function scopes only. int num_stack_slots_; int num_heap_slots_; @@ -453,57 +409,9 @@ class Scope: public ZoneObject { Variable* NonLocal(Handle<String> name, VariableMode mode); // Variable resolution. - // Possible results of a recursive variable lookup telling if and how a - // variable is bound. These are returned in the output parameter *binding_kind - // of the LookupRecursive function. - enum BindingKind { - // The variable reference could be statically resolved to a variable binding - // which is returned. There is no 'with' statement between the reference and - // the binding and no scope between the reference scope (inclusive) and - // binding scope (exclusive) makes a non-strict 'eval' call. - BOUND, - - // The variable reference could be statically resolved to a variable binding - // which is returned. There is no 'with' statement between the reference and - // the binding, but some scope between the reference scope (inclusive) and - // binding scope (exclusive) makes a non-strict 'eval' call, that might - // possibly introduce variable bindings shadowing the found one. Thus the - // found variable binding is just a guess. - BOUND_EVAL_SHADOWED, - - // The variable reference could not be statically resolved to any binding - // and thus should be considered referencing a global variable. NULL is - // returned. The variable reference is not inside any 'with' statement and - // no scope between the reference scope (inclusive) and global scope - // (exclusive) makes a non-strict 'eval' call. - UNBOUND, - - // The variable reference could not be statically resolved to any binding - // NULL is returned. The variable reference is not inside any 'with' - // statement, but some scope between the reference scope (inclusive) and - // global scope (exclusive) makes a non-strict 'eval' call, that might - // possibly introduce a variable binding. Thus the reference should be - // considered referencing a global variable unless it is shadowed by an - // 'eval' introduced binding. - UNBOUND_EVAL_SHADOWED, - - // The variable could not be statically resolved and needs to be looked up - // dynamically. NULL is returned. There are two possible reasons: - // * A 'with' statement has been encountered and there is no variable - // binding for the name between the variable reference and the 'with'. - // The variable potentially references a property of the 'with' object. - // * The code is being executed as part of a call to 'eval' and the calling - // context chain contains either a variable binding for the name or it - // contains a 'with' context. - DYNAMIC_LOOKUP - }; - - // Lookup a variable reference given by name recursively starting with this - // scope. If the code is executed because of a call to 'eval', the context - // parameter should be set to the calling context of 'eval'. 
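The BindingKind documentation deleted here is the 3.7.1 counterpart of the guess/invalidated_local scheme restored above: LookupRecursive classified every lookup into one of five outcomes instead of handing back a possibly invalidated variable. A stand-alone restatement of that classification (the enum comments condense the removed text; the Classify helper is an illustrative simplification, not V8 code):

    // Not V8 code: the five lookup outcomes described by the removed comments.
    enum BindingKind {
      BOUND,                 // statically resolved; nothing on the path can shadow it
      BOUND_EVAL_SHADOWED,   // resolved, but a non-strict eval on the path may shadow it
      UNBOUND,               // not found; treat as a global reference
      UNBOUND_EVAL_SHADOWED, // not found, but an eval on the path may introduce it
      DYNAMIC_LOOKUP         // must be looked up at runtime ('with' or eval scope)
    };

    // found:         a binding was located in some enclosing scope
    // dynamic_scope: a 'with' (or an eval scope with an unknown caller chain)
    //                sits between the reference and any binding
    // eval_on_path:  some scope between reference and binding calls non-strict eval
    inline BindingKind Classify(bool found, bool dynamic_scope, bool eval_on_path) {
      if (dynamic_scope) return DYNAMIC_LOOKUP;
      if (found) return eval_on_path ? BOUND_EVAL_SHADOWED : BOUND;
      return eval_on_path ? UNBOUND_EVAL_SHADOWED : UNBOUND;
    }

    int main() {
      return Classify(/*found=*/true, /*dynamic_scope=*/false,
                      /*eval_on_path=*/true) == BOUND_EVAL_SHADOWED ? 0 : 1;
    }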
Variable* LookupRecursive(Handle<String> name, - Handle<Context> context, - BindingKind* binding_kind); + bool from_inner_function, + Variable** invalidated_local); void ResolveVariable(Scope* global_scope, Handle<Context> context, VariableProxy* proxy); @@ -511,7 +419,9 @@ class Scope: public ZoneObject { Handle<Context> context); // Scope analysis. - bool PropagateScopeInfo(bool outer_scope_calls_non_strict_eval); + bool PropagateScopeInfo(bool outer_scope_calls_eval, + bool outer_scope_calls_non_strict_eval, + bool outer_scope_is_eval_scope); bool HasTrivialContext() const; // Predicates. @@ -528,10 +438,8 @@ class Scope: public ZoneObject { void AllocateVariablesRecursively(); private: - // Construct a scope based on the scope info. - Scope(Scope* inner_scope, - ScopeType type, - Handle<SerializedScopeInfo> scope_info); + // Construct a function or block scope based on the scope info. + Scope(Scope* inner_scope, Type type, Handle<SerializedScopeInfo> scope_info); // Construct a catch scope with a binding for the name. Scope(Scope* inner_scope, Handle<String> catch_variable_name); @@ -543,7 +451,7 @@ class Scope: public ZoneObject { } } - void SetDefaults(ScopeType type, + void SetDefaults(Type type, Scope* outer_scope, Handle<SerializedScopeInfo> scope_info); }; diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc index ba7b2a518..84ab94a97 100644 --- a/deps/v8/src/serialize.cc +++ b/deps/v8/src/serialize.cc @@ -318,10 +318,10 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { // Miscellaneous - Add(ExternalReference::roots_array_start(isolate).address(), + Add(ExternalReference::roots_address(isolate).address(), UNCLASSIFIED, 3, - "Heap::roots_array_start()"); + "Heap::roots_address()"); Add(ExternalReference::address_of_stack_limit(isolate).address(), UNCLASSIFIED, 4, @@ -490,10 +490,6 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { UNCLASSIFIED, 44, "canonical_nan"); - Add(ExternalReference::address_of_the_hole_nan().address(), - UNCLASSIFIED, - 45, - "the_hole_nan"); } @@ -757,13 +753,8 @@ static const int kUnknownOffsetFromStart = -1; void Deserializer::ReadChunk(Object** current, Object** limit, int source_space, - Address current_object_address) { + Address address) { Isolate* const isolate = isolate_; - bool write_barrier_needed = (current_object_address != NULL && - source_space != NEW_SPACE && - source_space != CELL_SPACE && - source_space != CODE_SPACE && - source_space != OLD_DATA_SPACE); while (current < limit) { int data = source_->Get(); switch (data) { @@ -783,7 +774,9 @@ void Deserializer::ReadChunk(Object** current, if (where == kNewObject && how == kPlain && within == kStartOfObject) {\ ASSIGN_DEST_SPACE(space_number) \ ReadObject(space_number, dest_space, current); \ - emit_write_barrier = (space_number == NEW_SPACE); \ + emit_write_barrier = (space_number == NEW_SPACE && \ + source_space != NEW_SPACE && \ + source_space != CELL_SPACE); \ } else { \ Object* new_object = NULL; /* May not be a real Object pointer. 
*/ \ if (where == kNewObject) { \ @@ -791,25 +784,27 @@ void Deserializer::ReadChunk(Object** current, ReadObject(space_number, dest_space, &new_object); \ } else if (where == kRootArray) { \ int root_id = source_->GetInt(); \ - new_object = isolate->heap()->roots_array_start()[root_id]; \ - emit_write_barrier = isolate->heap()->InNewSpace(new_object); \ + new_object = isolate->heap()->roots_address()[root_id]; \ } else if (where == kPartialSnapshotCache) { \ int cache_index = source_->GetInt(); \ new_object = isolate->serialize_partial_snapshot_cache() \ [cache_index]; \ - emit_write_barrier = isolate->heap()->InNewSpace(new_object); \ } else if (where == kExternalReference) { \ int reference_id = source_->GetInt(); \ Address address = external_reference_decoder_-> \ Decode(reference_id); \ new_object = reinterpret_cast<Object*>(address); \ } else if (where == kBackref) { \ - emit_write_barrier = (space_number == NEW_SPACE); \ + emit_write_barrier = (space_number == NEW_SPACE && \ + source_space != NEW_SPACE && \ + source_space != CELL_SPACE); \ new_object = GetAddressFromEnd(data & kSpaceMask); \ } else { \ ASSERT(where == kFromStart); \ if (offset_from_start == kUnknownOffsetFromStart) { \ - emit_write_barrier = (space_number == NEW_SPACE); \ + emit_write_barrier = (space_number == NEW_SPACE && \ + source_space != NEW_SPACE && \ + source_space != CELL_SPACE); \ new_object = GetAddressFromStart(data & kSpaceMask); \ } else { \ Address object_address = pages_[space_number][0] + \ @@ -836,14 +831,12 @@ void Deserializer::ReadChunk(Object** current, *current = new_object; \ } \ } \ - if (emit_write_barrier && write_barrier_needed) { \ - Address current_address = reinterpret_cast<Address>(current); \ - isolate->heap()->RecordWrite( \ - current_object_address, \ - static_cast<int>(current_address - current_object_address)); \ + if (emit_write_barrier) { \ + isolate->heap()->RecordWrite(address, static_cast<int>( \ + reinterpret_cast<Address>(current) - address)); \ } \ if (!current_was_incremented) { \ - current++; \ + current++; /* Increment current if it wasn't done above. */ \ } \ break; \ } \ @@ -890,17 +883,11 @@ void Deserializer::ReadChunk(Object** current, CASE_STATEMENT(where, how, within, kLargeCode) \ CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart) -#define FOUR_CASES(byte_code) \ - case byte_code: \ - case byte_code + 1: \ - case byte_code + 2: \ - case byte_code + 3: - -#define SIXTEEN_CASES(byte_code) \ - FOUR_CASES(byte_code) \ - FOUR_CASES(byte_code + 4) \ - FOUR_CASES(byte_code + 8) \ - FOUR_CASES(byte_code + 12) +#define EMIT_COMMON_REFERENCE_PATTERNS(pseudo_space_number, \ + space_number, \ + offset_from_start) \ + CASE_STATEMENT(kFromStart, kPlain, kStartOfObject, pseudo_space_number) \ + CASE_BODY(kFromStart, kPlain, kStartOfObject, space_number, offset_from_start) // We generate 15 cases and bodies that process special tags that combine // the raw data tag and the length into one byte. 
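In the restored ReadChunk the write-barrier decision is made per reference: a write is recorded only when the referenced object lives in new space and the object being filled in lives outside both new space and cell space, whereas the removed 3.7.1 code precomputed a single write_barrier_needed flag per target object and combined it with an InNewSpace check on the referenced value. The restored rule, restated as a small predicate (the space ids are stand-ins; "source" is the object being deserialized into, "target" is the object being pointed to):

    // Not V8 code: the per-reference write-barrier condition restored above.
    enum SpaceId { NEW_SPACE, OLD_POINTER_SPACE, OLD_DATA_SPACE, CODE_SPACE, CELL_SPACE };

    inline bool EmitWriteBarrier(SpaceId source_space, SpaceId target_space) {
      return target_space == NEW_SPACE &&
             source_space != NEW_SPACE &&
             source_space != CELL_SPACE;
    }

    int main() {
      // A new-space object referenced from an old-pointer-space object needs a
      // recorded write; a reference from another new-space object does not.
      bool a = EmitWriteBarrier(OLD_POINTER_SPACE, NEW_SPACE);   // true
      bool b = EmitWriteBarrier(NEW_SPACE, NEW_SPACE);           // false
      return (a && !b) ? 0 : 1;
    }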
@@ -924,38 +911,6 @@ void Deserializer::ReadChunk(Object** current, break; } - SIXTEEN_CASES(kRootArrayLowConstants) - SIXTEEN_CASES(kRootArrayHighConstants) { - int root_id = RootArrayConstantFromByteCode(data); - Object* object = isolate->heap()->roots_array_start()[root_id]; - ASSERT(!isolate->heap()->InNewSpace(object)); - *current++ = object; - break; - } - - case kRepeat: { - int repeats = source_->GetInt(); - Object* object = current[-1]; - ASSERT(!isolate->heap()->InNewSpace(object)); - for (int i = 0; i < repeats; i++) current[i] = object; - current += repeats; - break; - } - - STATIC_ASSERT(kRootArrayNumberOfConstantEncodings == - Heap::kOldSpaceRoots); - STATIC_ASSERT(kMaxRepeats == 12); - FOUR_CASES(kConstantRepeat) - FOUR_CASES(kConstantRepeat + 4) - FOUR_CASES(kConstantRepeat + 8) { - int repeats = RepeatsForCode(data); - Object* object = current[-1]; - ASSERT(!isolate->heap()->InNewSpace(object)); - for (int i = 0; i < repeats; i++) current[i] = object; - current += repeats; - break; - } - // Deserialize a new object and write a pointer to it to the current // object. ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject) @@ -981,6 +936,9 @@ void Deserializer::ReadChunk(Object** current, // start and write a pointer to its first instruction to the current code // object. ALL_SPACES(kFromStart, kFromCode, kFirstInstruction) + // Find an already deserialized object at one of the predetermined popular + // offsets from the start and write a pointer to it in the current object. + COMMON_REFERENCE_PATTERNS(EMIT_COMMON_REFERENCE_PATTERNS) // Find an object in the roots array and write a pointer to it to the // current object. CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0) @@ -1022,6 +980,7 @@ void Deserializer::ReadChunk(Object** current, #undef CASE_BODY #undef ONE_PER_SPACE #undef ALL_SPACES +#undef EMIT_COMMON_REFERENCE_PATTERNS #undef ASSIGN_DEST_SPACE case kNewPage: { @@ -1108,8 +1067,7 @@ Serializer::Serializer(SnapshotByteSink* sink) : sink_(sink), current_root_index_(0), external_reference_encoder_(new ExternalReferenceEncoder), - large_object_total_(0), - root_index_wave_front_(0) { + large_object_total_(0) { // The serializer is meant to be used only to generate initial heap images // from a context in which there is only one isolate. 
ASSERT(Isolate::Current()->IsDefaultIsolate()); @@ -1166,10 +1124,6 @@ void Serializer::VisitPointers(Object** start, Object** end) { Isolate* isolate = Isolate::Current(); for (Object** current = start; current < end; current++) { - if (start == isolate->heap()->roots_array_start()) { - root_index_wave_front_ = - Max(root_index_wave_front_, static_cast<intptr_t>(current - start)); - } if (reinterpret_cast<Address>(current) == isolate->heap()->store_buffer()->TopAddress()) { sink_->Put(kSkip, "Skip"); @@ -1237,12 +1191,10 @@ int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) { } -int Serializer::RootIndex(HeapObject* heap_object) { - Heap* heap = HEAP; - if (heap->InNewSpace(heap_object)) return kInvalidRootIndex; - for (int i = 0; i < root_index_wave_front_; i++) { - Object* root = heap->roots_array_start()[i]; - if (!root->IsSmi() && root == heap_object) return i; +int PartialSerializer::RootIndex(HeapObject* heap_object) { + for (int i = 0; i < Heap::kRootListLength; i++) { + Object* root = HEAP->roots_address()[i]; + if (root == heap_object) return i; } return kInvalidRootIndex; } @@ -1278,8 +1230,18 @@ void Serializer::SerializeReferenceToPreviousObject( // all objects) then we should shift out the bits that are always 0. if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits; if (from_start) { - sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer"); - sink_->PutInt(address, "address"); +#define COMMON_REFS_CASE(pseudo_space, actual_space, offset) \ + if (space == actual_space && address == offset && \ + how_to_code == kPlain && where_to_point == kStartOfObject) { \ + sink_->Put(kFromStart + how_to_code + where_to_point + \ + pseudo_space, "RefSer"); \ + } else /* NOLINT */ + COMMON_REFERENCE_PATTERNS(COMMON_REFS_CASE) +#undef COMMON_REFS_CASE + { /* NOLINT */ + sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer"); + sink_->PutInt(address, "address"); + } } else { sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer"); sink_->PutInt(address, "address"); @@ -1294,12 +1256,6 @@ void StartupSerializer::SerializeObject( CHECK(o->IsHeapObject()); HeapObject* heap_object = HeapObject::cast(o); - int root_index; - if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) { - PutRoot(root_index, heap_object, how_to_code, where_to_point); - return; - } - if (address_mapper_.IsMapped(heap_object)) { int space = SpaceOfAlreadySerializedObject(heap_object); int address = address_mapper_.MappedTo(heap_object); @@ -1330,28 +1286,6 @@ void StartupSerializer::SerializeWeakReferences() { } -void Serializer::PutRoot(int root_index, - HeapObject* object, - SerializerDeserializer::HowToCode how_to_code, - SerializerDeserializer::WhereToPoint where_to_point) { - if (how_to_code == kPlain && - where_to_point == kStartOfObject && - root_index < kRootArrayNumberOfConstantEncodings && - !HEAP->InNewSpace(object)) { - if (root_index < kRootArrayNumberOfLowConstantEncodings) { - sink_->Put(kRootArrayLowConstants + root_index, "RootLoConstant"); - } else { - sink_->Put(kRootArrayHighConstants + root_index - - kRootArrayNumberOfLowConstantEncodings, - "RootHiConstant"); - } - } else { - sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); - sink_->PutInt(root_index, "root_index"); - } -} - - void PartialSerializer::SerializeObject( Object* o, HowToCode how_to_code, @@ -1361,7 +1295,8 @@ void PartialSerializer::SerializeObject( int root_index; if ((root_index = RootIndex(heap_object)) != 
kInvalidRootIndex) { - PutRoot(root_index, heap_object, how_to_code, where_to_point); + sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization"); + sink_->PutInt(root_index, "root_index"); return; } @@ -1439,33 +1374,9 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start, if (current < end) OutputRawData(reinterpret_cast<Address>(current)); while (current < end && !(*current)->IsSmi()) { - HeapObject* current_contents = HeapObject::cast(*current); - int root_index = serializer_->RootIndex(current_contents); - // Repeats are not subject to the write barrier so there are only some - // objects that can be used in a repeat encoding. These are the early - // ones in the root array that are never in new space. - if (current != start && - root_index != kInvalidRootIndex && - root_index < kRootArrayNumberOfConstantEncodings && - current_contents == current[-1]) { - ASSERT(!HEAP->InNewSpace(current_contents)); - int repeat_count = 1; - while (current < end - 1 && current[repeat_count] == current_contents) { - repeat_count++; - } - current += repeat_count; - bytes_processed_so_far_ += repeat_count * kPointerSize; - if (repeat_count > kMaxRepeats) { - sink_->Put(kRepeat, "SerializeRepeats"); - sink_->PutInt(repeat_count, "SerializeRepeats"); - } else { - sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats"); - } - } else { - serializer_->SerializeObject(current_contents, kPlain, kStartOfObject); - bytes_processed_so_far_ += kPointerSize; - current++; - } + serializer_->SerializeObject(*current, kPlain, kStartOfObject); + bytes_processed_so_far_ += kPointerSize; + current++; } } } diff --git a/deps/v8/src/serialize.h b/deps/v8/src/serialize.h index 49695ec96..c07092332 100644 --- a/deps/v8/src/serialize.h +++ b/deps/v8/src/serialize.h @@ -187,6 +187,24 @@ class SnapshotByteSource { }; +// It is very common to have a reference to objects at certain offsets in the +// heap. These offsets have been determined experimentally. We code +// references to such objects in a single byte that encodes the way the pointer +// is written (only plain pointers allowed), the space number and the offset. +// This only works for objects in the first page of a space. Don't use this for +// things in newspace since it bypasses the write barrier. + +static const int k64 = (sizeof(uintptr_t) - 4) / 4; + +#define COMMON_REFERENCE_PATTERNS(f) \ + f(kNumberOfSpaces, 2, (11 - k64)) \ + f((kNumberOfSpaces + 1), 2, 0) \ + f((kNumberOfSpaces + 2), 2, (142 - 16 * k64)) \ + f((kNumberOfSpaces + 3), 2, (74 - 15 * k64)) \ + f((kNumberOfSpaces + 4), 2, 5) \ + f((kNumberOfSpaces + 5), 1, 135) \ + f((kNumberOfSpaces + 6), 2, (228 - 39 * k64)) + #define COMMON_RAW_LENGTHS(f) \ f(1, 1) \ f(2, 2) \ @@ -224,7 +242,7 @@ class SerializerDeserializer: public ObjectVisitor { // 0xd-0xf Free. kBackref = 0x10, // Object is described relative to end. // 0x11-0x18 One per space. - // 0x19-0x1f Free. + // 0x19-0x1f Common backref offsets. kFromStart = 0x20, // Object is described relative to start. // 0x21-0x28 One per space. // 0x29-0x2f Free. @@ -261,29 +279,9 @@ class SerializerDeserializer: public ObjectVisitor { // is referred to from external strings in the snapshot. static const int kNativesStringResource = 0x71; static const int kNewPage = 0x72; - static const int kRepeat = 0x73; - static const int kConstantRepeat = 0x74; - // 0x74-0x7f Repeat last word (subtract 0x73 to get the count). 
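The restored COMMON_REFERENCE_PATTERNS table gives a handful of experimentally chosen (space, offset) pairs in the first page of a space their own one-byte kFromStart codes, so references to those popular objects need no explicit offset; k64 corrects the word offsets for 64-bit builds. A self-contained sketch of that table (the space numbers and offsets are copied from the hunk above; the struct and loop are illustrative only):

    // Not V8 code: the common-reference table restored in serialize.h.
    #include <cstdint>
    #include <cstdio>

    static const int k64 = (sizeof(uintptr_t) - 4) / 4;   // 0 on 32-bit, 1 on 64-bit

    struct CommonRef { int space; int word_offset; };

    // Index i corresponds to the pseudo-space byte code kNumberOfSpaces + i.
    static const CommonRef kCommonRefs[] = {
      {2, 11 - k64},
      {2, 0},
      {2, 142 - 16 * k64},
      {2, 74 - 15 * k64},
      {2, 5},
      {1, 135},
      {2, 228 - 39 * k64},
    };

    int main() {
      // A one-byte kFromStart code whose pseudo-space is >= kNumberOfSpaces
      // selects one of these entries instead of carrying an explicit offset.
      for (const CommonRef& r : kCommonRefs)
        std::printf("space %d, offset %d words\n", r.space, r.word_offset);
      return 0;
    }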
- static const int kMaxRepeats = 0x7f - 0x73; - static int CodeForRepeats(int repeats) { - ASSERT(repeats >= 1 && repeats <= kMaxRepeats); - return 0x73 + repeats; - } - static int RepeatsForCode(int byte_code) { - ASSERT(byte_code >= kConstantRepeat && byte_code <= 0x7f); - return byte_code - 0x73; - } - static const int kRootArrayLowConstants = 0xb0; - // 0xb0-0xbf Things from the first 16 elements of the root array. - static const int kRootArrayHighConstants = 0xf0; - // 0xf0-0xff Things from the next 16 elements of the root array. - static const int kRootArrayNumberOfConstantEncodings = 0x20; - static const int kRootArrayNumberOfLowConstantEncodings = 0x10; - static int RootArrayConstantFromByteCode(int byte_code) { - int constant = (byte_code & 0xf) | ((byte_code & 0x40) >> 2); - ASSERT(constant >= 0 && constant < kRootArrayNumberOfConstantEncodings); - return constant; - } + // 0x73-0x7f Free. + // 0xb0-0xbf Free. + // 0xf0-0xff Free. static const int kLargeData = LAST_SPACE; @@ -356,13 +354,7 @@ class Deserializer: public SerializerDeserializer { UNREACHABLE(); } - // Fills in some heap data in an area from start to end (non-inclusive). The - // space id is used for the write barrier. The object_address is the address - // of the object we are writing into, or NULL if we are not writing into an - // object, ie if we are writing a series of tagged values that are not on the - // heap. - void ReadChunk( - Object** start, Object** end, int space, Address object_address); + void ReadChunk(Object** start, Object** end, int space, Address address); HeapObject* GetAddressFromStart(int space); inline HeapObject* GetAddressFromEnd(int space); Address Allocate(int space_number, Space* space, int size); @@ -483,22 +475,14 @@ class Serializer : public SerializerDeserializer { static void TooLateToEnableNow() { too_late_to_enable_now_ = true; } static bool enabled() { return serialization_enabled_; } SerializationAddressMapper* address_mapper() { return &address_mapper_; } - void PutRoot( - int index, HeapObject* object, HowToCode how, WhereToPoint where); #ifdef DEBUG virtual void Synchronize(const char* tag); #endif protected: static const int kInvalidRootIndex = -1; - - int RootIndex(HeapObject* heap_object); + virtual int RootIndex(HeapObject* heap_object) = 0; virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) = 0; - intptr_t root_index_wave_front() { return root_index_wave_front_; } - void set_root_index_wave_front(intptr_t value) { - ASSERT(value >= root_index_wave_front_); - root_index_wave_front_ = value; - } class ObjectSerializer : public ObjectVisitor { public: @@ -574,7 +558,6 @@ class Serializer : public SerializerDeserializer { static bool too_late_to_enable_now_; int large_object_total_; SerializationAddressMapper address_mapper_; - intptr_t root_index_wave_front_; friend class ObjectSerializer; friend class Deserializer; @@ -589,7 +572,6 @@ class PartialSerializer : public Serializer { SnapshotByteSink* sink) : Serializer(sink), startup_serializer_(startup_snapshot_serializer) { - set_root_index_wave_front(Heap::kStrongRootListLength); } // Serialize the objects reachable from a single object pointer. @@ -599,6 +581,7 @@ class PartialSerializer : public Serializer { WhereToPoint where_to_point); protected: + virtual int RootIndex(HeapObject* o); virtual int PartialSnapshotCacheIndex(HeapObject* o); virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) { // Scripts should be referred only through shared function infos. 
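The byte codes deleted here are the 3.7.1 encodings this revert drops: 0x74-0x7f carry a small repeat count in the byte itself (0x73 reads the count from the stream), and 0xb0-0xbf / 0xf0-0xff name the first 32 root-array entries directly. Their arithmetic, restated as stand-alone helpers with a couple of worked examples (constants copied from the removed lines; the assertions mirror the originals):

    // Not V8 code: the repeat and root-array-constant byte-code arithmetic
    // removed by this revert.
    #include <cassert>
    #include <cstdio>

    const int kRepeat = 0x73;                 // repeat count follows as an int
    const int kConstantRepeat = 0x74;         // 0x74-0x7f: count is in the byte itself
    const int kMaxRepeats = 0x7f - 0x73;      // 12
    const int kRootArrayLowConstants = 0xb0;  // roots 0-15
    const int kRootArrayHighConstants = 0xf0; // roots 16-31
    const int kRootArrayNumberOfConstantEncodings = 0x20;

    int CodeForRepeats(int repeats) {
      assert(repeats >= 1 && repeats <= kMaxRepeats);
      return 0x73 + repeats;
    }

    int RepeatsForCode(int byte_code) {
      assert(byte_code >= kConstantRepeat && byte_code <= 0x7f);
      return byte_code - 0x73;
    }

    int RootArrayConstantFromByteCode(int byte_code) {
      // The low nibble gives bits 0-3 of the root index and the 0x40 bit gives
      // bit 4, so 0xb0-0xbf map to 0-15 and 0xf0-0xff map to 16-31.
      int constant = (byte_code & 0xf) | ((byte_code & 0x40) >> 2);
      assert(constant >= 0 && constant < kRootArrayNumberOfConstantEncodings);
      return constant;
    }

    int main() {
      std::printf("0x76 -> %d repeats\n", RepeatsForCode(0x76));               // 3
      std::printf("0xb5 -> root %d\n", RootArrayConstantFromByteCode(0xb5));   // 5
      std::printf("0xf2 -> root %d\n", RootArrayConstantFromByteCode(0xf2));   // 18
      return 0;
    }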
We can't @@ -623,7 +606,7 @@ class StartupSerializer : public Serializer { explicit StartupSerializer(SnapshotByteSink* sink) : Serializer(sink) { // Clear the cache of objects used by the partial snapshot. After the // strong roots have been serialized we can create a partial snapshot - // which will repopulate the cache with objects needed by that partial + // which will repopulate the cache with objects neede by that partial // snapshot. Isolate::Current()->set_serialize_partial_snapshot_cache_length(0); } @@ -642,6 +625,7 @@ class StartupSerializer : public Serializer { } private: + virtual int RootIndex(HeapObject* o) { return kInvalidRootIndex; } virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) { return false; } diff --git a/deps/v8/src/spaces-inl.h b/deps/v8/src/spaces-inl.h index 1973b3a35..d9e6053ad 100644 --- a/deps/v8/src/spaces-inl.h +++ b/deps/v8/src/spaces-inl.h @@ -257,12 +257,16 @@ HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { if (new_top > allocation_info_.limit) return NULL; allocation_info_.top = new_top; + ASSERT(allocation_info_.VerifyPagedAllocation()); + ASSERT(current_top != NULL); return HeapObject::FromAddress(current_top); } // Raw allocation. MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { + ASSERT(HasBeenSetup()); + ASSERT_OBJECT_SIZE(size_in_bytes); HeapObject* object = AllocateLinearly(size_in_bytes); if (object != NULL) { if (identity() == CODE_SPACE) { diff --git a/deps/v8/src/spaces.cc b/deps/v8/src/spaces.cc index f467f710c..61b318118 100644 --- a/deps/v8/src/spaces.cc +++ b/deps/v8/src/spaces.cc @@ -95,6 +95,10 @@ void HeapObjectIterator::Initialize(PagedSpace* space, cur_end_ = end; page_mode_ = mode; size_func_ = size_f; + +#ifdef DEBUG + Verify(); +#endif } @@ -119,6 +123,13 @@ bool HeapObjectIterator::AdvanceToNextPage() { } +#ifdef DEBUG +void HeapObjectIterator::Verify() { + // TODO(gc): We should do something here. +} +#endif + + // ----------------------------------------------------------------------------- // CodeRange @@ -1898,24 +1909,11 @@ intptr_t FreeList::SumFreeLists() { bool NewSpace::ReserveSpace(int bytes) { // We can't reliably unpack a partial snapshot that needs more new space - // space than the minimum NewSpace size. The limit can be set lower than - // the end of new space either because there is more space on the next page - // or because we have lowered the limit in order to get periodic incremental - // marking. The most reliable way to ensure that there is linear space is - // to do the allocation, then rewind the limit. + // space than the minimum NewSpace size. ASSERT(bytes <= InitialCapacity()); - MaybeObject* maybe = AllocateRawInternal(bytes); - Object* object = NULL; - if (!maybe->ToObject(&object)) return false; - HeapObject* allocation = HeapObject::cast(object); + Address limit = allocation_info_.limit; Address top = allocation_info_.top; - if ((top - bytes) == allocation->address()) { - allocation_info_.top = allocation->address(); - return true; - } - // There may be a borderline case here where the allocation succeeded, but - // the limit and top have moved on to a new page. In that case we try again. 
- return ReserveSpace(bytes); + return limit - top >= bytes; } @@ -2280,11 +2278,8 @@ HeapObject* LargeObjectIterator::Next() { // ----------------------------------------------------------------------------- // LargeObjectSpace -LargeObjectSpace::LargeObjectSpace(Heap* heap, - intptr_t max_capacity, - AllocationSpace id) +LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id) : Space(heap, id, NOT_EXECUTABLE), // Managed on a per-allocation basis - max_capacity_(max_capacity), first_page_(NULL), size_(0), page_count_(0), @@ -2324,10 +2319,6 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size, return Failure::RetryAfterGC(identity()); } - if (Size() + object_size > max_capacity_) { - return Failure::RetryAfterGC(identity()); - } - LargePage* page = heap()->isolate()->memory_allocator()-> AllocateLargePage(object_size, executable, this); if (page == NULL) return Failure::RetryAfterGC(identity()); diff --git a/deps/v8/src/spaces.h b/deps/v8/src/spaces.h index 45e008c00..ce8e382aa 100644 --- a/deps/v8/src/spaces.h +++ b/deps/v8/src/spaces.h @@ -459,6 +459,7 @@ class MemoryChunk { live_byte_count_ = 0; } void IncrementLiveBytes(int by) { + ASSERT_LE(static_cast<unsigned>(live_byte_count_), size_); if (FLAG_gc_verbose) { printf("UpdateLiveBytes:%p:%x%c=%x->%x\n", static_cast<void*>(this), live_byte_count_, @@ -641,6 +642,7 @@ class Page : public MemoryChunk { // [page_addr + kObjectStartOffset .. page_addr + kPageSize]. INLINE(static Page* FromAllocationTop(Address top)) { Page* p = FromAddress(top - kPointerSize); + ASSERT_PAGE_OFFSET(p->Offset(top)); return p; } @@ -664,6 +666,7 @@ class Page : public MemoryChunk { // Returns the offset of a given address to this page. INLINE(int Offset(Address a)) { int offset = static_cast<int>(a - address()); + ASSERT_PAGE_OFFSET(offset); return offset; } @@ -1131,6 +1134,11 @@ class HeapObjectIterator: public ObjectIterator { Address end, PageMode mode, HeapObjectCallback size_func); + +#ifdef DEBUG + // Verifies whether fields have valid values. + void Verify(); +#endif }; @@ -1733,6 +1741,7 @@ class NewSpacePage : public MemoryChunk { reinterpret_cast<Address>(reinterpret_cast<uintptr_t>(address_in_page) & ~Page::kPageAlignmentMask); NewSpacePage* page = reinterpret_cast<NewSpacePage*>(page_start); + ASSERT(page->InNewSpace()); return page; } @@ -1809,6 +1818,7 @@ class SemiSpace : public Space { // Returns the start address of the current page of the space. Address page_low() { + ASSERT(anchor_.next_page() != &anchor_); return current_page_->body(); } @@ -2074,7 +2084,7 @@ class NewSpace : public Space { // Return the current capacity of a semispace. intptr_t EffectiveCapacity() { - SLOW_ASSERT(to_space_.Capacity() == from_space_.Capacity()); + ASSERT(to_space_.Capacity() == from_space_.Capacity()); return (to_space_.Capacity() / Page::kPageSize) * Page::kObjectAreaSize; } @@ -2090,9 +2100,10 @@ class NewSpace : public Space { return Capacity(); } - // Return the available bytes without growing. + // Return the available bytes without growing or switching page in the + // active semispace. intptr_t Available() { - return Capacity() - Size(); + return allocation_info_.limit - allocation_info_.top; } // Return the maximum capacity of a semispace. @@ -2306,9 +2317,9 @@ class OldSpace : public PagedSpace { // For contiguous spaces, top should be in the space (or at the end) and limit // should be the end of the space. 
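The restored NewSpace::ReserveSpace simply asks whether the current linear allocation area already has room, while the removed 3.7.1 version allocated and then rewound the top pointer because the limit may sit below the end of new space (more space on the next page, or a limit lowered for incremental marking). The restored check, as a toy model over a stand-in allocation_info:

    // Not V8 code: the simple reservation check restored above.
    #include <cstdint>

    struct AllocationInfo {
      uintptr_t top;    // next free address in the active semispace page
      uintptr_t limit;  // end of the current linear allocation area
    };

    inline bool ReserveSpace(const AllocationInfo& info, int bytes) {
      return info.limit - info.top >= static_cast<uintptr_t>(bytes);
    }

    int main() {
      AllocationInfo info{/*top=*/0x1000, /*limit=*/0x1100};   // 256 bytes free
      return (ReserveSpace(info, 0x80) && !ReserveSpace(info, 0x200)) ? 0 : 1;
    }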
#define ASSERT_SEMISPACE_ALLOCATION_INFO(info, space) \ - SLOW_ASSERT((space).page_low() <= (info).top \ - && (info).top <= (space).page_high() \ - && (info).limit <= (space).page_high()) + ASSERT((space).page_low() <= (info).top \ + && (info).top <= (space).page_high() \ + && (info).limit <= (space).page_high()) // ----------------------------------------------------------------------------- @@ -2436,7 +2447,7 @@ class CellSpace : public FixedSpace { class LargeObjectSpace : public Space { public: - LargeObjectSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id); + LargeObjectSpace(Heap* heap, AllocationSpace id); virtual ~LargeObjectSpace() {} // Initializes internal data structures. @@ -2506,7 +2517,6 @@ class LargeObjectSpace : public Space { bool SlowContains(Address addr) { return !FindObject(addr)->IsFailure(); } private: - intptr_t max_capacity_; // The head of the linked list of large object chunks. LargePage* first_page_; intptr_t size_; // allocated bytes diff --git a/deps/v8/src/store-buffer-inl.h b/deps/v8/src/store-buffer-inl.h index dd65cbcc9..34f35a487 100644 --- a/deps/v8/src/store-buffer-inl.h +++ b/deps/v8/src/store-buffer-inl.h @@ -55,10 +55,10 @@ void StoreBuffer::Mark(Address addr) { void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) { if (store_buffer_rebuilding_enabled_) { - SLOW_ASSERT(!heap_->cell_space()->Contains(addr) && - !heap_->code_space()->Contains(addr) && - !heap_->old_data_space()->Contains(addr) && - !heap_->new_space()->Contains(addr)); + ASSERT(!heap_->cell_space()->Contains(addr)); + ASSERT(!heap_->code_space()->Contains(addr)); + ASSERT(!heap_->old_data_space()->Contains(addr)); + ASSERT(!heap_->new_space()->Contains(addr)); Address* top = old_top_; *top++ = addr; old_top_ = top; diff --git a/deps/v8/src/store-buffer.cc b/deps/v8/src/store-buffer.cc index 7c8b5f207..ab810e400 100644 --- a/deps/v8/src/store-buffer.cc +++ b/deps/v8/src/store-buffer.cc @@ -401,9 +401,7 @@ void StoreBuffer::Verify() { void StoreBuffer::GCEpilogue() { during_gc_ = false; - if (FLAG_verify_heap) { - Verify(); - } + Verify(); } diff --git a/deps/v8/src/stub-cache.cc b/deps/v8/src/stub-cache.cc index 139bc2dcf..67451f2b8 100644 --- a/deps/v8/src/stub-cache.cc +++ b/deps/v8/src/stub-cache.cc @@ -109,8 +109,8 @@ Code* StubCache::Set(String* name, Map* map, Code* code) { } -Handle<Code> StubCache::ComputeLoadNonexistent(Handle<String> name, - Handle<JSObject> receiver) { +MaybeObject* StubCache::ComputeLoadNonexistent(String* name, + JSObject* receiver) { ASSERT(receiver->IsGlobalObject() || receiver->HasFastProperties()); // If no global objects are present in the prototype chain, the load // nonexistent IC stub can be shared for all names for a given map @@ -118,328 +118,385 @@ Handle<Code> StubCache::ComputeLoadNonexistent(Handle<String> name, // there are global objects involved, we need to check global // property cells in the stub and therefore the stub will be // specific to the name. - Handle<String> cache_name = factory()->empty_string(); + String* cache_name = heap()->empty_string(); if (receiver->IsGlobalObject()) cache_name = name; - Handle<JSObject> last = receiver; + JSObject* last = receiver; while (last->GetPrototype() != heap()->null_value()) { - last = Handle<JSObject>(JSObject::cast(last->GetPrototype())); + last = JSObject::cast(last->GetPrototype()); if (last->IsGlobalObject()) cache_name = name; } // Compile the stub that is either shared for all names or // name specific if there are global objects involved. 
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NONEXISTENT); - Handle<Object> probe(receiver->map()->FindInCodeCache(*cache_name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - LoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadNonexistent(cache_name, receiver, last); - PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *cache_name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *cache_name, *code)); - JSObject::UpdateMapCodeCache(receiver, cache_name, code); + Object* code = receiver->map()->FindInCodeCache(cache_name, flags); + if (code->IsUndefined()) { + LoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadNonexistent(cache_name, receiver, last); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), cache_name)); + GDBJIT(AddCode(GDBJITInterface::LOAD_IC, cache_name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(cache_name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, +MaybeObject* StubCache::ComputeLoadField(String* name, + JSObject* receiver, + JSObject* holder, int field_index) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, FIELD); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - LoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadField(receiver, holder, field_index, name); - PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + LoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadField(receiver, holder, field_index, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> LoadStubCompiler::CompileLoadCallback( - Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<AccessorInfo> callback) { - CALL_HEAP_FUNCTION(isolate(), - (set_failure(NULL), - CompileLoadCallback(*name, *object, *holder, *callback)), - Code); -} - - -Handle<Code> StubCache::ComputeLoadCallback(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<AccessorInfo> callback) { +MaybeObject* StubCache::ComputeLoadCallback(String* name, + JSObject* receiver, + JSObject* holder, + AccessorInfo* callback) { ASSERT(v8::ToCData<Address>(callback->getter()) != 0); - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, CALLBACKS); - 
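Throughout stub-cache.cc the revert swaps the handlified Handle<Code> interfaces back to raw MaybeObject* ones, so every allocating step is unwrapped explicitly and an allocation failure is returned unchanged for the caller to retry after GC. A compact stand-in for that idiom (minimal MaybeObject/Object/Failure classes that only mirror the base-class relationship the real ones rely on; CompileStub and UpdateCache are hypothetical helpers, not V8 functions):

    // Not V8 code: a stand-in for the MaybeObject error-propagation idiom.
    #include <cstdio>

    struct Object;

    struct MaybeObject {
      virtual ~MaybeObject() {}
      virtual bool IsFailure() const { return false; }
      bool ToObject(Object** out);   // false on allocation failure
    };

    struct Object : MaybeObject {};

    struct Failure : MaybeObject {
      bool IsFailure() const override { return true; }
    };

    bool MaybeObject::ToObject(Object** out) {
      if (IsFailure()) return false;
      *out = static_cast<Object*>(this);
      return true;
    }

    static Object the_code;          // stands in for a compiled Code object
    static Failure retry_after_gc;   // stands in for a retry-after-GC failure

    // Hypothetical allocating steps: either may hand back a failure.
    MaybeObject* CompileStub(bool fail) {
      if (fail) return &retry_after_gc;
      return &the_code;
    }
    MaybeObject* UpdateCache(Object* /*code*/) { return &the_code; }

    // The shape restored in every StubCache::Compute* above and below: unwrap
    // each step with ToObject() and pass a failure straight back to the caller.
    MaybeObject* ComputeStub(bool fail) {
      Object* code = nullptr;
      { MaybeObject* maybe_code = CompileStub(fail);
        if (!maybe_code->ToObject(&code)) return maybe_code;
      }
      Object* result = nullptr;
      { MaybeObject* maybe_result = UpdateCache(code);
        if (!maybe_result->ToObject(&result)) return maybe_result;
      }
      return code;   // an Object* is a successful MaybeObject*
    }

    int main() {
      std::printf("success path is failure? %d\n", ComputeStub(false)->IsFailure());
      std::printf("failed allocation is failure? %d\n", ComputeStub(true)->IsFailure());
      return 0;
    }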
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - LoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadCallback(name, receiver, holder, callback); - PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + LoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadCallback(name, receiver, holder, callback); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeLoadConstant(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); +MaybeObject* StubCache::ComputeLoadConstant(String* name, + JSObject* receiver, + JSObject* holder, + Object* value) { + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, CONSTANT_FUNCTION); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - LoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadConstant(receiver, holder, value, name); - PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + LoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadConstant(receiver, holder, value, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name) { - CALL_HEAP_FUNCTION(isolate(), - (set_failure(NULL), - CompileLoadInterceptor(*object, *holder, *name)), - Code); -} - - -Handle<Code> StubCache::ComputeLoadInterceptor(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); +MaybeObject* StubCache::ComputeLoadInterceptor(String* name, + JSObject* receiver, + JSObject* holder) { + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, INTERCEPTOR); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - LoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadInterceptor(receiver, holder, 
name); - PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + LoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadInterceptor(receiver, holder, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeLoadNormal() { - return isolate_->builtins()->LoadIC_Normal(); +MaybeObject* StubCache::ComputeLoadNormal() { + return isolate_->builtins()->builtin(Builtins::kLoadIC_Normal); } -Handle<Code> StubCache::ComputeLoadGlobal(Handle<String> name, - Handle<JSObject> receiver, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, +MaybeObject* StubCache::ComputeLoadGlobal(String* name, + JSObject* receiver, + GlobalObject* holder, + JSGlobalPropertyCell* cell, bool is_dont_delete) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - LoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadGlobal(receiver, holder, cell, name, is_dont_delete); - PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + LoadStubCompiler compiler; + { MaybeObject* maybe_code = compiler.CompileLoadGlobal(receiver, + holder, + cell, + name, + is_dont_delete); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeKeyedLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, +MaybeObject* StubCache::ComputeKeyedLoadField(String* name, + JSObject* receiver, + JSObject* holder, int field_index) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, FIELD); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedLoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadField(name, receiver, holder, field_index); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + 
Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadField(name, receiver, holder, field_index); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeKeyedLoadConstant(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); +MaybeObject* StubCache::ComputeKeyedLoadConstant(String* name, + JSObject* receiver, + JSObject* holder, + Object* value) { + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CONSTANT_FUNCTION); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedLoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadConstant(name, receiver, holder, value); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadConstant(name, receiver, holder, value); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor( - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name) { - CALL_HEAP_FUNCTION(isolate(), - (set_failure(NULL), - CompileLoadInterceptor(*object, *holder, *name)), - Code); -} - - -Handle<Code> StubCache::ComputeKeyedLoadInterceptor(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); +MaybeObject* StubCache::ComputeKeyedLoadInterceptor(String* name, + JSObject* receiver, + JSObject* holder) { + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, INTERCEPTOR); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedLoadStubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileLoadInterceptor(receiver, holder, name); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* 
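Every Compute* function above and below also follows the same memoization shape: probe the receiver map's code cache keyed on (name, IC flags), run the stub compiler on a miss, then store the result back with UpdateMapCodeCache so later loads against the same map reuse the stub. A stand-alone sketch of that flow, using a std::map in place of the per-map code cache and a dummy Stub type (neither is the real data structure):

    // Not V8 code: the probe / compile-on-miss / cache-back pattern.
    #include <map>
    #include <string>
    #include <utility>

    using Flags = int;
    struct Stub { std::string description; };

    static std::map<std::pair<std::string, Flags>, Stub> code_cache;

    Stub CompileStub(const std::string& name, Flags flags) {
      return Stub{"stub for " + name + "/" + std::to_string(flags)};
    }

    const Stub& ComputeStub(const std::string& name, Flags flags) {
      auto key = std::make_pair(name, flags);
      auto it = code_cache.find(key);           // probe: FindInCodeCache
      if (it == code_cache.end()) {
        Stub stub = CompileStub(name, flags);   // miss: run the stub compiler
        it = code_cache.emplace(key, std::move(stub)).first;  // cache it back
      }
      return it->second;                        // hit, or the freshly cached stub
    }

    int main() {
      const Stub& a = ComputeStub("length", 7); // miss: compiles and caches
      const Stub& b = ComputeStub("length", 7); // hit: same cached stub
      return (&a == &b) ? 0 : 1;
    }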
maybe_code = + compiler.CompileLoadInterceptor(receiver, holder, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback( - Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<AccessorInfo> callback) { - CALL_HEAP_FUNCTION(isolate(), - (set_failure(NULL), - CompileLoadCallback(*name, *object, *holder, *callback)), - Code); -} - - -Handle<Code> StubCache::ComputeKeyedLoadCallback( - Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<AccessorInfo> callback) { - ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP); +MaybeObject* StubCache::ComputeKeyedLoadCallback(String* name, + JSObject* receiver, + JSObject* holder, + AccessorInfo* callback) { + ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedLoadStubCompiler compiler(isolate_); - Handle<Code> code = - compiler.CompileLoadCallback(name, receiver, holder, callback); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* maybe_code = + compiler.CompileLoadCallback(name, receiver, holder, callback); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeKeyedLoadArrayLength(Handle<String> name, - Handle<JSArray> receiver) { + +MaybeObject* StubCache::ComputeKeyedLoadArrayLength(String* name, + JSArray* receiver) { Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedLoadStubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileLoadArrayLength(name); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + ASSERT(receiver->IsJSObject()); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* maybe_code = compiler.CompileLoadArrayLength(name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, 
Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeKeyedLoadStringLength(Handle<String> name, - Handle<String> receiver) { +MaybeObject* StubCache::ComputeKeyedLoadStringLength(String* name, + String* receiver) { Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); - Handle<Map> map(receiver->map()); - Handle<Object> probe(map->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedLoadStubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileLoadStringLength(name); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code)); - Map::UpdateCodeCache(map, name, code); + Map* map = receiver->map(); + Object* code = map->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* maybe_code = compiler.CompileLoadStringLength(name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = map->UpdateCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeKeyedLoadFunctionPrototype( - Handle<String> name, - Handle<JSFunction> receiver) { +MaybeObject* StubCache::ComputeKeyedLoadFunctionPrototype( + String* name, + JSFunction* receiver) { Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedLoadStubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileLoadFunctionPrototype(name); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* maybe_code = compiler.CompileLoadFunctionPrototype(name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_LOAD_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeStoreField(Handle<String> name, - Handle<JSObject> receiver, +MaybeObject* StubCache::ComputeStoreField(String* name, + JSObject* receiver, int field_index, - Handle<Map> transition, + Map* transition, StrictModeFlag strict_mode) { - PropertyType type = (transition.is_null()) ? FIELD : MAP_TRANSITION; + PropertyType type = (transition == NULL) ? 
FIELD : MAP_TRANSITION; Code::Flags flags = Code::ComputeMonomorphicFlags( Code::STORE_IC, type, strict_mode); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - StoreStubCompiler compiler(isolate_, strict_mode); - Handle<Code> code = - compiler.CompileStoreField(receiver, field_index, transition, name); - PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + StoreStubCompiler compiler(strict_mode); + { MaybeObject* maybe_code = + compiler.CompileStoreField(receiver, field_index, transition, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement( - Handle<JSObject> receiver, +MaybeObject* StubCache::ComputeKeyedLoadOrStoreElement( + JSObject* receiver, KeyedIC::StubKind stub_kind, StrictModeFlag strict_mode) { Code::Flags flags = @@ -448,159 +505,189 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement( : Code::KEYED_STORE_IC, NORMAL, strict_mode); - Handle<String> name; + String* name = NULL; switch (stub_kind) { case KeyedIC::LOAD: - name = isolate()->factory()->KeyedLoadElementMonomorphic_symbol(); + name = isolate()->heap()->KeyedLoadElementMonomorphic_symbol(); break; case KeyedIC::STORE_NO_TRANSITION: - name = isolate()->factory()->KeyedStoreElementMonomorphic_symbol(); + name = isolate()->heap()->KeyedStoreElementMonomorphic_symbol(); break; default: UNREACHABLE(); break; } - Handle<Map> receiver_map(receiver->map()); - Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); + Object* maybe_code = receiver->map()->FindInCodeCache(name, flags); + if (!maybe_code->IsUndefined()) return Code::cast(maybe_code); - Handle<Code> code; + Map* receiver_map = receiver->map(); + MaybeObject* maybe_new_code = NULL; switch (stub_kind) { case KeyedIC::LOAD: { - KeyedLoadStubCompiler compiler(isolate_); - code = compiler.CompileLoadElement(receiver_map); + KeyedLoadStubCompiler compiler; + maybe_new_code = compiler.CompileLoadElement(receiver_map); break; } case KeyedIC::STORE_NO_TRANSITION: { - KeyedStoreStubCompiler compiler(isolate_, strict_mode); - code = compiler.CompileStoreElement(receiver_map); + KeyedStoreStubCompiler compiler(strict_mode); + maybe_new_code = compiler.CompileStoreElement(receiver_map); break; } default: UNREACHABLE(); break; } - - ASSERT(!code.is_null()); + Code* code = NULL; + if (!maybe_new_code->To(&code)) return maybe_new_code; if (stub_kind == KeyedIC::LOAD) { - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, 0)); + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, + Code::cast(code), 0)); } else { - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, 0)); + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, + Code::cast(code), 0)); + } + ASSERT(code->IsCode()); + Object* result; + { MaybeObject* maybe_result = + 
receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; } - JSObject::UpdateMapCodeCache(receiver, name, code); return code; } -Handle<Code> StubCache::ComputeStoreNormal(StrictModeFlag strict_mode) { - return (strict_mode == kStrictMode) - ? isolate_->builtins()->Builtins::StoreIC_Normal_Strict() - : isolate_->builtins()->Builtins::StoreIC_Normal(); +MaybeObject* StubCache::ComputeStoreNormal(StrictModeFlag strict_mode) { + return isolate_->builtins()->builtin((strict_mode == kStrictMode) + ? Builtins::kStoreIC_Normal_Strict + : Builtins::kStoreIC_Normal); } -Handle<Code> StubCache::ComputeStoreGlobal(Handle<String> name, - Handle<GlobalObject> receiver, - Handle<JSGlobalPropertyCell> cell, +MaybeObject* StubCache::ComputeStoreGlobal(String* name, + GlobalObject* receiver, + JSGlobalPropertyCell* cell, StrictModeFlag strict_mode) { Code::Flags flags = Code::ComputeMonomorphicFlags( Code::STORE_IC, NORMAL, strict_mode); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - StoreStubCompiler compiler(isolate_, strict_mode); - Handle<Code> code = compiler.CompileStoreGlobal(receiver, cell, name); - PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + StoreStubCompiler compiler(strict_mode); + { MaybeObject* maybe_code = + compiler.CompileStoreGlobal(receiver, cell, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeStoreCallback(Handle<String> name, - Handle<JSObject> receiver, - Handle<AccessorInfo> callback, - StrictModeFlag strict_mode) { +MaybeObject* StubCache::ComputeStoreCallback( + String* name, + JSObject* receiver, + AccessorInfo* callback, + StrictModeFlag strict_mode) { ASSERT(v8::ToCData<Address>(callback->setter()) != 0); Code::Flags flags = Code::ComputeMonomorphicFlags( Code::STORE_IC, CALLBACKS, strict_mode); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - StoreStubCompiler compiler(isolate_, strict_mode); - Handle<Code> code = compiler.CompileStoreCallback(receiver, callback, name); - PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + StoreStubCompiler compiler(strict_mode); + { MaybeObject* maybe_code = + compiler.CompileStoreCallback(receiver, callback, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) 
return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeStoreInterceptor(Handle<String> name, - Handle<JSObject> receiver, - StrictModeFlag strict_mode) { +MaybeObject* StubCache::ComputeStoreInterceptor( + String* name, + JSObject* receiver, + StrictModeFlag strict_mode) { Code::Flags flags = Code::ComputeMonomorphicFlags( Code::STORE_IC, INTERCEPTOR, strict_mode); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - StoreStubCompiler compiler(isolate_, strict_mode); - Handle<Code> code = compiler.CompileStoreInterceptor(receiver, name); - PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + StoreStubCompiler compiler(strict_mode); + { MaybeObject* maybe_code = + compiler.CompileStoreInterceptor(receiver, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::STORE_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeKeyedStoreField(Handle<String> name, - Handle<JSObject> receiver, + +MaybeObject* StubCache::ComputeKeyedStoreField(String* name, + JSObject* receiver, int field_index, - Handle<Map> transition, + Map* transition, StrictModeFlag strict_mode) { - PropertyType type = (transition.is_null()) ? FIELD : MAP_TRANSITION; + PropertyType type = (transition == NULL) ? FIELD : MAP_TRANSITION; Code::Flags flags = Code::ComputeMonomorphicFlags( Code::KEYED_STORE_IC, type, strict_mode); - Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - KeyedStoreStubCompiler compiler(isolate(), strict_mode); - Handle<Code> code = - compiler.CompileStoreField(receiver, field_index, transition, name); - PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, *name, *code)); - JSObject::UpdateMapCodeCache(receiver, name, code); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedStoreStubCompiler compiler(strict_mode); + { MaybeObject* maybe_code = + compiler.CompileStoreField(receiver, field_index, transition, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate(), + CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, + Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } - #define CALL_LOGGER_TAG(kind, type) \ (kind == Code::CALL_IC ? 
Logger::type : Logger::KEYED_##type) -Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object, - Handle<JSObject> holder, - Handle<JSFunction> function, - Handle<String> name, - CheckType check) { - CALL_HEAP_FUNCTION( - isolate(), - (set_failure(NULL), - CompileCallConstant(*object, *holder, *function, *name, check)), - Code); -} - - -Handle<Code> StubCache::ComputeCallConstant(int argc, +MaybeObject* StubCache::ComputeCallConstant(int argc, Code::Kind kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<Object> object, - Handle<JSObject> holder, - Handle<JSFunction> function) { + Code::ExtraICState extra_ic_state, + String* name, + Object* object, + JSObject* holder, + JSFunction* function) { // Compute the check type and the map. InlineCacheHolderFlag cache_holder = - IC::GetCodeCacheForObject(*object, *holder); - Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*object, cache_holder)); + IC::GetCodeCacheForObject(object, holder); + JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder); // Compute check type based on receiver/holder. CheckType check = RECEIVER_MAP_CHECK; @@ -612,36 +699,51 @@ Handle<Code> StubCache::ComputeCallConstant(int argc, check = BOOLEAN_CHECK; } - Code::Flags flags = - Code::ComputeMonomorphicFlags(kind, CONSTANT_FUNCTION, extra_state, - cache_holder, argc); - Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - CallStubCompiler compiler(isolate_, argc, kind, extra_state, cache_holder); - Handle<Code> code = - compiler.CompileCallConstant(object, holder, function, name, check); - code->set_check_type(check); - ASSERT_EQ(flags, code->flags()); - PROFILE(isolate_, - CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name)); - GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code)); - JSObject::UpdateMapCodeCache(map_holder, name, code); + Code::Flags flags = Code::ComputeMonomorphicFlags(kind, + CONSTANT_FUNCTION, + extra_ic_state, + cache_holder, + argc); + Object* code = map_holder->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + // If the function hasn't been compiled yet, we cannot do it now + // because it may cause GC. To avoid this issue, we return an + // internal error which will make sure we do not update any + // caches. + if (!function->is_compiled()) return Failure::InternalError(); + // Compile the stub - only create stubs for fully compiled functions. + CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder); + { MaybeObject* maybe_code = + compiler.CompileCallConstant(object, holder, function, name, check); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + Code::cast(code)->set_check_type(check); + ASSERT_EQ(flags, Code::cast(code)->flags()); + PROFILE(isolate_, + CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), + Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + map_holder->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> StubCache::ComputeCallField(int argc, +MaybeObject* StubCache::ComputeCallField(int argc, Code::Kind kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<Object> object, - Handle<JSObject> holder, + Code::ExtraICState extra_ic_state, + String* name, + Object* object, + JSObject* holder, int index) { // Compute the check type and the map. 
InlineCacheHolderFlag cache_holder = - IC::GetCodeCacheForObject(*object, *holder); - Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*object, cache_holder)); + IC::GetCodeCacheForObject(object, holder); + JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder); // TODO(1233596): We cannot do receiver map check for non-JS objects // because they may be represented as immediates without a @@ -650,45 +752,47 @@ Handle<Code> StubCache::ComputeCallField(int argc, object = holder; } - Code::Flags flags = - Code::ComputeMonomorphicFlags(kind, FIELD, extra_state, - cache_holder, argc); - Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - CallStubCompiler compiler(isolate_, argc, kind, extra_state, cache_holder); - Handle<Code> code = - compiler.CompileCallField(Handle<JSObject>::cast(object), - holder, index, name); - ASSERT_EQ(flags, code->flags()); - PROFILE(isolate_, - CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name)); - GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code)); - JSObject::UpdateMapCodeCache(map_holder, name, code); + Code::Flags flags = Code::ComputeMonomorphicFlags(kind, + FIELD, + extra_ic_state, + cache_holder, + argc); + Object* code = map_holder->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder); + { MaybeObject* maybe_code = + compiler.CompileCallField(JSObject::cast(object), + holder, + index, + name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + ASSERT_EQ(flags, Code::cast(code)->flags()); + PROFILE(isolate_, + CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), + Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + map_holder->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name) { - CALL_HEAP_FUNCTION( - isolate(), - (set_failure(NULL), CompileCallInterceptor(*object, *holder, *name)), - Code); -} - - -Handle<Code> StubCache::ComputeCallInterceptor(int argc, - Code::Kind kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<Object> object, - Handle<JSObject> holder) { +MaybeObject* StubCache::ComputeCallInterceptor( + int argc, + Code::Kind kind, + Code::ExtraICState extra_ic_state, + String* name, + Object* object, + JSObject* holder) { // Compute the check type and the map. 
InlineCacheHolderFlag cache_holder = - IC::GetCodeCacheForObject(*object, *holder); - Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*object, cache_holder)); + IC::GetCodeCacheForObject(object, holder); + JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder); // TODO(1233596): We cannot do receiver map check for non-JS objects // because they may be represented as immediates without a @@ -697,75 +801,134 @@ Handle<Code> StubCache::ComputeCallInterceptor(int argc, object = holder; } - Code::Flags flags = - Code::ComputeMonomorphicFlags(kind, INTERCEPTOR, extra_state, - cache_holder, argc); - Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - CallStubCompiler compiler(isolate(), argc, kind, extra_state, cache_holder); - Handle<Code> code = - compiler.CompileCallInterceptor(Handle<JSObject>::cast(object), - holder, name); - ASSERT_EQ(flags, code->flags()); - PROFILE(isolate(), - CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name)); - GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code)); - JSObject::UpdateMapCodeCache(map_holder, name, code); + Code::Flags flags = Code::ComputeMonomorphicFlags(kind, + INTERCEPTOR, + extra_ic_state, + cache_holder, + argc); + Object* code = map_holder->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder); + { MaybeObject* maybe_code = + compiler.CompileCallInterceptor(JSObject::cast(object), holder, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + ASSERT_EQ(flags, Code::cast(code)->flags()); + PROFILE(isolate(), + CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), + Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + map_holder->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -Handle<Code> CallStubCompiler::CompileCallGlobal( - Handle<JSObject> object, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<JSFunction> function, - Handle<String> name) { - CALL_HEAP_FUNCTION( - isolate(), - (set_failure(NULL), - CompileCallGlobal(*object, *holder, *cell, *function, *name)), - Code); +MaybeObject* StubCache::ComputeCallNormal(int argc, + Code::Kind kind, + Code::ExtraICState extra_ic_state, + String* name, + JSObject* receiver) { + Object* code; + { MaybeObject* maybe_code = ComputeCallNormal(argc, kind, extra_ic_state); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + return code; } -Handle<Code> StubCache::ComputeCallGlobal(int argc, +MaybeObject* StubCache::ComputeCallGlobal(int argc, Code::Kind kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<JSObject> receiver, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<JSFunction> function) { + Code::ExtraICState extra_ic_state, + String* name, + JSObject* receiver, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + JSFunction* function) { InlineCacheHolderFlag cache_holder = - IC::GetCodeCacheForObject(*receiver, *holder); - Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*receiver, cache_holder)); - Code::Flags flags = - Code::ComputeMonomorphicFlags(kind, NORMAL, extra_state, - cache_holder, argc); - Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags)); - if (probe->IsCode()) return Handle<Code>::cast(probe); - - 
CallStubCompiler compiler(isolate(), argc, kind, extra_state, cache_holder); - Handle<Code> code = - compiler.CompileCallGlobal(receiver, holder, cell, function, name); - ASSERT_EQ(flags, code->flags()); - PROFILE(isolate(), - CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), *code, *name)); - GDBJIT(AddCode(GDBJITInterface::CALL_IC, *name, *code)); - JSObject::UpdateMapCodeCache(map_holder, name, code); + IC::GetCodeCacheForObject(receiver, holder); + JSObject* map_holder = IC::GetCodeCacheHolder(receiver, cache_holder); + Code::Flags flags = Code::ComputeMonomorphicFlags(kind, + NORMAL, + extra_ic_state, + cache_holder, + argc); + Object* code = map_holder->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + // If the function hasn't been compiled yet, we cannot do it now + // because it may cause GC. To avoid this issue, we return an + // internal error which will make sure we do not update any + // caches. + if (!function->is_compiled()) return Failure::InternalError(); + CallStubCompiler compiler(argc, kind, extra_ic_state, cache_holder); + { MaybeObject* maybe_code = + compiler.CompileCallGlobal(receiver, holder, cell, function, name); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + ASSERT_EQ(flags, Code::cast(code)->flags()); + PROFILE(isolate(), + CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), + Code::cast(code), name)); + GDBJIT(AddCode(GDBJITInterface::CALL_IC, name, Code::cast(code))); + Object* result; + { MaybeObject* maybe_result = + map_holder->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } return code; } -static void FillCache(Isolate* isolate, Handle<Code> code) { - Handle<NumberDictionary> dictionary = - NumberDictionarySet(isolate->factory()->non_monomorphic_cache(), - code->flags(), - code, - PropertyDetails(NONE, NORMAL)); - isolate->heap()->public_set_non_monomorphic_cache(*dictionary); +static Object* GetProbeValue(Isolate* isolate, Code::Flags flags) { + // Use raw_unchecked... so we don't get assert failures during GC. + NumberDictionary* dictionary = + isolate->heap()->raw_unchecked_non_monomorphic_cache(); + int entry = dictionary->FindEntry(isolate, flags); + if (entry != -1) return dictionary->ValueAt(entry); + return isolate->heap()->raw_unchecked_undefined_value(); +} + + +MUST_USE_RESULT static MaybeObject* ProbeCache(Isolate* isolate, + Code::Flags flags) { + Heap* heap = isolate->heap(); + Object* probe = GetProbeValue(isolate, flags); + if (probe != heap->undefined_value()) return probe; + // Seed the cache with an undefined value to make sure that any + // generated code object can always be inserted into the cache + // without causing allocation failures. + Object* result; + { MaybeObject* maybe_result = + heap->non_monomorphic_cache()->AtNumberPut(flags, + heap->undefined_value()); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + heap->public_set_non_monomorphic_cache(NumberDictionary::cast(result)); + return probe; +} + + +static MaybeObject* FillCache(Isolate* isolate, MaybeObject* maybe_code) { + Object* code; + if (maybe_code->ToObject(&code)) { + if (code->IsCode()) { + Heap* heap = isolate->heap(); + int entry = heap->non_monomorphic_cache()->FindEntry( + Code::cast(code)->flags()); + // The entry must be present see comment in ProbeCache. 
+ ASSERT(entry != -1); + ASSERT(heap->non_monomorphic_cache()->ValueAt(entry) == + heap->undefined_value()); + heap->non_monomorphic_cache()->ValueAtPut(entry, code); + CHECK(GetProbeValue(isolate, Code::cast(code)->flags()) == code); + } + } + return maybe_code; } @@ -775,198 +938,188 @@ Code* StubCache::FindCallInitialize(int argc, Code::ExtraICState extra_state = CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) | CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT); - Code::Flags flags = - Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc); - - // Use raw_unchecked... so we don't get assert failures during GC. - NumberDictionary* dictionary = - isolate()->heap()->raw_unchecked_non_monomorphic_cache(); - int entry = dictionary->FindEntry(isolate(), flags); - ASSERT(entry != -1); - Object* code = dictionary->ValueAt(entry); + Code::Flags flags = Code::ComputeFlags(kind, + UNINITIALIZED, + extra_state, + NORMAL, + argc); + Object* result = ProbeCache(isolate(), flags)->ToObjectUnchecked(); + ASSERT(result != heap()->undefined_value()); // This might be called during the marking phase of the collector // hence the unchecked cast. - return reinterpret_cast<Code*>(code); + return reinterpret_cast<Code*>(result); } -Handle<Code> StubCache::ComputeCallInitialize(int argc, +MaybeObject* StubCache::ComputeCallInitialize(int argc, RelocInfo::Mode mode, Code::Kind kind) { Code::ExtraICState extra_state = CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) | CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT); - Code::Flags flags = - Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileCallInitialize(flags); - FillCache(isolate_, code); - return code; + Code::Flags flags = Code::ComputeFlags(kind, + UNINITIALIZED, + extra_state, + NORMAL, + argc); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallInitialize(flags)); } -Handle<Code> StubCache::ComputeCallInitialize(int argc, RelocInfo::Mode mode) { - return ComputeCallInitialize(argc, mode, Code::CALL_IC); +Handle<Code> StubCache::ComputeCallInitialize(int argc, + RelocInfo::Mode mode) { + CALL_HEAP_FUNCTION(isolate_, + ComputeCallInitialize(argc, mode, Code::CALL_IC), + Code); } Handle<Code> StubCache::ComputeKeyedCallInitialize(int argc) { - return ComputeCallInitialize(argc, RelocInfo::CODE_TARGET, - Code::KEYED_CALL_IC); + CALL_HEAP_FUNCTION( + isolate_, + ComputeCallInitialize(argc, RelocInfo::CODE_TARGET, Code::KEYED_CALL_IC), + Code); } -Handle<Code> StubCache::ComputeCallPreMonomorphic( +MaybeObject* StubCache::ComputeCallPreMonomorphic( int argc, Code::Kind kind, - Code::ExtraICState extra_state) { - Code::Flags flags = - Code::ComputeFlags(kind, PREMONOMORPHIC, extra_state, NORMAL, argc); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = 
compiler.CompileCallPreMonomorphic(flags); - FillCache(isolate_, code); - return code; + Code::ExtraICState extra_ic_state) { + Code::Flags flags = Code::ComputeFlags(kind, + PREMONOMORPHIC, + extra_ic_state, + NORMAL, + argc); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallPreMonomorphic(flags)); } -Handle<Code> StubCache::ComputeCallNormal(int argc, +MaybeObject* StubCache::ComputeCallNormal(int argc, Code::Kind kind, - Code::ExtraICState extra_state) { - Code::Flags flags = - Code::ComputeFlags(kind, MONOMORPHIC, extra_state, NORMAL, argc); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileCallNormal(flags); - FillCache(isolate_, code); - return code; + Code::ExtraICState extra_ic_state) { + Code::Flags flags = Code::ComputeFlags(kind, + MONOMORPHIC, + extra_ic_state, + NORMAL, + argc); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallNormal(flags)); } -Handle<Code> StubCache::ComputeCallArguments(int argc, Code::Kind kind) { +MaybeObject* StubCache::ComputeCallArguments(int argc, Code::Kind kind) { ASSERT(kind == Code::KEYED_CALL_IC); - Code::Flags flags = - Code::ComputeFlags(kind, MEGAMORPHIC, Code::kNoExtraICState, - NORMAL, argc); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileCallArguments(flags); - FillCache(isolate_, code); - return code; + Code::Flags flags = Code::ComputeFlags(kind, + MEGAMORPHIC, + Code::kNoExtraICState, + NORMAL, + argc); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallArguments(flags)); } -Handle<Code> StubCache::ComputeCallMegamorphic( +MaybeObject* StubCache::ComputeCallMegamorphic( int argc, Code::Kind kind, - Code::ExtraICState extra_state) { - Code::Flags flags = - Code::ComputeFlags(kind, MEGAMORPHIC, extra_state, - NORMAL, argc); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileCallMegamorphic(flags); - FillCache(isolate_, code); - return code; + Code::ExtraICState extra_ic_state) { + Code::Flags flags = Code::ComputeFlags(kind, + MEGAMORPHIC, + extra_ic_state, + NORMAL, + argc); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallMegamorphic(flags)); } 
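
The hunks above restore the pre-handle StubCache flow, in which each Compute* helper probes the non-monomorphic cache, compiles a stub only on a miss, and propagates allocation failures back to the caller as a MaybeObject* instead of retrying. The standalone sketch below models that probe/compile/fill shape with ordinary C++ containers; the names (MaybeCode, ProbeCache, FillCache, ComputeCallStub) are hypothetical stand-ins, not V8's actual API, and the cache slot is reserved before compilation so the final fill cannot fail, mirroring the comment on ProbeCache in the diff.

    // Minimal standalone sketch (not V8's real API) of the probe/compile/fill
    // pattern used by the reverted ComputeCall* helpers: reserve the cache slot
    // up front so recording the finished stub can never fail, and hand any
    // compilation failure straight back to the caller. All names hypothetical.
    #include <cstdio>
    #include <map>
    #include <string>

    // Stand-in for V8's MaybeObject*: either a value or a failure marker.
    struct MaybeCode {
      bool failed;
      std::string code;  // compiled stub body when !failed
    };

    static std::map<int, std::string> g_cache;   // keyed by Code::Flags-like int
    static const std::string kUndefined = "<undefined>";

    // Like ProbeCache(): return the cached stub, or seed the slot with a
    // placeholder so the later fill is a plain overwrite of an existing entry.
    static std::string ProbeCache(int flags) {
      auto it = g_cache.find(flags);
      if (it != g_cache.end()) return it->second;
      g_cache[flags] = kUndefined;  // reserve the entry before compiling
      return kUndefined;
    }

    // Like FillCache(): on success, overwrite the pre-seeded slot; failures are
    // passed through unchanged and never cached.
    static MaybeCode FillCache(int flags, MaybeCode maybe_code) {
      if (!maybe_code.failed) g_cache[flags] = maybe_code.code;
      return maybe_code;
    }

    // Pretend stub compiler; a real one could fail (e.g. out of code space).
    static MaybeCode CompileCallStub(int flags) {
      return MaybeCode{false, "stub-for-" + std::to_string(flags)};
    }

    // Like StubCache::ComputeCallMegamorphic() and friends: probe first,
    // compile only on a miss, then fill the reserved slot.
    static MaybeCode ComputeCallStub(int flags) {
      std::string probe = ProbeCache(flags);
      if (probe != kUndefined) return MaybeCode{false, probe};  // cache hit
      return FillCache(flags, CompileCallStub(flags));
    }

    int main() {
      MaybeCode first = ComputeCallStub(42);   // miss: compiles and fills
      MaybeCode second = ComputeCallStub(42);  // hit: served from the cache
      std::printf("%s / %s\n", first.code.c_str(), second.code.c_str());
    }

The design point is the same one the diff's ProbeCache comment makes: the only step that can fail happens before a new code object exists, so a freshly compiled stub can always be recorded in the cache.
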
-Handle<Code> StubCache::ComputeCallMiss(int argc, +MaybeObject* StubCache::ComputeCallMiss(int argc, Code::Kind kind, - Code::ExtraICState extra_state) { + Code::ExtraICState extra_ic_state) { // MONOMORPHIC_PROTOTYPE_FAILURE state is used to make sure that miss stubs // and monomorphic stubs are not mixed up together in the stub cache. - Code::Flags flags = - Code::ComputeFlags(kind, MONOMORPHIC_PROTOTYPE_FAILURE, extra_state, - NORMAL, argc, OWN_MAP); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileCallMiss(flags); - FillCache(isolate_, code); - return code; -} - - -// The CallStubCompiler needs a version of ComputeCallMiss that does not -// perform GC. This function is temporary, because the stub cache but not -// yet the stub compiler uses handles. -MaybeObject* StubCache::TryComputeCallMiss(int argc, - Code::Kind kind, - Code::ExtraICState extra_state) { - Code::Flags flags = - Code::ComputeFlags(kind, MONOMORPHIC_PROTOTYPE_FAILURE, extra_state, - NORMAL, argc, OWN_MAP); - NumberDictionary* cache = isolate_->heap()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return cache->ValueAt(entry); - - StubCompiler compiler(isolate_); - Code* code = NULL; - MaybeObject* maybe_code = compiler.TryCompileCallMiss(flags); - if (!maybe_code->To(&code)) return maybe_code; - - NumberDictionary* new_cache = NULL; - MaybeObject* maybe_new_cache = cache->AtNumberPut(flags, code); - if (!maybe_new_cache->To(&new_cache)) return maybe_new_cache; - isolate_->heap()->public_set_non_monomorphic_cache(new_cache); - - return code; + Code::Flags flags = Code::ComputeFlags(kind, + MONOMORPHIC_PROTOTYPE_FAILURE, + extra_ic_state, + NORMAL, + argc, + OWN_MAP); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallMiss(flags)); } #ifdef ENABLE_DEBUGGER_SUPPORT -Handle<Code> StubCache::ComputeCallDebugBreak(int argc, - Code::Kind kind) { +MaybeObject* StubCache::ComputeCallDebugBreak( + int argc, + Code::Kind kind) { // Extra IC state is irrelevant for debug break ICs. They jump to // the actual call ic to carry out the work. 
- Code::Flags flags = - Code::ComputeFlags(kind, DEBUG_BREAK, Code::kNoExtraICState, - NORMAL, argc); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileCallDebugBreak(flags); - FillCache(isolate_, code); - return code; + Code::Flags flags = Code::ComputeFlags(kind, + DEBUG_BREAK, + Code::kNoExtraICState, + NORMAL, + argc); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallDebugBreak(flags)); } -Handle<Code> StubCache::ComputeCallDebugPrepareStepIn(int argc, - Code::Kind kind) { +MaybeObject* StubCache::ComputeCallDebugPrepareStepIn( + int argc, + Code::Kind kind) { // Extra IC state is irrelevant for debug break ICs. They jump to // the actual call ic to carry out the work. - Code::Flags flags = - Code::ComputeFlags(kind, DEBUG_PREPARE_STEP_IN, Code::kNoExtraICState, - NORMAL, argc); - Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache(); - int entry = cache->FindEntry(isolate_, flags); - if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry))); - - StubCompiler compiler(isolate_); - Handle<Code> code = compiler.CompileCallDebugPrepareStepIn(flags); - FillCache(isolate_, code); - return code; + Code::Flags flags = Code::ComputeFlags(kind, + DEBUG_PREPARE_STEP_IN, + Code::kNoExtraICState, + NORMAL, + argc); + Object* probe; + { MaybeObject* maybe_probe = ProbeCache(isolate_, flags); + if (!maybe_probe->ToObject(&probe)) return maybe_probe; + } + if (!probe->IsUndefined()) return probe; + StubCompiler compiler; + return FillCache(isolate_, compiler.CompileCallDebugPrepareStepIn(flags)); } #endif @@ -1231,47 +1384,62 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor) { } -Handle<Code> StubCompiler::CompileCallInitialize(Code::Flags flags) { +MaybeObject* StubCompiler::CompileCallInitialize(Code::Flags flags) { + HandleScope scope(isolate()); int argc = Code::ExtractArgumentsCountFromFlags(flags); Code::Kind kind = Code::ExtractKindFromFlags(flags); - Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags); + Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags); if (kind == Code::CALL_IC) { - CallIC::GenerateInitialize(masm(), argc, extra_state); + CallIC::GenerateInitialize(masm(), argc, extra_ic_state); } else { KeyedCallIC::GenerateInitialize(masm(), argc); } - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallInitialize"); + Object* result; + { MaybeObject* maybe_result = + GetCodeWithFlags(flags, "CompileCallInitialize"); + if (!maybe_result->ToObject(&result)) return maybe_result; + } isolate()->counters()->call_initialize_stubs()->Increment(); + Code* code = Code::cast(result); + USE(code); PROFILE(isolate(), CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_INITIALIZE_TAG), - *code, code->arguments_count())); - GDBJIT(AddCode(GDBJITInterface::CALL_INITIALIZE, *code)); - return code; + code, code->arguments_count())); + GDBJIT(AddCode(GDBJITInterface::CALL_INITIALIZE, Code::cast(code))); + return result; } -Handle<Code> StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) { +MaybeObject* StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) { + 
HandleScope scope(isolate()); int argc = Code::ExtractArgumentsCountFromFlags(flags); // The code of the PreMonomorphic stub is the same as the code // of the Initialized stub. They just differ on the code object flags. Code::Kind kind = Code::ExtractKindFromFlags(flags); - Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags); + Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags); if (kind == Code::CALL_IC) { - CallIC::GenerateInitialize(masm(), argc, extra_state); + CallIC::GenerateInitialize(masm(), argc, extra_ic_state); } else { KeyedCallIC::GenerateInitialize(masm(), argc); } - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallPreMonomorphic"); + Object* result; + { MaybeObject* maybe_result = + GetCodeWithFlags(flags, "CompileCallPreMonomorphic"); + if (!maybe_result->ToObject(&result)) return maybe_result; + } isolate()->counters()->call_premonomorphic_stubs()->Increment(); + Code* code = Code::cast(result); + USE(code); PROFILE(isolate(), CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_PRE_MONOMORPHIC_TAG), - *code, code->arguments_count())); - GDBJIT(AddCode(GDBJITInterface::CALL_PRE_MONOMORPHIC, *code)); - return code; + code, code->arguments_count())); + GDBJIT(AddCode(GDBJITInterface::CALL_PRE_MONOMORPHIC, Code::cast(code))); + return result; } -Handle<Code> StubCompiler::CompileCallNormal(Code::Flags flags) { +MaybeObject* StubCompiler::CompileCallNormal(Code::Flags flags) { + HandleScope scope(isolate()); int argc = Code::ExtractArgumentsCountFromFlags(flags); Code::Kind kind = Code::ExtractKindFromFlags(flags); if (kind == Code::CALL_IC) { @@ -1282,81 +1450,79 @@ Handle<Code> StubCompiler::CompileCallNormal(Code::Flags flags) { } else { KeyedCallIC::GenerateNormal(masm(), argc); } - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallNormal"); + Object* result; + { MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallNormal"); + if (!maybe_result->ToObject(&result)) return maybe_result; + } isolate()->counters()->call_normal_stubs()->Increment(); + Code* code = Code::cast(result); + USE(code); PROFILE(isolate(), CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_NORMAL_TAG), - *code, code->arguments_count())); - GDBJIT(AddCode(GDBJITInterface::CALL_NORMAL, *code)); - return code; + code, code->arguments_count())); + GDBJIT(AddCode(GDBJITInterface::CALL_NORMAL, Code::cast(code))); + return result; } -Handle<Code> StubCompiler::CompileCallMegamorphic(Code::Flags flags) { +MaybeObject* StubCompiler::CompileCallMegamorphic(Code::Flags flags) { + HandleScope scope(isolate()); int argc = Code::ExtractArgumentsCountFromFlags(flags); Code::Kind kind = Code::ExtractKindFromFlags(flags); - Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags); + Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags); if (kind == Code::CALL_IC) { - CallIC::GenerateMegamorphic(masm(), argc, extra_state); + CallIC::GenerateMegamorphic(masm(), argc, extra_ic_state); } else { KeyedCallIC::GenerateMegamorphic(masm(), argc); } - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallMegamorphic"); + Object* result; + { MaybeObject* maybe_result = + GetCodeWithFlags(flags, "CompileCallMegamorphic"); + if (!maybe_result->ToObject(&result)) return maybe_result; + } isolate()->counters()->call_megamorphic_stubs()->Increment(); + Code* code = Code::cast(result); + USE(code); PROFILE(isolate(), CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG), - *code, code->arguments_count())); - 
GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, *code)); - return code; + code, code->arguments_count())); + GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, Code::cast(code))); + return result; } -Handle<Code> StubCompiler::CompileCallArguments(Code::Flags flags) { +MaybeObject* StubCompiler::CompileCallArguments(Code::Flags flags) { + HandleScope scope(isolate()); int argc = Code::ExtractArgumentsCountFromFlags(flags); KeyedCallIC::GenerateNonStrictArguments(masm(), argc); - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallArguments"); - PROFILE(isolate(), - CodeCreateEvent(CALL_LOGGER_TAG(Code::ExtractKindFromFlags(flags), - CALL_MEGAMORPHIC_TAG), - *code, code->arguments_count())); - GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, *code)); - return code; -} - - -Handle<Code> StubCompiler::CompileCallMiss(Code::Flags flags) { - int argc = Code::ExtractArgumentsCountFromFlags(flags); Code::Kind kind = Code::ExtractKindFromFlags(flags); - Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags); - if (kind == Code::CALL_IC) { - CallIC::GenerateMiss(masm(), argc, extra_state); - } else { - KeyedCallIC::GenerateMiss(masm(), argc); + Object* result; + { MaybeObject* maybe_result = + GetCodeWithFlags(flags, "CompileCallArguments"); + if (!maybe_result->ToObject(&result)) return maybe_result; } - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallMiss"); - isolate()->counters()->call_megamorphic_stubs()->Increment(); + Code* code = Code::cast(result); + USE(code); PROFILE(isolate(), - CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MISS_TAG), - *code, code->arguments_count())); - GDBJIT(AddCode(GDBJITInterface::CALL_MISS, *code)); - return code; + CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG), + code, code->arguments_count())); + GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, Code::cast(code))); + return result; } -// TODO(kmillikin): This annoying raw pointer implementation should be -// eliminated when the stub compiler no longer needs it. 
-MaybeObject* StubCompiler::TryCompileCallMiss(Code::Flags flags) { +MaybeObject* StubCompiler::CompileCallMiss(Code::Flags flags) { HandleScope scope(isolate()); int argc = Code::ExtractArgumentsCountFromFlags(flags); Code::Kind kind = Code::ExtractKindFromFlags(flags); - Code::ExtraICState extra_state = Code::ExtractExtraICStateFromFlags(flags); + Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags); if (kind == Code::CALL_IC) { - CallIC::GenerateMiss(masm(), argc, extra_state); + CallIC::GenerateMiss(masm(), argc, extra_ic_state); } else { KeyedCallIC::GenerateMiss(masm(), argc); } Object* result; - { MaybeObject* maybe_result = TryGetCodeWithFlags(flags, "CompileCallMiss"); + { MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallMiss"); if (!maybe_result->ToObject(&result)) return maybe_result; } isolate()->counters()->call_megamorphic_stubs()->Increment(); @@ -1371,20 +1537,29 @@ MaybeObject* StubCompiler::TryCompileCallMiss(Code::Flags flags) { #ifdef ENABLE_DEBUGGER_SUPPORT -Handle<Code> StubCompiler::CompileCallDebugBreak(Code::Flags flags) { +MaybeObject* StubCompiler::CompileCallDebugBreak(Code::Flags flags) { + HandleScope scope(isolate()); Debug::GenerateCallICDebugBreak(masm()); - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallDebugBreak"); + Object* result; + { MaybeObject* maybe_result = + GetCodeWithFlags(flags, "CompileCallDebugBreak"); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + Code* code = Code::cast(result); + USE(code); + Code::Kind kind = Code::ExtractKindFromFlags(flags); + USE(kind); PROFILE(isolate(), - CodeCreateEvent(CALL_LOGGER_TAG(Code::ExtractKindFromFlags(flags), - CALL_DEBUG_BREAK_TAG), - *code, code->arguments_count())); - return code; + CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_DEBUG_BREAK_TAG), + code, code->arguments_count())); + return result; } -Handle<Code> StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) { - // Use the same code for the the step in preparations as we do for the - // miss case. +MaybeObject* StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) { + HandleScope scope(isolate()); + // Use the same code for the the step in preparations as we do for + // the miss case. int argc = Code::ExtractArgumentsCountFromFlags(flags); Code::Kind kind = Code::ExtractKindFromFlags(flags); if (kind == Code::CALL_IC) { @@ -1393,42 +1568,26 @@ Handle<Code> StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) { } else { KeyedCallIC::GenerateMiss(masm(), argc); } - Handle<Code> code = GetCodeWithFlags(flags, "CompileCallDebugPrepareStepIn"); + Object* result; + { MaybeObject* maybe_result = + GetCodeWithFlags(flags, "CompileCallDebugPrepareStepIn"); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + Code* code = Code::cast(result); + USE(code); PROFILE(isolate(), CodeCreateEvent( CALL_LOGGER_TAG(kind, CALL_DEBUG_PREPARE_STEP_IN_TAG), - *code, + code, code->arguments_count())); - return code; + return result; } -#endif // ENABLE_DEBUGGER_SUPPORT +#endif #undef CALL_LOGGER_TAG - -Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags, +MaybeObject* StubCompiler::GetCodeWithFlags(Code::Flags flags, const char* name) { - // Create code object in the heap. 
- CodeDesc desc; - masm_.GetCode(&desc); - Handle<Code> code = factory()->NewCode(desc, flags, masm_.CodeObject()); -#ifdef ENABLE_DISASSEMBLER - if (FLAG_print_code_stubs) code->Disassemble(name); -#endif - return code; -} - - -Handle<Code> StubCompiler::GetCodeWithFlags(Code::Flags flags, - Handle<String> name) { - return (FLAG_print_code_stubs && !name.is_null()) - ? GetCodeWithFlags(flags, *name->ToCString()) - : GetCodeWithFlags(flags, reinterpret_cast<char*>(NULL)); -} - - -MaybeObject* StubCompiler::TryGetCodeWithFlags(Code::Flags flags, - const char* name) { // Check for allocation failures during stub compilation. if (failure_->IsFailure()) return failure_; @@ -1445,12 +1604,11 @@ MaybeObject* StubCompiler::TryGetCodeWithFlags(Code::Flags flags, } -MaybeObject* StubCompiler::TryGetCodeWithFlags(Code::Flags flags, - String* name) { - if (FLAG_print_code_stubs && name != NULL) { - return TryGetCodeWithFlags(flags, *name->ToCString()); +MaybeObject* StubCompiler::GetCodeWithFlags(Code::Flags flags, String* name) { + if (FLAG_print_code_stubs && (name != NULL)) { + return GetCodeWithFlags(flags, *name->ToCString()); } - return TryGetCodeWithFlags(flags, reinterpret_cast<char*>(NULL)); + return GetCodeWithFlags(flags, reinterpret_cast<char*>(NULL)); } @@ -1468,20 +1626,10 @@ void StubCompiler::LookupPostInterceptor(JSObject* holder, } -Handle<Code> LoadStubCompiler::GetCode(PropertyType type, Handle<String> name) { - Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, type); - Handle<Code> code = GetCodeWithFlags(flags, name); - PROFILE(isolate(), CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code)); - return code; -} - -// TODO(ulan): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* LoadStubCompiler::TryGetCode(PropertyType type, String* name) { +MaybeObject* LoadStubCompiler::GetCode(PropertyType type, String* name) { Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, type); - MaybeObject* result = TryGetCodeWithFlags(flags, name); + MaybeObject* result = GetCodeWithFlags(flags, name); if (!result->IsFailure()) { PROFILE(isolate(), CodeCreateEvent(Logger::LOAD_IC_TAG, @@ -1495,25 +1643,12 @@ MaybeObject* LoadStubCompiler::TryGetCode(PropertyType type, String* name) { } -Handle<Code> KeyedLoadStubCompiler::GetCode(PropertyType type, - Handle<String> name, - InlineCacheState state) { - Code::Flags flags = Code::ComputeFlags( - Code::KEYED_LOAD_IC, state, Code::kNoExtraICState, type); - Handle<Code> code = GetCodeWithFlags(flags, name); - PROFILE(isolate(), CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code)); - return code; -} - -// TODO(ulan): Eliminate this function when the stub cache is fully -// handlified. 
-MaybeObject* KeyedLoadStubCompiler::TryGetCode(PropertyType type, +MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type, String* name, InlineCacheState state) { Code::Flags flags = Code::ComputeFlags( Code::KEYED_LOAD_IC, state, Code::kNoExtraICState, type); - MaybeObject* result = TryGetCodeWithFlags(flags, name); + MaybeObject* result = GetCodeWithFlags(flags, name); if (!result->IsFailure()) { PROFILE(isolate(), CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, @@ -1527,26 +1662,39 @@ MaybeObject* KeyedLoadStubCompiler::TryGetCode(PropertyType type, } -Handle<Code> StoreStubCompiler::GetCode(PropertyType type, - Handle<String> name) { +MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) { Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, type, strict_mode_); - Handle<Code> code = GetCodeWithFlags(flags, name); - PROFILE(isolate(), CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code)); - return code; + MaybeObject* result = GetCodeWithFlags(flags, name); + if (!result->IsFailure()) { + PROFILE(isolate(), + CodeCreateEvent(Logger::STORE_IC_TAG, + Code::cast(result->ToObjectUnchecked()), + name)); + GDBJIT(AddCode(GDBJITInterface::STORE_IC, + name, + Code::cast(result->ToObjectUnchecked()))); + } + return result; } -Handle<Code> KeyedStoreStubCompiler::GetCode(PropertyType type, - Handle<String> name, +MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type, + String* name, InlineCacheState state) { Code::Flags flags = Code::ComputeFlags(Code::KEYED_STORE_IC, state, strict_mode_, type); - Handle<Code> code = GetCodeWithFlags(flags, name); - PROFILE(isolate(), CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, *name)); - GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, *name, *code)); - return code; + MaybeObject* result = GetCodeWithFlags(flags, name); + if (!result->IsFailure()) { + PROFILE(isolate(), + CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, + Code::cast(result->ToObjectUnchecked()), + name)); + GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, + name, + Code::cast(result->ToObjectUnchecked()))); + } + return result; } @@ -1556,15 +1704,13 @@ void KeyedStoreStubCompiler::GenerateStoreDictionaryElement( } -CallStubCompiler::CallStubCompiler(Isolate* isolate, - int argc, +CallStubCompiler::CallStubCompiler(int argc, Code::Kind kind, - Code::ExtraICState extra_state, + Code::ExtraICState extra_ic_state, InlineCacheHolderFlag cache_holder) - : StubCompiler(isolate), - arguments_(argc), + : arguments_(argc), kind_(kind), - extra_state_(extra_state), + extra_ic_state_(extra_ic_state), cache_holder_(cache_holder) { } @@ -1617,54 +1763,30 @@ MaybeObject* CallStubCompiler::CompileCustomCall(Object* object, } -Handle<Code> CallStubCompiler::GetCode(PropertyType type, Handle<String> name) { +MaybeObject* CallStubCompiler::GetCode(PropertyType type, String* name) { int argc = arguments_.immediate(); Code::Flags flags = Code::ComputeMonomorphicFlags(kind_, type, - extra_state_, + extra_ic_state_, cache_holder_, argc); return GetCodeWithFlags(flags, name); } -Handle<Code> CallStubCompiler::GetCode(Handle<JSFunction> function) { - Handle<String> function_name; - if (function->shared()->name()->IsString()) { - function_name = Handle<String>(String::cast(function->shared()->name())); - } - return GetCode(CONSTANT_FUNCTION, function_name); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. 
-MaybeObject* CallStubCompiler::TryGetCode(PropertyType type, String* name) { - int argc = arguments_.immediate(); - Code::Flags flags = Code::ComputeMonomorphicFlags(kind_, - type, - extra_state_, - cache_holder_, - argc); - return TryGetCodeWithFlags(flags, name); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* CallStubCompiler::TryGetCode(JSFunction* function) { +MaybeObject* CallStubCompiler::GetCode(JSFunction* function) { String* function_name = NULL; if (function->shared()->name()->IsString()) { function_name = String::cast(function->shared()->name()); } - return TryGetCode(CONSTANT_FUNCTION, function_name); + return GetCode(CONSTANT_FUNCTION, function_name); } MaybeObject* ConstructStubCompiler::GetCode() { Code::Flags flags = Code::ComputeFlags(Code::STUB); Object* result; - { MaybeObject* maybe_result = TryGetCodeWithFlags(flags, "ConstructStub"); + { MaybeObject* maybe_result = GetCodeWithFlags(flags, "ConstructStub"); if (!maybe_result->ToObject(&result)) return maybe_result; } Code* code = Code::cast(result); diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h index 11fdb89eb..d9ec88f51 100644 --- a/deps/v8/src/stub-cache.h +++ b/deps/v8/src/stub-cache.h @@ -76,171 +76,207 @@ class StubCache { // Computes the right stub matching. Inserts the result in the // cache before returning. This might compile a stub if needed. - Handle<Code> ComputeLoadNonexistent(Handle<String> name, - Handle<JSObject> receiver); + MUST_USE_RESULT MaybeObject* ComputeLoadNonexistent( + String* name, + JSObject* receiver); + + MUST_USE_RESULT MaybeObject* ComputeLoadField(String* name, + JSObject* receiver, + JSObject* holder, + int field_index); + + MUST_USE_RESULT MaybeObject* ComputeLoadCallback( + String* name, + JSObject* receiver, + JSObject* holder, + AccessorInfo* callback); - Handle<Code> ComputeLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - int field_index); + MUST_USE_RESULT MaybeObject* ComputeLoadConstant(String* name, + JSObject* receiver, + JSObject* holder, + Object* value); - Handle<Code> ComputeLoadCallback(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<AccessorInfo> callback); + MUST_USE_RESULT MaybeObject* ComputeLoadInterceptor( + String* name, + JSObject* receiver, + JSObject* holder); - Handle<Code> ComputeLoadConstant(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value); + MUST_USE_RESULT MaybeObject* ComputeLoadNormal(); - Handle<Code> ComputeLoadInterceptor(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder); - Handle<Code> ComputeLoadNormal(); + MUST_USE_RESULT MaybeObject* ComputeLoadGlobal( + String* name, + JSObject* receiver, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + bool is_dont_delete); - Handle<Code> ComputeLoadGlobal(Handle<String> name, - Handle<JSObject> receiver, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - bool is_dont_delete); // --- - Handle<Code> ComputeKeyedLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - int field_index); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadField(String* name, + JSObject* receiver, + JSObject* holder, + int field_index); - Handle<Code> ComputeKeyedLoadCallback(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<AccessorInfo> callback); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadCallback( + 
String* name, + JSObject* receiver, + JSObject* holder, + AccessorInfo* callback); - Handle<Code> ComputeKeyedLoadConstant(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadConstant( + String* name, + JSObject* receiver, + JSObject* holder, + Object* value); - Handle<Code> ComputeKeyedLoadInterceptor(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadInterceptor( + String* name, + JSObject* receiver, + JSObject* holder); - Handle<Code> ComputeKeyedLoadArrayLength(Handle<String> name, - Handle<JSArray> receiver); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadArrayLength( + String* name, + JSArray* receiver); - Handle<Code> ComputeKeyedLoadStringLength(Handle<String> name, - Handle<String> receiver); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadStringLength( + String* name, + String* receiver); - Handle<Code> ComputeKeyedLoadFunctionPrototype(Handle<String> name, - Handle<JSFunction> receiver); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadFunctionPrototype( + String* name, + JSFunction* receiver); // --- - Handle<Code> ComputeStoreField(Handle<String> name, - Handle<JSObject> receiver, - int field_index, - Handle<Map> transition, - StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeStoreField( + String* name, + JSObject* receiver, + int field_index, + Map* transition, + StrictModeFlag strict_mode); - Handle<Code> ComputeStoreNormal(StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeStoreNormal( + StrictModeFlag strict_mode); - Handle<Code> ComputeStoreGlobal(Handle<String> name, - Handle<GlobalObject> receiver, - Handle<JSGlobalPropertyCell> cell, - StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeStoreGlobal( + String* name, + GlobalObject* receiver, + JSGlobalPropertyCell* cell, + StrictModeFlag strict_mode); - Handle<Code> ComputeStoreCallback(Handle<String> name, - Handle<JSObject> receiver, - Handle<AccessorInfo> callback, - StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeStoreCallback( + String* name, + JSObject* receiver, + AccessorInfo* callback, + StrictModeFlag strict_mode); - Handle<Code> ComputeStoreInterceptor(Handle<String> name, - Handle<JSObject> receiver, - StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeStoreInterceptor( + String* name, + JSObject* receiver, + StrictModeFlag strict_mode); // --- - Handle<Code> ComputeKeyedStoreField(Handle<String> name, - Handle<JSObject> receiver, - int field_index, - Handle<Map> transition, - StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeKeyedStoreField( + String* name, + JSObject* receiver, + int field_index, + Map* transition, + StrictModeFlag strict_mode); - Handle<Code> ComputeKeyedLoadOrStoreElement(Handle<JSObject> receiver, - KeyedIC::StubKind stub_kind, - StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadOrStoreElement( + JSObject* receiver, + KeyedIC::StubKind stub_kind, + StrictModeFlag strict_mode); // --- - Handle<Code> ComputeCallField(int argc, - Code::Kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<Object> object, - Handle<JSObject> holder, - int index); - - Handle<Code> ComputeCallConstant(int argc, - Code::Kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<Object> object, - Handle<JSObject> holder, - Handle<JSFunction> function); - - Handle<Code> 
ComputeCallInterceptor(int argc, - Code::Kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<Object> object, - Handle<JSObject> holder); - - Handle<Code> ComputeCallGlobal(int argc, - Code::Kind, - Code::ExtraICState extra_state, - Handle<String> name, - Handle<JSObject> receiver, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<JSFunction> function); + MUST_USE_RESULT MaybeObject* ComputeCallField( + int argc, + Code::Kind, + Code::ExtraICState extra_ic_state, + String* name, + Object* object, + JSObject* holder, + int index); + + MUST_USE_RESULT MaybeObject* ComputeCallConstant( + int argc, + Code::Kind, + Code::ExtraICState extra_ic_state, + String* name, + Object* object, + JSObject* holder, + JSFunction* function); + + MUST_USE_RESULT MaybeObject* ComputeCallNormal( + int argc, + Code::Kind, + Code::ExtraICState extra_ic_state, + String* name, + JSObject* receiver); + + MUST_USE_RESULT MaybeObject* ComputeCallInterceptor( + int argc, + Code::Kind, + Code::ExtraICState extra_ic_state, + String* name, + Object* object, + JSObject* holder); + + MUST_USE_RESULT MaybeObject* ComputeCallGlobal( + int argc, + Code::Kind, + Code::ExtraICState extra_ic_state, + String* name, + JSObject* receiver, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + JSFunction* function); // --- - Handle<Code> ComputeCallInitialize(int argc, RelocInfo::Mode mode); + MUST_USE_RESULT MaybeObject* ComputeCallInitialize(int argc, + RelocInfo::Mode mode, + Code::Kind kind); - Handle<Code> ComputeKeyedCallInitialize(int argc); + Handle<Code> ComputeCallInitialize(int argc, + RelocInfo::Mode mode); - Handle<Code> ComputeCallPreMonomorphic(int argc, - Code::Kind kind, - Code::ExtraICState extra_state); + Handle<Code> ComputeKeyedCallInitialize(int argc); - Handle<Code> ComputeCallNormal(int argc, - Code::Kind kind, - Code::ExtraICState state); + MUST_USE_RESULT MaybeObject* ComputeCallPreMonomorphic( + int argc, + Code::Kind kind, + Code::ExtraICState extra_ic_state); - Handle<Code> ComputeCallArguments(int argc, Code::Kind kind); + MUST_USE_RESULT MaybeObject* ComputeCallNormal(int argc, + Code::Kind kind, + Code::ExtraICState state); - Handle<Code> ComputeCallMegamorphic(int argc, - Code::Kind kind, - Code::ExtraICState state); + MUST_USE_RESULT MaybeObject* ComputeCallArguments(int argc, + Code::Kind kind); - Handle<Code> ComputeCallMiss(int argc, - Code::Kind kind, - Code::ExtraICState state); + MUST_USE_RESULT MaybeObject* ComputeCallMegamorphic(int argc, + Code::Kind kind, + Code::ExtraICState state); - MUST_USE_RESULT MaybeObject* TryComputeCallMiss(int argc, - Code::Kind kind, - Code::ExtraICState state); + MUST_USE_RESULT MaybeObject* ComputeCallMiss(int argc, + Code::Kind kind, + Code::ExtraICState state); // Finds the Code object stored in the Heap::non_monomorphic_cache(). - Code* FindCallInitialize(int argc, RelocInfo::Mode mode, Code::Kind kind); + MUST_USE_RESULT Code* FindCallInitialize(int argc, + RelocInfo::Mode mode, + Code::Kind kind); #ifdef ENABLE_DEBUGGER_SUPPORT - Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind); + MUST_USE_RESULT MaybeObject* ComputeCallDebugBreak(int argc, Code::Kind kind); - Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind); + MUST_USE_RESULT MaybeObject* ComputeCallDebugPrepareStepIn(int argc, + Code::Kind kind); #endif // Update cache for entry hash(name, map). 
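// Illustration (editorial note, not part of the patch): the Compute*Load/Store/Call entries above
// are the stub cache's lookups, keyed by property name and receiver map, that back V8's inline
// caches. A hedged JavaScript-level sketch of the cache states those stubs serve:
function getX(o) { return o.x; }   // this property load site gets a LOAD_IC
getX({ x: 1, y: 2 });              // first call misses; a monomorphic stub is compiled and cached
getX({ x: 3, y: 4 });              // same hidden class and name: the cached ("x", map) stub hits
getX({ x: 5 });                    // different hidden class: the entry no longer matches and the
                                   // IC degrades toward the MEGAMORPHIC state handled later in this patch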
@@ -294,14 +330,16 @@ class StubCache { Isolate* isolate() { return isolate_; } Heap* heap() { return isolate()->heap(); } - Factory* factory() { return isolate()->factory(); } private: explicit StubCache(Isolate* isolate); - Handle<Code> ComputeCallInitialize(int argc, - RelocInfo::Mode mode, - Code::Kind kind); + friend class Isolate; + friend class SCTableReference; + static const int kPrimaryTableSize = 2048; + static const int kSecondaryTableSize = 512; + Entry primary_[kPrimaryTableSize]; + Entry secondary_[kSecondaryTableSize]; // Computes the hashed offsets for primary and secondary caches. static int PrimaryOffset(String* name, Code::Flags flags, Map* map) { @@ -346,16 +384,8 @@ class StubCache { reinterpret_cast<Address>(table) + (offset << shift_amount)); } - static const int kPrimaryTableSize = 2048; - static const int kSecondaryTableSize = 512; - - Entry primary_[kPrimaryTableSize]; - Entry secondary_[kSecondaryTableSize]; Isolate* isolate_; - friend class Isolate; - friend class SCTableReference; - DISALLOW_COPY_AND_ASSIGN(StubCache); }; @@ -377,26 +407,21 @@ DECLARE_RUNTIME_FUNCTION(MaybeObject*, CallInterceptorProperty); DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor); -// The stub compilers compile stubs for the stub cache. +// The stub compiler compiles stubs for the stub cache. class StubCompiler BASE_EMBEDDED { public: - explicit StubCompiler(Isolate* isolate) - : isolate_(isolate), masm_(isolate, NULL, 256), failure_(NULL) { } - - // Functions to compile either CallIC or KeyedCallIC. The specific kind - // is extracted from the code flags. - Handle<Code> CompileCallInitialize(Code::Flags flags); - Handle<Code> CompileCallPreMonomorphic(Code::Flags flags); - Handle<Code> CompileCallNormal(Code::Flags flags); - Handle<Code> CompileCallMegamorphic(Code::Flags flags); - Handle<Code> CompileCallArguments(Code::Flags flags); - Handle<Code> CompileCallMiss(Code::Flags flags); - - MUST_USE_RESULT MaybeObject* TryCompileCallMiss(Code::Flags flags); - + StubCompiler() + : scope_(), masm_(Isolate::Current(), NULL, 256), failure_(NULL) { } + + MUST_USE_RESULT MaybeObject* CompileCallInitialize(Code::Flags flags); + MUST_USE_RESULT MaybeObject* CompileCallPreMonomorphic(Code::Flags flags); + MUST_USE_RESULT MaybeObject* CompileCallNormal(Code::Flags flags); + MUST_USE_RESULT MaybeObject* CompileCallMegamorphic(Code::Flags flags); + MUST_USE_RESULT MaybeObject* CompileCallArguments(Code::Flags flags); + MUST_USE_RESULT MaybeObject* CompileCallMiss(Code::Flags flags); #ifdef ENABLE_DEBUGGER_SUPPORT - Handle<Code> CompileCallDebugBreak(Code::Flags flags); - Handle<Code> CompileCallDebugPrepareStepIn(Code::Flags flags); + MUST_USE_RESULT MaybeObject* CompileCallDebugBreak(Code::Flags flags); + MUST_USE_RESULT MaybeObject* CompileCallDebugPrepareStepIn(Code::Flags flags); #endif // Static functions for generating parts of stubs. 
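// Illustration (editorial note, not part of the patch): CheckPrototypes and the GenerateLoad*/
// GenerateStore* helpers declared just below in StubCompiler guard a cached property access
// against shape changes on the receiver and its prototype chain. Roughly, in JavaScript terms:
function Base() {}
Base.prototype.answer = 42;
var obj = new Base();
obj.answer;                  // 42: the stub checks obj's map and Base.prototype's map before
                             // trusting the cached lookup result
Base.prototype.answer = 7;   // a prototype mutation must not be served from a stale stub
obj.answer;                  // 7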
@@ -416,10 +441,8 @@ class StubCompiler BASE_EMBEDDED { Label* miss); static void GenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - Handle<JSObject> holder, - int index); + Register dst, Register src, + JSObject* holder, int index); static void GenerateLoadArrayLength(MacroAssembler* masm, Register receiver, @@ -440,9 +463,9 @@ class StubCompiler BASE_EMBEDDED { Label* miss_label); static void GenerateStoreField(MacroAssembler* masm, - Handle<JSObject> object, + JSObject* object, int index, - Handle<Map> transition, + Map* transition, Register receiver_reg, Register name_reg, Register scratch, @@ -468,30 +491,7 @@ class StubCompiler BASE_EMBEDDED { // The function can optionally (when save_at_depth != // kInvalidProtoDepth) save the object at the given depth by moving // it to [esp + kPointerSize]. - Register CheckPrototypes(Handle<JSObject> object, - Register object_reg, - Handle<JSObject> holder, - Register holder_reg, - Register scratch1, - Register scratch2, - Handle<String> name, - Label* miss) { - return CheckPrototypes(object, object_reg, holder, holder_reg, scratch1, - scratch2, name, kInvalidProtoDepth, miss); - } - Register CheckPrototypes(Handle<JSObject> object, - Register object_reg, - Handle<JSObject> holder, - Register holder_reg, - Register scratch1, - Register scratch2, - Handle<String> name, - int save_at_depth, - Label* miss); - - // TODO(kmillikin): Eliminate this function when the stub cache is fully - // handlified. Register CheckPrototypes(JSObject* object, Register object_reg, JSObject* holder, @@ -504,8 +504,6 @@ class StubCompiler BASE_EMBEDDED { scratch2, name, kInvalidProtoDepth, miss); } - // TODO(kmillikin): Eliminate this function when the stub cache is fully - // handlified. Register CheckPrototypes(JSObject* object, Register object_reg, JSObject* holder, @@ -517,25 +515,20 @@ class StubCompiler BASE_EMBEDDED { Label* miss); protected: - Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name); - Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<String> name); - - MUST_USE_RESULT MaybeObject* TryGetCodeWithFlags(Code::Flags flags, - const char* name); - MUST_USE_RESULT MaybeObject* TryGetCodeWithFlags(Code::Flags flags, - String* name); + MaybeObject* GetCodeWithFlags(Code::Flags flags, const char* name); + MaybeObject* GetCodeWithFlags(Code::Flags flags, String* name); MacroAssembler* masm() { return &masm_; } void set_failure(Failure* failure) { failure_ = failure; } - void GenerateLoadField(Handle<JSObject> object, - Handle<JSObject> holder, + void GenerateLoadField(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, int index, - Handle<String> name, + String* name, Label* miss); MaybeObject* GenerateLoadCallback(JSObject* object, @@ -549,14 +542,14 @@ class StubCompiler BASE_EMBEDDED { String* name, Label* miss); - void GenerateLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, + void GenerateLoadConstant(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, - Handle<Object> value, - Handle<String> name, + Object* value, + String* name, Label* miss); void GenerateLoadInterceptor(JSObject* object, @@ -574,12 +567,12 @@ class StubCompiler BASE_EMBEDDED { String* name, LookupResult* lookup); - Isolate* isolate() { return isolate_; } + Isolate* isolate() { return scope_.isolate(); } Heap* heap() { return isolate()->heap(); } Factory* factory() { return isolate()->factory(); } 
private: - Isolate* isolate_; + HandleScope scope_; MacroAssembler masm_; Failure* failure_; }; @@ -587,95 +580,70 @@ class StubCompiler BASE_EMBEDDED { class LoadStubCompiler: public StubCompiler { public: - explicit LoadStubCompiler(Isolate* isolate) : StubCompiler(isolate) { } - - Handle<Code> CompileLoadNonexistent(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> last); + MUST_USE_RESULT MaybeObject* CompileLoadNonexistent(String* name, + JSObject* object, + JSObject* last); - Handle<Code> CompileLoadField(Handle<JSObject> object, - Handle<JSObject> holder, - int index, - Handle<String> name); - - Handle<Code> CompileLoadCallback(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<AccessorInfo> callback); + MUST_USE_RESULT MaybeObject* CompileLoadField(JSObject* object, + JSObject* holder, + int index, + String* name); MUST_USE_RESULT MaybeObject* CompileLoadCallback(String* name, JSObject* object, JSObject* holder, AccessorInfo* callback); - Handle<Code> CompileLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<Object> value, - Handle<String> name); - - Handle<Code> CompileLoadInterceptor(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileLoadConstant(JSObject* object, + JSObject* holder, + Object* value, + String* name); MUST_USE_RESULT MaybeObject* CompileLoadInterceptor(JSObject* object, JSObject* holder, String* name); - Handle<Code> CompileLoadGlobal(Handle<JSObject> object, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<String> name, - bool is_dont_delete); + MUST_USE_RESULT MaybeObject* CompileLoadGlobal(JSObject* object, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + String* name, + bool is_dont_delete); private: - MUST_USE_RESULT MaybeObject* TryGetCode(PropertyType type, String* name); - - Handle<Code> GetCode(PropertyType type, Handle<String> name); + MUST_USE_RESULT MaybeObject* GetCode(PropertyType type, String* name); }; class KeyedLoadStubCompiler: public StubCompiler { public: - explicit KeyedLoadStubCompiler(Isolate* isolate) : StubCompiler(isolate) { } - - Handle<Code> CompileLoadField(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> holder, - int index); - - Handle<Code> CompileLoadCallback(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<AccessorInfo> callback); + MUST_USE_RESULT MaybeObject* CompileLoadField(String* name, + JSObject* object, + JSObject* holder, + int index); MUST_USE_RESULT MaybeObject* CompileLoadCallback(String* name, JSObject* object, JSObject* holder, AccessorInfo* callback); - Handle<Code> CompileLoadConstant(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<Object> value); - - Handle<Code> CompileLoadInterceptor(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileLoadConstant(String* name, + JSObject* object, + JSObject* holder, + Object* value); MUST_USE_RESULT MaybeObject* CompileLoadInterceptor(JSObject* object, JSObject* holder, String* name); - Handle<Code> CompileLoadArrayLength(Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileLoadArrayLength(String* name); + MUST_USE_RESULT MaybeObject* CompileLoadStringLength(String* name); + MUST_USE_RESULT MaybeObject* CompileLoadFunctionPrototype(String* name); - Handle<Code> CompileLoadStringLength(Handle<String> name); + MUST_USE_RESULT MaybeObject* 
CompileLoadElement(Map* receiver_map); - Handle<Code> CompileLoadFunctionPrototype(Handle<String> name); - - Handle<Code> CompileLoadElement(Handle<Map> receiver_map); - - Handle<Code> CompileLoadPolymorphic(MapHandleList* receiver_maps, - CodeHandleList* handler_ics); + MUST_USE_RESULT MaybeObject* CompileLoadPolymorphic( + MapList* receiver_maps, + CodeList* handler_ics); static void GenerateLoadExternalArray(MacroAssembler* masm, ElementsKind elements_kind); @@ -687,40 +655,34 @@ class KeyedLoadStubCompiler: public StubCompiler { static void GenerateLoadDictionaryElement(MacroAssembler* masm); private: - MaybeObject* TryGetCode(PropertyType type, - String* name, - InlineCacheState state = MONOMORPHIC); - - Handle<Code> GetCode(PropertyType type, - Handle<String> name, + MaybeObject* GetCode(PropertyType type, + String* name, InlineCacheState state = MONOMORPHIC); }; class StoreStubCompiler: public StubCompiler { public: - StoreStubCompiler(Isolate* isolate, StrictModeFlag strict_mode) - : StubCompiler(isolate), strict_mode_(strict_mode) { } - - - Handle<Code> CompileStoreField(Handle<JSObject> object, - int index, - Handle<Map> transition, - Handle<String> name); + explicit StoreStubCompiler(StrictModeFlag strict_mode) + : strict_mode_(strict_mode) { } - Handle<Code> CompileStoreCallback(Handle<JSObject> object, - Handle<AccessorInfo> callback, - Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileStoreField(JSObject* object, + int index, + Map* transition, + String* name); - Handle<Code> CompileStoreInterceptor(Handle<JSObject> object, - Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileStoreCallback(JSObject* object, + AccessorInfo* callbacks, + String* name); + MUST_USE_RESULT MaybeObject* CompileStoreInterceptor(JSObject* object, + String* name); + MUST_USE_RESULT MaybeObject* CompileStoreGlobal(GlobalObject* object, + JSGlobalPropertyCell* holder, + String* name); - Handle<Code> CompileStoreGlobal(Handle<GlobalObject> object, - Handle<JSGlobalPropertyCell> holder, - Handle<String> name); private: - Handle<Code> GetCode(PropertyType type, Handle<String> name); + MaybeObject* GetCode(PropertyType type, String* name); StrictModeFlag strict_mode_; }; @@ -728,19 +690,20 @@ class StoreStubCompiler: public StubCompiler { class KeyedStoreStubCompiler: public StubCompiler { public: - KeyedStoreStubCompiler(Isolate* isolate, StrictModeFlag strict_mode) - : StubCompiler(isolate), strict_mode_(strict_mode) { } + explicit KeyedStoreStubCompiler(StrictModeFlag strict_mode) + : strict_mode_(strict_mode) { } - Handle<Code> CompileStoreField(Handle<JSObject> object, - int index, - Handle<Map> transition, - Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileStoreField(JSObject* object, + int index, + Map* transition, + String* name); - Handle<Code> CompileStoreElement(Handle<Map> receiver_map); + MUST_USE_RESULT MaybeObject* CompileStoreElement(Map* receiver_map); - Handle<Code> CompileStorePolymorphic(MapHandleList* receiver_maps, - CodeHandleList* handler_stubs, - MapHandleList* transitioned_maps); + MUST_USE_RESULT MaybeObject* CompileStorePolymorphic( + MapList* receiver_maps, + CodeList* handler_stubs, + MapList* transitioned_maps); static void GenerateStoreFastElement(MacroAssembler* masm, bool is_js_array, @@ -755,8 +718,8 @@ class KeyedStoreStubCompiler: public StubCompiler { static void GenerateStoreDictionaryElement(MacroAssembler* masm); private: - Handle<Code> GetCode(PropertyType type, - Handle<String> name, + MaybeObject* GetCode(PropertyType type, + String* 
name, InlineCacheState state = MONOMORPHIC); StrictModeFlag strict_mode_; @@ -779,48 +742,35 @@ class CallOptimization; class CallStubCompiler: public StubCompiler { public: - CallStubCompiler(Isolate* isolate, - int argc, + CallStubCompiler(int argc, Code::Kind kind, - Code::ExtraICState extra_state, + Code::ExtraICState extra_ic_state, InlineCacheHolderFlag cache_holder); - Handle<Code> CompileCallField(Handle<JSObject> object, - Handle<JSObject> holder, - int index, - Handle<String> name); - - Handle<Code> CompileCallConstant(Handle<Object> object, - Handle<JSObject> holder, - Handle<JSFunction> function, - Handle<String> name, - CheckType check); - - MUST_USE_RESULT MaybeObject* CompileCallConstant(Object* object, - JSObject* holder, - JSFunction* function, - String* name, - CheckType check); - - Handle<Code> CompileCallInterceptor(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileCallField( + JSObject* object, + JSObject* holder, + int index, + String* name); - MUST_USE_RESULT MaybeObject* CompileCallInterceptor(JSObject* object, - JSObject* holder, - String* name); + MUST_USE_RESULT MaybeObject* CompileCallConstant( + Object* object, + JSObject* holder, + JSFunction* function, + String* name, + CheckType check); - Handle<Code> CompileCallGlobal(Handle<JSObject> object, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<JSFunction> function, - Handle<String> name); + MUST_USE_RESULT MaybeObject* CompileCallInterceptor( + JSObject* object, + JSObject* holder, + String* name); - MUST_USE_RESULT MaybeObject* CompileCallGlobal(JSObject* object, - GlobalObject* holder, - JSGlobalPropertyCell* cell, - JSFunction* function, - String* name); + MUST_USE_RESULT MaybeObject* CompileCallGlobal( + JSObject* object, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + JSFunction* function, + String* name); static bool HasCustomCallGenerator(JSFunction* function); @@ -853,20 +803,18 @@ class CallStubCompiler: public StubCompiler { const ParameterCount arguments_; const Code::Kind kind_; - const Code::ExtraICState extra_state_; + const Code::ExtraICState extra_ic_state_; const InlineCacheHolderFlag cache_holder_; const ParameterCount& arguments() { return arguments_; } - Handle<Code> GetCode(PropertyType type, Handle<String> name); - Handle<Code> GetCode(Handle<JSFunction> function); + MUST_USE_RESULT MaybeObject* GetCode(PropertyType type, String* name); - // TODO(kmillikin): Eliminate these functions when the stub cache is fully - // handlified. - MUST_USE_RESULT MaybeObject* TryGetCode(PropertyType type, String* name); - MUST_USE_RESULT MaybeObject* TryGetCode(JSFunction* function); + // Convenience function. Calls GetCode above passing + // CONSTANT_FUNCTION type and the name of the given function. + MUST_USE_RESULT MaybeObject* GetCode(JSFunction* function); - void GenerateNameCheck(Handle<String> name, Label* miss); + void GenerateNameCheck(String* name, Label* miss); void GenerateGlobalReceiverCheck(JSObject* object, JSObject* holder, @@ -879,18 +827,15 @@ class CallStubCompiler: public StubCompiler { JSFunction* function, Label* miss); - // Generates a jump to CallIC miss stub. - void GenerateMissBranch(); - - // TODO(kmillikin): Eliminate this function when the stub cache is fully - // handlified. - MUST_USE_RESULT MaybeObject* TryGenerateMissBranch(); + // Generates a jump to CallIC miss stub. Returns Failure if the jump cannot + // be generated. 
+ MUST_USE_RESULT MaybeObject* GenerateMissBranch(); }; class ConstructStubCompiler: public StubCompiler { public: - explicit ConstructStubCompiler(Isolate* isolate) : StubCompiler(isolate) { } + explicit ConstructStubCompiler() {} MUST_USE_RESULT MaybeObject* CompileConstructStub(JSFunction* function); diff --git a/deps/v8/src/token.h b/deps/v8/src/token.h index 7a2156c95..de4972dd7 100644 --- a/deps/v8/src/token.h +++ b/deps/v8/src/token.h @@ -73,7 +73,6 @@ namespace internal { T(INIT_VAR, "=init_var", 2) /* AST-use only. */ \ T(INIT_LET, "=init_let", 2) /* AST-use only. */ \ T(INIT_CONST, "=init_const", 2) /* AST-use only. */ \ - T(INIT_CONST_HARMONY, "=init_const_harmony", 2) /* AST-use only. */ \ T(ASSIGN, "=", 2) \ T(ASSIGN_BIT_OR, "|=", 2) \ T(ASSIGN_BIT_XOR, "^=", 2) \ diff --git a/deps/v8/src/type-info.cc b/deps/v8/src/type-info.cc index afec71a99..a4b16f4f3 100644 --- a/deps/v8/src/type-info.cc +++ b/deps/v8/src/type-info.cc @@ -423,14 +423,6 @@ void TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id, } -static void AddMapIfMissing(Handle<Map> map, SmallMapList* list) { - for (int i = 0; i < list->length(); ++i) { - if (list->at(i).is_identical_to(map)) return; - } - list->Add(map); -} - - void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id, SmallMapList* types) { Handle<Object> object = GetInfo(ast_id); @@ -444,7 +436,7 @@ void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id, RelocInfo* info = it.rinfo(); Object* object = info->target_object(); if (object->IsMap()) { - AddMapIfMissing(Handle<Map>(Map::cast(object)), types); + types->Add(Handle<Map>(Map::cast(object))); } } } @@ -504,56 +496,61 @@ void TypeFeedbackOracle::RelocateRelocInfos(ZoneList<RelocInfo>* infos, void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) { for (int i = 0; i < infos->length(); i++) { - RelocInfo reloc_entry = (*infos)[i]; - Address target_address = reloc_entry.target_address(); + Address target_address = (*infos)[i].target_address(); unsigned ast_id = static_cast<unsigned>((*infos)[i].data()); - Code* target = Code::GetCodeFromTargetAddress(target_address); - switch (target->kind()) { - case Code::LOAD_IC: - case Code::STORE_IC: - case Code::CALL_IC: - case Code::KEYED_CALL_IC: - if (target->ic_state() == MONOMORPHIC) { - if (target->kind() == Code::CALL_IC && - target->check_type() != RECEIVER_MAP_CHECK) { - SetInfo(ast_id, Smi::FromInt(target->check_type())); - } else { - Object* map = target->FindFirstMap(); - SetInfo(ast_id, map == NULL ? static_cast<Object*>(target) : map); - } - } else if (target->ic_state() == MEGAMORPHIC) { - SetInfo(ast_id, target); - } - break; + ProcessTargetAt(target_address, ast_id); + } +} + - case Code::KEYED_LOAD_IC: - case Code::KEYED_STORE_IC: - if (target->ic_state() == MONOMORPHIC || - target->ic_state() == MEGAMORPHIC) { - SetInfo(ast_id, target); +void TypeFeedbackOracle::ProcessTargetAt(Address target_address, + unsigned ast_id) { + Code* target = Code::GetCodeFromTargetAddress(target_address); + switch (target->kind()) { + case Code::LOAD_IC: + case Code::STORE_IC: + case Code::CALL_IC: + case Code::KEYED_CALL_IC: + if (target->ic_state() == MONOMORPHIC) { + if (target->kind() == Code::CALL_IC && + target->check_type() != RECEIVER_MAP_CHECK) { + SetInfo(ast_id, Smi::FromInt(target->check_type())); + } else { + Object* map = target->FindFirstMap(); + SetInfo(ast_id, map == NULL ? 
static_cast<Object*>(target) : map); } - break; + } else if (target->ic_state() == MEGAMORPHIC) { + SetInfo(ast_id, target); + } + break; - case Code::UNARY_OP_IC: - case Code::BINARY_OP_IC: - case Code::COMPARE_IC: - case Code::TO_BOOLEAN_IC: + case Code::KEYED_LOAD_IC: + case Code::KEYED_STORE_IC: + if (target->ic_state() == MONOMORPHIC || + target->ic_state() == MEGAMORPHIC) { SetInfo(ast_id, target); - break; - - case Code::STUB: - if (target->major_key() == CodeStub::CallFunction && - target->has_function_cache()) { - Object* value = CallFunctionStub::GetCachedValue(reloc_entry.pc()); - if (value->IsJSFunction()) { - SetInfo(ast_id, value); - } + } + break; + + case Code::UNARY_OP_IC: + case Code::BINARY_OP_IC: + case Code::COMPARE_IC: + case Code::TO_BOOLEAN_IC: + SetInfo(ast_id, target); + break; + + case Code::STUB: + if (target->major_key() == CodeStub::CallFunction && + target->has_function_cache()) { + Object* value = CallFunctionStub::GetCachedValue(target_address); + if (value->IsJSFunction()) { + SetInfo(ast_id, value); } - break; + } + break; - default: - break; - } + default: + break; } } diff --git a/deps/v8/src/type-info.h b/deps/v8/src/type-info.h index 2c3543eaf..0ba10aaa5 100644 --- a/deps/v8/src/type-info.h +++ b/deps/v8/src/type-info.h @@ -277,6 +277,7 @@ class TypeFeedbackOracle BASE_EMBEDDED { byte* old_start, byte* new_start); void ProcessRelocInfos(ZoneList<RelocInfo>* infos); + void ProcessTargetAt(Address target_address, unsigned ast_id); // Returns an element from the backing store. Returns undefined if // there is no information. diff --git a/deps/v8/src/utils.h b/deps/v8/src/utils.h index 2e6cfbd90..a523118a3 100644 --- a/deps/v8/src/utils.h +++ b/deps/v8/src/utils.h @@ -143,16 +143,6 @@ static int PointerValueCompare(const T* a, const T* b) { } -// Compare function to compare the object pointer value of two -// handlified objects. The handles are passed as pointers to the -// handles. -template<typename T> class Handle; // Forward declaration. -template <typename T> -static int HandleObjectPointerCompare(const Handle<T>* a, const Handle<T>* b) { - return Compare<T*>(*(*a), *(*b)); -} - - // Returns the smallest power of two which is >= x. If you pass in a // number that is already a power of two, it is returned as is. // Implementation is from "Hacker's Delight" by Henry S. 
Warren, Jr., @@ -178,6 +168,7 @@ static inline uint32_t RoundDownToPowerOf2(uint32_t x) { template <typename T, typename U> static inline bool IsAligned(T value, U alignment) { + ASSERT(IsPowerOf2(alignment)); return (value & (alignment - 1)) == 0; } @@ -266,18 +257,6 @@ static inline uint32_t ComputeIntegerHash(uint32_t key) { } -static inline uint32_t ComputeLongHash(uint64_t key) { - uint64_t hash = key; - hash = ~hash + (hash << 18); // hash = (hash << 18) - hash - 1; - hash = hash ^ (hash >> 31); - hash = hash * 21; // hash = (hash + (hash << 2)) + (hash << 4); - hash = hash ^ (hash >> 11); - hash = hash + (hash << 6); - hash = hash ^ (hash >> 22); - return (uint32_t) hash; -} - - static inline uint32_t ComputePointerHash(void* ptr) { return ComputeIntegerHash( static_cast<uint32_t>(reinterpret_cast<intptr_t>(ptr))); diff --git a/deps/v8/src/v8.cc b/deps/v8/src/v8.cc index 66c65e7b0..a04114e70 100644 --- a/deps/v8/src/v8.cc +++ b/deps/v8/src/v8.cc @@ -63,7 +63,7 @@ bool V8::Initialize(Deserializer* des) { FLAG_harmony_typeof = true; FLAG_harmony_scoping = true; FLAG_harmony_proxies = true; - FLAG_harmony_collections = true; + FLAG_harmony_weakmaps = true; } InitializeOncePerProcess(); @@ -150,10 +150,9 @@ void V8::SetEntropySource(EntropySource source) { // Used by JavaScript APIs -uint32_t V8::Random(Context* context) { - ASSERT(context->IsGlobalContext()); - ByteArray* seed = context->random_seed(); - return random_base(reinterpret_cast<uint32_t*>(seed->GetDataStartAddress())); +uint32_t V8::Random(Isolate* isolate) { + ASSERT(isolate == Isolate::Current()); + return random_base(isolate->random_seed()); } @@ -183,9 +182,8 @@ typedef union { } double_int_union; -Object* V8::FillHeapNumberWithRandom(Object* heap_number, - Context* context) { - uint64_t random_bits = Random(context); +Object* V8::FillHeapNumberWithRandom(Object* heap_number, Isolate* isolate) { + uint64_t random_bits = Random(isolate); // Make a double* from address (heap_number + sizeof(double)). double_int_union* r = reinterpret_cast<double_int_union*>( reinterpret_cast<char*>(heap_number) + diff --git a/deps/v8/src/v8.h b/deps/v8/src/v8.h index 01feefce6..2e039d429 100644 --- a/deps/v8/src/v8.h +++ b/deps/v8/src/v8.h @@ -96,14 +96,14 @@ class V8 : public AllStatic { // generation. static void SetEntropySource(EntropySource source); // Random number generation support. Not cryptographically safe. - static uint32_t Random(Context* context); + static uint32_t Random(Isolate* isolate); // We use random numbers internally in memory allocation and in the // compilers for security. In order to prevent information leaks we // use a separate random state for internal random number // generation. static uint32_t RandomPrivate(Isolate* isolate); static Object* FillHeapNumberWithRandom(Object* heap_number, - Context* context); + Isolate* isolate); // Idle notification directly from the API. static bool IdleNotification(); diff --git a/deps/v8/src/v8globals.h b/deps/v8/src/v8globals.h index f4703ff09..09d26d2f1 100644 --- a/deps/v8/src/v8globals.h +++ b/deps/v8/src/v8globals.h @@ -509,16 +509,6 @@ enum CallKind { }; -enum ScopeType { - EVAL_SCOPE, // The top-level scope for an eval source. - FUNCTION_SCOPE, // The top-level scope for a function. - GLOBAL_SCOPE, // The top-level scope for a program or a top-level eval. - CATCH_SCOPE, // The scope introduced by catch. - BLOCK_SCOPE, // The scope introduced by a new block. - WITH_SCOPE // The scope introduced by with. 
-}; - - static const uint32_t kHoleNanUpper32 = 0x7FFFFFFF; static const uint32_t kHoleNanLower32 = 0xFFFFFFFF; static const uint32_t kNaNOrInfinityLowerBoundUpper32 = 0x7FF00000; @@ -531,13 +521,11 @@ const uint64_t kLastNonNaNInt64 = enum VariableMode { // User declared variables: - VAR, // declared via 'var', and 'function' declarations - - CONST, // declared via 'const' declarations + VAR, // declared via 'var', and 'function' declarations - CONST_HARMONY, // declared via 'const' declarations in harmony mode + CONST, // declared via 'const' declarations - LET, // declared via 'let' declarations + LET, // declared via 'let' declarations // Variables introduced by the compiler: DYNAMIC, // always require dynamic lookup (we don't know @@ -559,13 +547,6 @@ enum VariableMode { // in a context }; - -enum ClearExceptionFlag { - KEEP_EXCEPTION, - CLEAR_EXCEPTION -}; - - } } // namespace v8::internal #endif // V8_V8GLOBALS_H_ diff --git a/deps/v8/src/v8natives.js b/deps/v8/src/v8natives.js index e6669d58a..dee303237 100644 --- a/deps/v8/src/v8natives.js +++ b/deps/v8/src/v8natives.js @@ -373,7 +373,6 @@ function IsDataDescriptor(desc) { // ES5 8.10.3. function IsGenericDescriptor(desc) { - if (IS_UNDEFINED(desc)) return false; return !(IsAccessorDescriptor(desc) || IsDataDescriptor(desc)); } @@ -705,7 +704,7 @@ function DefineObjectProperty(obj, p, desc, should_throw) { if (should_throw) { throw MakeTypeError("define_disallowed", [p]); } else { - return false; + return; } } @@ -735,7 +734,7 @@ function DefineObjectProperty(obj, p, desc, should_throw) { if (should_throw) { throw MakeTypeError("redefine_disallowed", [p]); } else { - return false; + return; } } // Step 8 @@ -745,7 +744,7 @@ function DefineObjectProperty(obj, p, desc, should_throw) { if (should_throw) { throw MakeTypeError("redefine_disallowed", [p]); } else { - return false; + return; } } // Step 10a @@ -754,7 +753,7 @@ function DefineObjectProperty(obj, p, desc, should_throw) { if (should_throw) { throw MakeTypeError("redefine_disallowed", [p]); } else { - return false; + return; } } if (!current.isWritable() && desc.hasValue() && @@ -762,7 +761,7 @@ function DefineObjectProperty(obj, p, desc, should_throw) { if (should_throw) { throw MakeTypeError("redefine_disallowed", [p]); } else { - return false; + return; } } } @@ -772,14 +771,14 @@ function DefineObjectProperty(obj, p, desc, should_throw) { if (should_throw) { throw MakeTypeError("redefine_disallowed", [p]); } else { - return false; + return; } } if (desc.hasGetter() && !SameValue(desc.getGet(),current.getGet())) { if (should_throw) { throw MakeTypeError("redefine_disallowed", [p]); } else { - return false; + return; } } } @@ -882,7 +881,7 @@ function DefineArrayProperty(obj, p, desc, should_throw) { if (should_throw) { throw MakeTypeError("define_disallowed", [p]); } else { - return false; + return; } } if (index >= length) { @@ -937,14 +936,14 @@ function ToStringArray(obj, trap) { } var n = ToUint32(obj.length); var array = new $Array(n); - var names = {} // TODO(rossberg): use sets once they are ready. 
+ var names = {} for (var index = 0; index < n; index++) { var s = ToString(obj[index]); if (s in names) { throw MakeTypeError("proxy_repeated_prop_name", [obj, trap, s]) } array[index] = s; - names[s] = 0; + names.s = 0; } return array; } @@ -1079,12 +1078,10 @@ function ObjectDefineProperties(obj, properties) { throw MakeTypeError("obj_ctor_property_non_object", ["defineProperties"]); var props = ToObject(properties); var names = GetOwnEnumerablePropertyNames(props); - var descriptors = new InternalArray(); for (var i = 0; i < names.length; i++) { - descriptors.push(ToPropertyDescriptor(props[names[i]])); - } - for (var i = 0; i < names.length; i++) { - DefineOwnProperty(obj, names[i], descriptors[i], true); + var name = names[i]; + var desc = ToPropertyDescriptor(props[name]); + DefineOwnProperty(obj, name, desc, true); } return obj; } @@ -1520,53 +1517,53 @@ function FunctionToString() { // ES5 15.3.4.5 function FunctionBind(this_arg) { // Length is 1. if (!IS_SPEC_FUNCTION(this)) { - throw new $TypeError('Bind must be called on a function'); - } - var boundFunction = function () { - // Poison .arguments and .caller, but is otherwise not detectable. - "use strict"; - // This function must not use any object literals (Object, Array, RegExp), - // since the literals-array is being used to store the bound data. - if (%_IsConstructCall()) { - return %NewObjectFromBound(boundFunction); + throw new $TypeError('Bind must be called on a function'); + } + // this_arg is not an argument that should be bound. + var argc_bound = (%_ArgumentsLength() || 1) - 1; + var fn = this; + + if (argc_bound == 0) { + var result = function() { + if (%_IsConstructCall()) { + // %NewObjectFromBound implicitly uses arguments passed to this + // function. We do not pass the arguments object explicitly to avoid + // materializing it and guarantee that this function will be optimized. + return %NewObjectFromBound(fn, null); + } + return %Apply(fn, this_arg, arguments, 0, %_ArgumentsLength()); + }; + } else { + var bound_args = new InternalArray(argc_bound); + for(var i = 0; i < argc_bound; i++) { + bound_args[i] = %_Arguments(i+1); } - var bindings = %BoundFunctionGetBindings(boundFunction); - var argc = %_ArgumentsLength(); - if (argc == 0) { - return %Apply(bindings[0], bindings[1], bindings, 2, bindings.length - 2); - } - if (bindings.length === 2) { - return %Apply(bindings[0], bindings[1], arguments, 0, argc); - } - var bound_argc = bindings.length - 2; - var argv = new InternalArray(bound_argc + argc); - for (var i = 0; i < bound_argc; i++) { - argv[i] = bindings[i + 2]; - } - for (var j = 0; j < argc; j++) { - argv[i++] = %_Arguments(j); - } - return %Apply(bindings[0], bindings[1], argv, 0, bound_argc + argc); - }; - - %FunctionRemovePrototype(boundFunction); - var new_length = 0; - if (%_ClassOf(this) == "Function") { - // Function or FunctionProxy. - var old_length = this.length; - // FunctionProxies might provide a non-UInt32 value. If so, ignore it. - if ((typeof old_length === "number") && - ((old_length >>> 0) === old_length)) { + var result = function() { + // If this is a construct call we use a special runtime method + // to generate the actual object using the bound function. + if (%_IsConstructCall()) { + // %NewObjectFromBound implicitly uses arguments passed to this + // function. We do not pass the arguments object explicitly to avoid + // materializing it and guarantee that this function will be optimized. 
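// Illustration (editorial note, not part of the patch): the ObjectDefineProperties hunk above
// reverts a two-phase algorithm (convert every descriptor, then define) back to an interleaved
// one (convert and define one property at a time). The difference is observable when the
// properties object uses accessors, for example:
var target = {};
var props = {};
Object.defineProperty(props, "a", { enumerable: true,
    get: function () { return { value: 1 }; } });
Object.defineProperty(props, "b", { enumerable: true,
    get: function () { return { value: ("a" in target) ? 2 : 0 }; } });
Object.defineProperties(target, props);
// Interleaved (code restored here): "a" is already defined when b's getter runs, so target.b === 2.
// Two-phase (code being reverted): both getters run before any define, so target.b === 0.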
+ return %NewObjectFromBound(fn, bound_args); + } + + // Combine the args we got from the bind call with the args + // given as argument to the invocation. var argc = %_ArgumentsLength(); - if (argc > 0) argc--; // Don't count the thisArg as parameter. - new_length = old_length - argc; - if (new_length < 0) new_length = 0; - } + var args = new InternalArray(argc + argc_bound); + // Add bound arguments. + for (var i = 0; i < argc_bound; i++) { + args[i] = bound_args[i]; + } + // Add arguments from call. + for (var i = 0; i < argc; i++) { + args[argc_bound + i] = %_Arguments(i); + } + return %Apply(fn, this_arg, args, 0, argc + argc_bound); + }; } - // This runtime function finds any remaining arguments on the stack, - // so we don't pass the arguments object. - var result = %FunctionBindArguments(boundFunction, this, this_arg, new_length); // We already have caller and arguments properties on functions, // which are non-configurable. It therefore makes no sence to @@ -1574,7 +1571,17 @@ function FunctionBind(this_arg) { // Length is 1. // that bind should make these throw a TypeError if get or set // is called and make them non-enumerable and non-configurable. // To be consistent with our normal functions we leave this as it is. - // TODO(lrn): Do set these to be thrower. + + %FunctionRemovePrototype(result); + %FunctionSetBound(result); + // Set the correct length. If this is a function proxy, this.length might + // throw, or return a bogus result. Leave length alone in that case. + // TODO(rossberg): This is underspecified in the current proxy proposal. + try { + var old_length = ToInteger(this.length); + var length = (old_length - argc_bound) > 0 ? old_length - argc_bound : 0; + %BoundFunctionSetLength(result, length); + } catch(x) {} return result; } diff --git a/deps/v8/src/variables.cc b/deps/v8/src/variables.cc index d85e1b270..076cdc0a4 100644 --- a/deps/v8/src/variables.cc +++ b/deps/v8/src/variables.cc @@ -41,7 +41,6 @@ const char* Variable::Mode2String(VariableMode mode) { switch (mode) { case VAR: return "VAR"; case CONST: return "CONST"; - case CONST_HARMONY: return "CONST"; case LET: return "LET"; case DYNAMIC: return "DYNAMIC"; case DYNAMIC_GLOBAL: return "DYNAMIC_GLOBAL"; diff --git a/deps/v8/src/variables.h b/deps/v8/src/variables.h index 8b2d86956..612d8d33c 100644 --- a/deps/v8/src/variables.h +++ b/deps/v8/src/variables.h @@ -118,15 +118,6 @@ class Variable: public ZoneObject { mode_ == DYNAMIC_GLOBAL || mode_ == DYNAMIC_LOCAL); } - bool is_const_mode() const { - return (mode_ == CONST || - mode_ == CONST_HARMONY); - } - bool binding_needs_init() const { - return (mode_ == LET || - mode_ == CONST || - mode_ == CONST_HARMONY); - } bool is_global() const; bool is_this() const { return kind_ == THIS; } @@ -163,10 +154,6 @@ class Variable: public ZoneObject { Location location_; int index_; - // If this field is set, this variable references the stored locally bound - // variable, but it might be shadowed by variable bindings introduced by - // non-strict 'eval' calls between the reference scope (inclusive) and the - // binding scope (exclusive). Variable* local_if_not_shadowed_; // Valid as a LHS? (const and this are not valid LHS, for example) diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc index d34638bf8..30402266a 100644 --- a/deps/v8/src/version.cc +++ b/deps/v8/src/version.cc @@ -34,7 +34,7 @@ // cannot be changed without changing the SCons build script. 
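// Illustration (editorial note, not part of the patch): both versions of FunctionBind above
// implement the same ES5 15.3.4.5 surface; only the internal representation of the bound
// arguments changes. A quick sketch of that observable behaviour:
function add(a, b, c) { return a + b + c; }
var add5 = add.bind(null, 5);
add5(1, 2);                  // 8: the bound argument is prepended to the call arguments
add5.length;                 // 2: the target's length (3) minus the one bound argument
function Point(x, y) { this.x = x; this.y = y; }
var P = Point.bind({ ignored: true }, 10);
new P(20);                   // {x: 10, y: 20}: [[Construct]] keeps bound args, ignores bound this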
#define MAJOR_VERSION 3 #define MINOR_VERSION 7 -#define BUILD_NUMBER 1 +#define BUILD_NUMBER 0 #define PATCH_LEVEL 0 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/src/collection.js b/deps/v8/src/weakmap.js index 4e45885b9..5fb515107 100644 --- a/deps/v8/src/collection.js +++ b/deps/v8/src/weakmap.js @@ -26,69 +26,12 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -const $Set = global.Set; -const $Map = global.Map; +// This file relies on the fact that the following declaration has been made +// in runtime.js: +// const $Object = global.Object; const $WeakMap = global.WeakMap; -//------------------------------------------------------------------- - -function SetConstructor() { - if (%_IsConstructCall()) { - %SetInitialize(this); - } else { - return new $Set(); - } -} - - -function SetAdd(key) { - return %SetAdd(this, key); -} - - -function SetHas(key) { - return %SetHas(this, key); -} - - -function SetDelete(key) { - return %SetDelete(this, key); -} - - -function MapConstructor() { - if (%_IsConstructCall()) { - %MapInitialize(this); - } else { - return new $Map(); - } -} - - -function MapGet(key) { - return %MapGet(this, key); -} - - -function MapSet(key, value) { - return %MapSet(this, key, value); -} - - -function MapHas(key) { - return !IS_UNDEFINED(%MapGet(this, key)); -} - - -function MapDelete(key) { - if (!IS_UNDEFINED(%MapGet(this, key))) { - %MapSet(this, key, void 0); - return true; - } else { - return false; - } -} - +// ------------------------------------------------------------------- function WeakMapConstructor() { if (%_IsConstructCall()) { @@ -139,30 +82,6 @@ function WeakMapDelete(key) { (function () { %CheckIsBootstrapping(); - - // Set up the Set and Map constructor function. - %SetCode($Set, SetConstructor); - %SetCode($Map, MapConstructor); - - // Set up the constructor property on the Set and Map prototype object. - %SetProperty($Set.prototype, "constructor", $Set, DONT_ENUM); - %SetProperty($Map.prototype, "constructor", $Map, DONT_ENUM); - - // Set up the non-enumerable functions on the Set prototype object. - InstallFunctionsOnHiddenPrototype($Set.prototype, DONT_ENUM, $Array( - "add", SetAdd, - "has", SetHas, - "delete", SetDelete - )); - - // Set up the non-enumerable functions on the Map prototype object. - InstallFunctionsOnHiddenPrototype($Map.prototype, DONT_ENUM, $Array( - "get", MapGet, - "set", MapSet, - "has", MapHas, - "delete", MapDelete - )); - // Set up the WeakMap constructor function. 
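// Illustration (editorial note, not part of the patch): renaming collection.js back to weakmap.js
// drops the Set and Map wrappers shown above (add/has/delete and get/set/has/delete) and keeps
// only WeakMap. Assuming the harmony flag is enabled, the surviving surface looks like:
var wm = new WeakMap();
var key = {};                // WeakMap keys must be objects
wm.set(key, "value");
wm.get(key);                 // "value"
wm.has(key);                 // true
wm.delete(key);              // true
// The Set and Map constructors wired up above exist only in the 3.7.1 code being reverted.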
%SetCode($WeakMap, WeakMapConstructor); diff --git a/deps/v8/src/x64/assembler-x64-inl.h b/deps/v8/src/x64/assembler-x64-inl.h index f7b87ec04..10f0b886d 100644 --- a/deps/v8/src/x64/assembler-x64-inl.h +++ b/deps/v8/src/x64/assembler-x64-inl.h @@ -238,12 +238,12 @@ int RelocInfo::target_address_size() { } -void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) { +void RelocInfo::set_target_address(Address target) { ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY); if (IsCodeTarget(rmode_)) { Assembler::set_target_address_at(pc_, target); Object* target_code = Code::GetCodeFromTargetAddress(target); - if (mode == UPDATE_WRITE_BARRIER && host() != NULL) { + if (host() != NULL) { host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( host(), this, HeapObject::cast(target_code)); } @@ -282,13 +282,11 @@ Address* RelocInfo::target_reference_address() { } -void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) { +void RelocInfo::set_target_object(Object* target) { ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); Memory::Object_at(pc_) = target; CPU::FlushICache(pc_, sizeof(Address)); - if (mode == UPDATE_WRITE_BARRIER && - host() != NULL && - target->IsHeapObject()) { + if (host() != NULL && target->IsHeapObject()) { host()->GetHeap()->incremental_marking()->RecordWrite( host(), &Memory::Object_at(pc_), HeapObject::cast(target)); } @@ -312,14 +310,12 @@ JSGlobalPropertyCell* RelocInfo::target_cell() { } -void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell, - WriteBarrierMode mode) { +void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) { ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL); Address address = cell->address() + JSGlobalPropertyCell::kValueOffset; Memory::Address_at(pc_) = address; CPU::FlushICache(pc_, sizeof(Address)); - if (mode == UPDATE_WRITE_BARRIER && - host() != NULL) { + if (host() != NULL) { // TODO(1550) We are passing NULL as a slot because cell can never be on // evacuation candidate. host()->GetHeap()->incremental_marking()->RecordWrite( diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc index 8baa2f32f..79ddb1393 100644 --- a/deps/v8/src/x64/builtins-x64.cc +++ b/deps/v8/src/x64/builtins-x64.cc @@ -670,7 +670,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ testq(rax, rax); __ j(not_zero, &done); __ pop(rbx); - __ Push(masm->isolate()->factory()->undefined_value()); + __ Push(FACTORY->undefined_value()); __ push(rbx); __ incq(rax); __ bind(&done); @@ -993,6 +993,10 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { } +// Number of empty elements to allocate for an empty array. +static const int kPreallocatedArrayElements = 4; + + // Allocate an empty JSArray. The allocated array is put into the result // register. If the parameter initial_capacity is larger than zero an elements // backing store is allocated with this size and filled with the hole values. @@ -1003,9 +1007,9 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, Register scratch1, Register scratch2, Register scratch3, + int initial_capacity, Label* gc_required) { - const int initial_capacity = JSArray::kPreallocatedArrayElements; - STATIC_ASSERT(initial_capacity >= 0); + ASSERT(initial_capacity >= 0); // Load the initial map from the array function. 
__ movq(scratch1, FieldOperand(array_function, @@ -1029,10 +1033,9 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, // result: JSObject // scratch1: initial map // scratch2: start of next object - Factory* factory = masm->isolate()->factory(); __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1); __ Move(FieldOperand(result, JSArray::kPropertiesOffset), - factory->empty_fixed_array()); + FACTORY->empty_fixed_array()); // Field JSArray::kElementsOffset is initialized later. __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0)); @@ -1040,7 +1043,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, // fixed array. if (initial_capacity == 0) { __ Move(FieldOperand(result, JSArray::kElementsOffset), - factory->empty_fixed_array()); + FACTORY->empty_fixed_array()); return; } @@ -1057,14 +1060,15 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, // scratch1: elements array // scratch2: start of next object __ Move(FieldOperand(scratch1, HeapObject::kMapOffset), - factory->fixed_array_map()); + FACTORY->fixed_array_map()); __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset), Smi::FromInt(initial_capacity)); // Fill the FixedArray with the hole value. Inline the code if short. // Reconsider loop unfolding if kPreallocatedArrayElements gets changed. static const int kLoopUnfoldLimit = 4; - __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex); + ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit); + __ Move(scratch3, FACTORY->the_hole_value()); if (initial_capacity <= kLoopUnfoldLimit) { // Use a scratch register here to have only one reloc info when unfolding // the loop. @@ -1097,25 +1101,38 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, // register elements_array is scratched. static void AllocateJSArray(MacroAssembler* masm, Register array_function, // Array function. - Register array_size, // As a smi, cannot be 0. + Register array_size, // As a smi. Register result, Register elements_array, Register elements_array_end, Register scratch, bool fill_with_hole, Label* gc_required) { + Label not_empty, allocated; + // Load the initial map from the array function. __ movq(elements_array, FieldOperand(array_function, JSFunction::kPrototypeOrInitialMapOffset)); - if (FLAG_debug_code) { // Assert that array size is not zero. - __ testq(array_size, array_size); - __ Assert(not_zero, "array size is unexpectedly 0"); - } + // Check whether an empty sized array is requested. + __ testq(array_size, array_size); + __ j(not_zero, &not_empty); + + // If an empty array is requested allocate a small elements array anyway. This + // keeps the code below free of special casing for the empty array. int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements); + __ AllocateInNewSpace(size, + result, + elements_array_end, + scratch, + gc_required, + TAG_OBJECT); + __ jmp(&allocated); // Allocate the JSArray object together with space for a FixedArray with the // requested elements.
+ __ bind(&not_empty); SmiIndex index = masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2); __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize, @@ -1133,9 +1150,9 @@ static void AllocateJSArray(MacroAssembler* masm, // elements_array: initial map // elements_array_end: start of next object // array_size: size of array (smi) - Factory* factory = masm->isolate()->factory(); + __ bind(&allocated); __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array); - __ Move(elements_array, factory->empty_fixed_array()); + __ Move(elements_array, FACTORY->empty_fixed_array()); __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array); // Field JSArray::kElementsOffset is initialized later. __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size); @@ -1154,7 +1171,16 @@ static void AllocateJSArray(MacroAssembler* masm, // elements_array_end: start of next object // array_size: size of array (smi) __ Move(FieldOperand(elements_array, JSObject::kMapOffset), - factory->fixed_array_map()); + FACTORY->fixed_array_map()); + Label not_empty_2, fill_array; + __ SmiTest(array_size); + __ j(not_zero, &not_empty_2); + // Length of the FixedArray is the number of pre-allocated elements even + // though the actual JSArray has length 0. + __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset), + Smi::FromInt(kPreallocatedArrayElements)); + __ jmp(&fill_array); + __ bind(&not_empty_2); // For non-empty JSArrays the length of the FixedArray and the JSArray is the // same. __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size); @@ -1163,9 +1189,10 @@ static void AllocateJSArray(MacroAssembler* masm, // result: JSObject // elements_array: elements array // elements_array_end: start of next object + __ bind(&fill_array); if (fill_with_hole) { Label loop, entry; - __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); + __ Move(scratch, FACTORY->the_hole_value()); __ lea(elements_array, Operand(elements_array, FixedArray::kHeaderSize - kHeapObjectTag)); __ jmp(&entry); @@ -1195,13 +1222,12 @@ static void AllocateJSArray(MacroAssembler* masm, // a construct call and a normal call. static void ArrayNativeCode(MacroAssembler* masm, Label *call_generic_code) { - Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array; + Label argc_one_or_more, argc_two_or_more; // Check for array construction with zero arguments. __ testq(rax, rax); __ j(not_zero, &argc_one_or_more); - __ bind(&empty_array); // Handle construction of an empty array. AllocateEmptyJSArray(masm, rdi, rbx, rcx, rdx, r8, + kPreallocatedArrayElements, call_generic_code); Counters* counters = masm->isolate()->counters(); __ IncrementCounter(counters->array_function_native(), 1); @@ -1221,16 +1248,6 @@ __ cmpq(rax, Immediate(1)); __ j(not_equal, &argc_two_or_more); __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack. - - __ SmiTest(rdx); - __ j(not_zero, &not_empty_array); - __ pop(r8); // Adjust stack. - __ Drop(1); - __ push(r8); - __ movq(rax, Immediate(0)); // Treat this as a call with argc of zero. - __ jmp(&empty_array); - - __ bind(&not_empty_array); __ JumpUnlessNonNegativeSmi(rdx, call_generic_code); // Handle construction of an empty array of a certain size.
Bail out if size diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc index 3dfebeec4..7d41ffe53 100644 --- a/deps/v8/src/x64/code-stubs-x64.cc +++ b/deps/v8/src/x64/code-stubs-x64.cc @@ -227,12 +227,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { // [rsp + (3 * kPointerSize)]: literals array. // All sizes here are multiples of kPointerSize. - int elements_size = 0; - if (length_ > 0) { - elements_size = mode_ == CLONE_DOUBLE_ELEMENTS - ? FixedDoubleArray::SizeFor(length_) - : FixedArray::SizeFor(length_); - } + int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; int size = JSArray::kSize + elements_size; // Load boilerplate object into rcx and check if we need to create a @@ -252,9 +247,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { if (mode_ == CLONE_ELEMENTS) { message = "Expected (writable) fixed array"; expected_map_index = Heap::kFixedArrayMapRootIndex; - } else if (mode_ == CLONE_DOUBLE_ELEMENTS) { - message = "Expected (writable) fixed double array"; - expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; } else { ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); message = "Expected copy-on-write fixed array"; @@ -288,24 +280,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); // Copy the elements array. - if (mode_ == CLONE_ELEMENTS) { - for (int i = 0; i < elements_size; i += kPointerSize) { - __ movq(rbx, FieldOperand(rcx, i)); - __ movq(FieldOperand(rdx, i), rbx); - } - } else { - ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS); - int i; - for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) { - __ movq(rbx, FieldOperand(rcx, i)); - __ movq(FieldOperand(rdx, i), rbx); - } - while (i < elements_size) { - __ movsd(xmm0, FieldOperand(rcx, i)); - __ movsd(FieldOperand(rdx, i), xmm0); - i += kDoubleSize; - } - ASSERT(i == elements_size); + for (int i = 0; i < elements_size; i += kPointerSize) { + __ movq(rbx, FieldOperand(rcx, i)); + __ movq(FieldOperand(rdx, i), rbx); } } @@ -3902,7 +3879,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ bind(&miss); } - __ TryGetFunctionPrototype(rdx, rbx, &slow, true); + __ TryGetFunctionPrototype(rdx, rbx, &slow); // Check that the function prototype is a JS object. __ JumpIfSmi(rbx, &slow); @@ -5461,68 +5438,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { } -void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, - Label* miss, - Label* done, - Register properties, - Handle<String> name, - Register r0) { - // If names of slots in range from 1 to kProbes - 1 for the hash value are - // not equal to the name and kProbes-th slot is not used (its name is the - // undefined value), it guarantees the hash table doesn't contain the - // property. It's true even if some slots represent deleted properties - // (their names are the null value). - for (int i = 0; i < kInlinedProbes; i++) { - // r0 points to properties hash. - // Compute the masked index: (hash + i + i * i) & mask. - Register index = r0; - // Capacity is smi 2^n. - __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset)); - __ decl(index); - __ and_(index, - Immediate(name->Hash() + StringDictionary::GetProbeOffset(i))); - - // Scale the index by multiplying by the entry size. - ASSERT(StringDictionary::kEntrySize == 3); - __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. 
- - Register entity_name = r0; - // Having undefined at this place means the name is not contained. - ASSERT_EQ(kSmiTagSize, 1); - __ movq(entity_name, Operand(properties, - index, - times_pointer_size, - kElementsStartOffset - kHeapObjectTag)); - __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); - __ j(equal, done); - - // Stop if found the property. - __ Cmp(entity_name, Handle<String>(name)); - __ j(equal, miss); - - // Check if the entry name is not a symbol. - __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); - __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset), - Immediate(kIsSymbolMask)); - __ j(zero, miss); - } - - StringDictionaryLookupStub stub(properties, - r0, - r0, - StringDictionaryLookupStub::NEGATIVE_LOOKUP); - __ Push(Handle<Object>(name)); - __ push(Immediate(name->Hash())); - __ CallStub(&stub); - __ testq(r0, r0); - __ j(not_zero, miss); - __ jmp(done); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup( +MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup( MacroAssembler* masm, Label* miss, Label* done, @@ -5749,15 +5665,6 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { { rbx, rdx, rcx, EMIT_REMEMBERED_SET}, // KeyedStoreStubCompiler::GenerateStoreFastElement. { rdi, rdx, rcx, EMIT_REMEMBERED_SET}, - // ElementsTransitionGenerator::GenerateSmiOnlyToObject - // and ElementsTransitionGenerator::GenerateSmiOnlyToObject - // and ElementsTransitionGenerator::GenerateDoubleToObject - { rdx, rbx, rdi, EMIT_REMEMBERED_SET}, - // ElementsTransitionGenerator::GenerateSmiOnlyToDouble - // and ElementsTransitionGenerator::GenerateDoubleToObject - { rdx, r11, r15, EMIT_REMEMBERED_SET}, - // ElementsTransitionGenerator::GenerateDoubleToObject - { r11, rax, r15, EMIT_REMEMBERED_SET}, // Null termination. { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} }; @@ -6005,6 +5912,7 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker( // Fall through when we need to inform the incremental marker. } + #undef __ } } // namespace v8::internal diff --git a/deps/v8/src/x64/code-stubs-x64.h b/deps/v8/src/x64/code-stubs-x64.h index ffa3f4d20..698ba403c 100644 --- a/deps/v8/src/x64/code-stubs-x64.h +++ b/deps/v8/src/x64/code-stubs-x64.h @@ -423,16 +423,7 @@ class StringDictionaryLookupStub: public CodeStub { void Generate(MacroAssembler* masm); - static void GenerateNegativeLookup(MacroAssembler* masm, - Label* miss, - Label* done, - Register properties, - Handle<String> name, - Register r0); - - // TODO(kmillikin): Eliminate this function when the stub cache is fully - // handlified. 
- MUST_USE_RESULT static MaybeObject* TryGenerateNegativeLookup( + MUST_USE_RESULT static MaybeObject* GenerateNegativeLookup( MacroAssembler* masm, Label* miss, Label* done, diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc index 4c216e8f2..f6102c7c7 100644 --- a/deps/v8/src/x64/codegen-x64.cc +++ b/deps/v8/src/x64/codegen-x64.cc @@ -30,7 +30,6 @@ #if defined(V8_TARGET_ARCH_X64) #include "codegen.h" -#include "macro-assembler.h" namespace v8 { namespace internal { @@ -144,224 +143,6 @@ ModuloFunction CreateModuloFunction() { #endif -#undef __ - -// ------------------------------------------------------------------------- -// Code generators - -#define __ ACCESS_MASM(masm) - -void ElementsTransitionGenerator::GenerateSmiOnlyToObject( - MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- rax : value - // -- rbx : target map - // -- rcx : key - // -- rdx : receiver - // -- rsp[0] : return address - // ----------------------------------- - // Set transitioned map. - __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); - __ RecordWriteField(rdx, - HeapObject::kMapOffset, - rbx, - rdi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); -} - - -void ElementsTransitionGenerator::GenerateSmiOnlyToDouble( - MacroAssembler* masm, Label* fail) { - // ----------- S t a t e ------------- - // -- rax : value - // -- rbx : target map - // -- rcx : key - // -- rdx : receiver - // -- rsp[0] : return address - // ----------------------------------- - // The fail label is not actually used since we do not allocate. - Label allocated, cow_array; - - // Check backing store for COW-ness. If the negative case, we do not have to - // allocate a new array, since FixedArray and FixedDoubleArray do not differ - // in size. - __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); - __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); - __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset), - Heap::kFixedCOWArrayMapRootIndex); - __ j(equal, &cow_array); - __ movq(r14, r8); // Destination array equals source array. - - __ bind(&allocated); - // r8 : source FixedArray - // r9 : elements array length - // r14: destination FixedDoubleArray - // Set backing store's map - __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); - __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi); - - // Set transitioned map. - __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); - __ RecordWriteField(rdx, - HeapObject::kMapOffset, - rbx, - rdi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - - // Convert smis to doubles and holes to hole NaNs. The Array's length - // remains unchanged. - STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset); - STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); - - Label loop, entry, convert_hole; - __ movq(r15, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE); - // r15: the-hole NaN - __ jmp(&entry); - - // Allocate new array if the source array is a COW array. - __ bind(&cow_array); - __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); - __ AllocateInNewSpace(rdi, r14, r11, r15, fail, TAG_OBJECT); - // Set receiver's backing store. - __ movq(FieldOperand(rdx, JSObject::kElementsOffset), r14); - __ movq(r11, r14); - __ RecordWriteField(rdx, - JSObject::kElementsOffset, - r11, - r15, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - // Set backing store's length. 
- __ Integer32ToSmi(r11, r9); - __ movq(FieldOperand(r14, FixedDoubleArray::kLengthOffset), r11); - __ jmp(&allocated); - - // Conversion loop. - __ bind(&loop); - __ decq(r9); - __ movq(rbx, - FieldOperand(r8, r9, times_8, FixedArray::kHeaderSize)); - // r9 : current element's index - // rbx: current element (smi-tagged) - __ JumpIfNotSmi(rbx, &convert_hole); - __ SmiToInteger32(rbx, rbx); - __ cvtlsi2sd(xmm0, rbx); - __ movsd(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), - xmm0); - __ jmp(&entry); - __ bind(&convert_hole); - __ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15); - __ bind(&entry); - __ testq(r9, r9); - __ j(not_zero, &loop); -} - - -void ElementsTransitionGenerator::GenerateDoubleToObject( - MacroAssembler* masm, Label* fail) { - // ----------- S t a t e ------------- - // -- rax : value - // -- rbx : target map - // -- rcx : key - // -- rdx : receiver - // -- rsp[0] : return address - // ----------------------------------- - Label loop, entry, convert_hole, gc_required; - __ push(rax); - - __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); - __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); - // r8 : source FixedDoubleArray - // r9 : number of elements - __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); - __ AllocateInNewSpace(rdi, r11, r14, r15, &gc_required, TAG_OBJECT); - // r11: destination FixedArray - __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex); - __ movq(FieldOperand(r11, HeapObject::kMapOffset), rdi); - __ Integer32ToSmi(r14, r9); - __ movq(FieldOperand(r11, FixedArray::kLengthOffset), r14); - - // Prepare for conversion loop. - __ movq(rsi, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE); - __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex); - // rsi: the-hole NaN - // rdi: pointer to the-hole - __ jmp(&entry); - - // Call into runtime if GC is required. - __ bind(&gc_required); - __ pop(rax); - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); - __ jmp(fail); - - // Box doubles into heap numbers. - __ bind(&loop); - __ decq(r9); - __ movq(r14, FieldOperand(r8, - r9, - times_pointer_size, - FixedDoubleArray::kHeaderSize)); - // r9 : current element's index - // r14: current element - __ cmpq(r14, rsi); - __ j(equal, &convert_hole); - - // Non-hole double, copy value into a heap number. - __ AllocateHeapNumber(rax, r15, &gc_required); - // rax: new heap number - __ movq(FieldOperand(rax, HeapNumber::kValueOffset), r14); - __ movq(FieldOperand(r11, - r9, - times_pointer_size, - FixedArray::kHeaderSize), - rax); - __ movq(r15, r9); - __ RecordWriteArray(r11, - rax, - r15, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ jmp(&entry, Label::kNear); - - // Replace the-hole NaN with the-hole pointer. - __ bind(&convert_hole); - __ movq(FieldOperand(r11, - r9, - times_pointer_size, - FixedArray::kHeaderSize), - rdi); - - __ bind(&entry); - __ testq(r9, r9); - __ j(not_zero, &loop); - - // Set transitioned map. - __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); - __ RecordWriteField(rdx, - HeapObject::kMapOffset, - rbx, - rdi, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - // Replace receiver's backing store with newly created and filled FixedArray. 
- __ movq(FieldOperand(rdx, JSObject::kElementsOffset), r11); - __ RecordWriteField(rdx, - JSObject::kElementsOffset, - r11, - r15, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ pop(rax); - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); -} #undef __ diff --git a/deps/v8/src/x64/deoptimizer-x64.cc b/deps/v8/src/x64/deoptimizer-x64.cc index d0a052b42..b7e334ee7 100644 --- a/deps/v8/src/x64/deoptimizer-x64.cc +++ b/deps/v8/src/x64/deoptimizer-x64.cc @@ -258,13 +258,16 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, Assembler::set_target_address_at(call_target_address, replacement_code->entry()); - unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( - unoptimized_code, call_target_address, replacement_code); + RelocInfo rinfo(call_target_address, + RelocInfo::CODE_TARGET, + 0, + unoptimized_code); + unoptimized_code->GetHeap()->incremental_marking()->RecordWriteIntoCode( + unoptimized_code, &rinfo, replacement_code); } -void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, - Address pc_after, +void Deoptimizer::RevertStackCheckCodeAt(Address pc_after, Code* check_code, Code* replacement_code) { Address call_target_address = pc_after - kIntSize; @@ -279,9 +282,8 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, *(call_target_address - 2) = 0x07; // offset Assembler::set_target_address_at(call_target_address, check_code->entry()); - - check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( - unoptimized_code, call_target_address, check_code); + check_code->GetHeap()->incremental_marking()-> + RecordCodeTargetPatch(call_target_address, check_code); } diff --git a/deps/v8/src/x64/full-codegen-x64.cc b/deps/v8/src/x64/full-codegen-x64.cc index bf640dbcb..b5c5fc5e7 100644 --- a/deps/v8/src/x64/full-codegen-x64.cc +++ b/deps/v8/src/x64/full-codegen-x64.cc @@ -254,10 +254,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) { // constant. if (scope()->is_function_scope() && scope()->function() != NULL) { int ignored = 0; - VariableProxy* proxy = scope()->function(); - ASSERT(proxy->var()->mode() == CONST || - proxy->var()->mode() == CONST_HARMONY); - EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored); + EmitDeclaration(scope()->function(), CONST, NULL, &ignored); } VisitDeclarations(scope()->declarations()); } @@ -687,8 +684,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, // need to "declare" it at runtime to make sure it actually exists in the // local context. 
Variable* variable = proxy->var(); - bool binding_needs_init = - mode == CONST || mode == CONST_HARMONY || mode == LET; switch (variable->location()) { case Variable::UNALLOCATED: ++(*global_count); @@ -700,7 +695,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, Comment cmnt(masm_, "[ Declaration"); VisitForAccumulatorValue(function); __ movq(StackOperand(variable), result_register()); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { Comment cmnt(masm_, "[ Declaration"); __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); __ movq(StackOperand(variable), kScratchRegister); @@ -733,7 +728,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); PrepareForBailoutForId(proxy->id(), NO_REGISTERS); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { Comment cmnt(masm_, "[ Declaration"); __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); __ movq(ContextOperand(rsi, variable->index()), kScratchRegister); @@ -746,13 +741,9 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, Comment cmnt(masm_, "[ Declaration"); __ push(rsi); __ Push(variable->name()); - // Declaration nodes are always introduced in one of four modes. - ASSERT(mode == VAR || - mode == CONST || - mode == CONST_HARMONY || - mode == LET); - PropertyAttributes attr = - (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE; + // Declaration nodes are always introduced in one of three modes. + ASSERT(mode == VAR || mode == CONST || mode == LET); + PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE; __ Push(Smi::FromInt(attr)); // Push initial value, if any. // Note: For variables we must not push an initial value (such as @@ -760,7 +751,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, // must not destroy the current value. if (function != NULL) { VisitForStackValue(function); - } else if (binding_needs_init) { + } else if (mode == CONST || mode == LET) { __ PushRoot(Heap::kTheHoleValueRootIndex); } else { __ Push(Smi::FromInt(0)); // Indicates no initial value. @@ -899,17 +890,11 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ bind(&done_convert); __ push(rax); - // Check for proxies. - Label call_runtime; - STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); - __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx); - __ j(below_equal, &call_runtime); - // Check cache validity in generated code. This is a fast case for // the JSObject::IsSimpleEnum cache validity checks. If we cannot // guarantee cache validity, call the runtime system to check cache // validity or get the property names in a fixed array. - Label next; + Label next, call_runtime; Register empty_fixed_array_value = r8; __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex); Register empty_descriptor_array_value = r9; @@ -985,17 +970,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ jmp(&loop); // We got a fixed array in register rax. Iterate through that. 
- Label non_proxy; __ bind(&fixed_array); - __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check - __ movq(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object - STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); - __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx); - __ j(above, &non_proxy); - __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy - __ bind(&non_proxy); - __ push(rbx); // Smi - __ push(rax); // Array + __ Push(Smi::FromInt(0)); // Map (0) - force slow check. + __ push(rax); __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); __ push(rax); // Fixed array length (as smi). __ Push(Smi::FromInt(0)); // Initial index. @@ -1014,22 +991,17 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { index.scale, FixedArray::kHeaderSize)); - // Get the expected map from the stack or a smi in the + // Get the expected map from the stack or a zero map in the // permanent slow case into register rdx. __ movq(rdx, Operand(rsp, 3 * kPointerSize)); // Check if the expected map still matches that of the enumerable. - // If not, we may have to filter the key. + // If not, we have to filter the key. Label update_each; __ movq(rcx, Operand(rsp, 4 * kPointerSize)); __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset)); __ j(equal, &update_each, Label::kNear); - // For proxies, no filtering is done. - // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. - __ Cmp(rdx, Smi::FromInt(0)); - __ j(equal, &update_each, Label::kNear); - // Convert the entry to a string or null if it isn't a property // anymore. If the property has been removed while iterating, we // just skip it. @@ -1083,7 +1055,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, !pretenure && scope()->is_function_scope() && info->num_literals() == 0) { - FastNewClosureStub stub(info->strict_mode_flag()); + FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode); __ Push(info); __ CallStub(&stub); } else { @@ -1113,7 +1085,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, Scope* s = scope(); while (s != NULL) { if (s->num_heap_slots() > 0) { - if (s->calls_non_strict_eval()) { + if (s->calls_eval()) { // Check that extension is NULL. __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); @@ -1127,7 +1099,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, // If no outer scope calls eval, we do not need to check more // context extensions. If we have reached an eval scope, we check // all extensions from this point. - if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; + if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; s = s->outer_scope(); } @@ -1173,7 +1145,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { if (s->num_heap_slots() > 0) { - if (s->calls_non_strict_eval()) { + if (s->calls_eval()) { // Check that extension is NULL. 
__ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); @@ -1210,14 +1182,12 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, } else if (var->mode() == DYNAMIC_LOCAL) { Variable* local = var->local_if_not_shadowed(); __ movq(rax, ContextSlotOperandCheckExtensions(local, slow)); - if (local->mode() == CONST || - local->mode() == CONST_HARMONY || - local->mode() == LET) { + if (local->mode() == CONST || local->mode() == LET) { __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); __ j(not_equal, done); if (local->mode() == CONST) { __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); - } else { // LET || CONST_HARMONY + } else { // LET __ Push(var->name()); __ CallRuntime(Runtime::kThrowReferenceError, 1); } @@ -1251,7 +1221,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { case Variable::LOCAL: case Variable::CONTEXT: { Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot"); - if (!var->binding_needs_init()) { + if (var->mode() != LET && var->mode() != CONST) { context()->Plug(var); } else { // Let and const need a read barrier. @@ -1259,14 +1229,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { GetVar(rax, var); __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); __ j(not_equal, &done, Label::kNear); - if (var->mode() == LET || var->mode() == CONST_HARMONY) { - // Throw a reference error when using an uninitialized let/const - // binding in harmony mode. + if (var->mode() == LET) { __ Push(var->name()); __ CallRuntime(Runtime::kThrowReferenceError, 1); - } else { - // Uninitalized const bindings outside of harmony mode are unholed. - ASSERT(var->mode() == CONST); + } else { // CONST __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); } __ bind(&done); @@ -1451,18 +1417,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ZoneList<Expression*>* subexprs = expr->values(); int length = subexprs->length(); - Handle<FixedArray> constant_elements = expr->constant_elements(); - ASSERT_EQ(2, constant_elements->length()); - ElementsKind constant_elements_kind = - static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); - Handle<FixedArrayBase> constant_elements_values( - FixedArrayBase::cast(constant_elements->get(1))); __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); __ Push(Smi::FromInt(expr->literal_index())); - __ Push(constant_elements); - if (constant_elements_values->map() == + __ Push(expr->constant_elements()); + if (expr->constant_elements()->map() == isolate()->heap()->fixed_cow_array_map()) { FastCloneShallowArrayStub stub( FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); @@ -1473,14 +1433,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); } else { - ASSERT(constant_elements_kind == FAST_ELEMENTS || - constant_elements_kind == FAST_SMI_ONLY_ELEMENTS || - FLAG_smi_only_arrays); - FastCloneShallowArrayStub::Mode mode = - constant_elements_kind == FAST_DOUBLE_ELEMENTS - ? 
FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS - : FastCloneShallowArrayStub::CLONE_ELEMENTS; - FastCloneShallowArrayStub stub(mode, length); + FastCloneShallowArrayStub stub( + FastCloneShallowArrayStub::CLONE_ELEMENTS, length); __ CallStub(&stub); } @@ -1505,59 +1459,22 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { // Store the subexpression value in the array's elements. __ movq(r8, Operand(rsp, 0)); // Copy of array literal. - __ movq(rdi, FieldOperand(r8, JSObject::kMapOffset)); __ movq(rbx, FieldOperand(r8, JSObject::kElementsOffset)); int offset = FixedArray::kHeaderSize + (i * kPointerSize); - - Label element_done; - Label double_elements; - Label smi_element; - Label slow_elements; - Label fast_elements; - __ CheckFastElements(rdi, &double_elements); - - // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS - __ JumpIfSmi(result_register(), &smi_element); - __ CheckFastSmiOnlyElements(rdi, &fast_elements); - - // Store into the array literal requires a elements transition. Call into - // the runtime. - __ bind(&slow_elements); - __ push(r8); // Copy of array literal. - __ Push(Smi::FromInt(i)); - __ push(result_register()); - __ Push(Smi::FromInt(NONE)); // PropertyAttributes - __ Push(Smi::FromInt(strict_mode_flag())); // Strict mode. - __ CallRuntime(Runtime::kSetProperty, 5); - __ jmp(&element_done); - - // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. - __ bind(&double_elements); - __ movq(rcx, Immediate(i)); - __ StoreNumberToDoubleElements(result_register(), - rbx, - rcx, - xmm0, - &slow_elements); - __ jmp(&element_done); - - // Array literal has ElementsKind of FAST_ELEMENTS and value is an object. - __ bind(&fast_elements); __ movq(FieldOperand(rbx, offset), result_register()); + + Label no_map_change; + __ JumpIfSmi(result_register(), &no_map_change); // Update the write barrier for the array store. __ RecordWriteField(rbx, offset, result_register(), rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); - __ jmp(&element_done); - - // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or - // FAST_ELEMENTS, and value is Smi. - __ bind(&smi_element); - __ movq(FieldOperand(rbx, offset), result_register()); - // Fall through - - __ bind(&element_done); + __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); + __ CheckFastSmiOnlyElements(rdi, &no_map_change, Label::kNear); + __ push(r8); + __ CallRuntime(Runtime::kNonSmiElementStored, 1); + __ bind(&no_map_change); PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); } @@ -1888,9 +1805,8 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, } } - } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { - // Assignment to var or initializing assignment to let/const - // in harmony mode. + } else if (var->mode() != CONST) { + // Assignment to var or initializing assignment to let. if (var->IsStackAllocated() || var->IsContextSlot()) { MemOperand location = VarOperand(var, rcx); if (FLAG_debug_code && op == Token::INIT_LET) { @@ -2741,12 +2657,9 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) { // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs. 
__ PrepareCallCFunction(1); #ifdef _WIN64 - __ movq(rcx, ContextOperand(context_register(), Context::GLOBAL_INDEX)); - __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset)); - + __ LoadAddress(rcx, ExternalReference::isolate_address()); #else - __ movq(rdi, ContextOperand(context_register(), Context::GLOBAL_INDEX)); - __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); + __ LoadAddress(rdi, ExternalReference::isolate_address()); #endif __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); @@ -4084,25 +3997,33 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::EQ_STRICT: case Token::EQ: cc = equal; + __ pop(rdx); break; case Token::LT: cc = less; + __ pop(rdx); break; case Token::GT: - cc = greater; + // Reverse left and right sizes to obtain ECMA-262 conversion order. + cc = less; + __ movq(rdx, result_register()); + __ pop(rax); break; case Token::LTE: - cc = less_equal; + // Reverse left and right sizes to obtain ECMA-262 conversion order. + cc = greater_equal; + __ movq(rdx, result_register()); + __ pop(rax); break; case Token::GTE: cc = greater_equal; + __ pop(rdx); break; case Token::IN: case Token::INSTANCEOF: default: UNREACHABLE(); } - __ pop(rdx); bool inline_smi_code = ShouldInlineSmiCase(op); JumpPatchSite patch_site(masm_); diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc index e8ab06cda..27a96674c 100644 --- a/deps/v8/src/x64/ic-x64.cc +++ b/deps/v8/src/x64/ic-x64.cc @@ -712,11 +712,12 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, // Writing a non-smi, check whether array allows non-smi elements. // r9: receiver's map __ CheckFastObjectElements(r9, &slow, Label::kNear); - __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize), - rax); - __ movq(rdx, rax); // Preserve the value which is returned. - __ RecordWriteArray( - rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); + __ lea(rcx, + FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize)); + __ movq(Operand(rcx, 0), rax); + __ movq(rdx, rax); + __ RecordWrite( + rbx, rcx, rdx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); __ ret(0); __ bind(&fast_double_with_map_check); @@ -735,10 +736,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, // The generated code does not accept smi keys. // The generated code falls through if both probes miss. -void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, - int argc, - Code::Kind kind, - Code::ExtraICState extra_state) { +static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, + int argc, + Code::Kind kind, + Code::ExtraICState extra_ic_state) { // ----------- S t a t e ------------- // rcx : function name // rdx : receiver @@ -748,7 +749,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, // Probe the stub cache. Code::Flags flags = Code::ComputeFlags(kind, MONOMORPHIC, - extra_state, + extra_ic_state, NORMAL, argc); Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx, @@ -821,7 +822,7 @@ static void GenerateFunctionTailCall(MacroAssembler* masm, // The generated code falls through if the call should be handled by runtime. 
-void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { +static void GenerateCallNormal(MacroAssembler* masm, int argc) { // ----------- S t a t e ------------- // rcx : function name // rsp[0] : return address @@ -848,10 +849,10 @@ void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { } -void CallICBase::GenerateMiss(MacroAssembler* masm, - int argc, - IC::UtilityId id, - Code::ExtraICState extra_state) { +static void GenerateCallMiss(MacroAssembler* masm, + int argc, + IC::UtilityId id, + Code::ExtraICState extra_ic_state) { // ----------- S t a t e ------------- // rcx : function name // rsp[0] : return address @@ -909,7 +910,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm, } // Invoke the function. - CallKind call_kind = CallICBase::Contextual::decode(extra_state) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state) ? CALL_AS_FUNCTION : CALL_AS_METHOD; ParameterCount actual(argc); @@ -941,6 +942,39 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, } +void CallIC::GenerateNormal(MacroAssembler* masm, int argc) { + // ----------- S t a t e ------------- + // rcx : function name + // rsp[0] : return address + // rsp[8] : argument argc + // rsp[16] : argument argc - 1 + // ... + // rsp[argc * 8] : argument 1 + // rsp[(argc + 1) * 8] : argument 0 = receiver + // ----------------------------------- + + GenerateCallNormal(masm, argc); + GenerateMiss(masm, argc, Code::kNoExtraICState); +} + + +void CallIC::GenerateMiss(MacroAssembler* masm, + int argc, + Code::ExtraICState extra_ic_state) { + // ----------- S t a t e ------------- + // rcx : function name + // rsp[0] : return address + // rsp[8] : argument argc + // rsp[16] : argument argc - 1 + // ... + // rsp[argc * 8] : argument 1 + // rsp[(argc + 1) * 8] : argument 0 = receiver + // ----------------------------------- + + GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state); +} + + void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { // ----------- S t a t e ------------- // rcx : function name @@ -1068,12 +1102,27 @@ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) { __ JumpIfSmi(rcx, &miss); Condition cond = masm->IsObjectStringType(rcx, rax, rax); __ j(NegateCondition(cond), &miss); - CallICBase::GenerateNormal(masm, argc); + GenerateCallNormal(masm, argc); __ bind(&miss); GenerateMiss(masm, argc); } +void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) { + // ----------- S t a t e ------------- + // rcx : function name + // rsp[0] : return address + // rsp[8] : argument argc + // rsp[16] : argument argc - 1 + // ... + // rsp[argc * 8] : argument 1 + // rsp[(argc + 1) * 8] : argument 0 = receiver + // ----------------------------------- + + GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState); +} + + static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm, Register object, Register key, @@ -1553,51 +1602,6 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { } -void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- rbx : target map - // -- rdx : receiver - // -- rsp[0] : return address - // ----------------------------------- - // Must return the modified receiver in eax. 
- if (!FLAG_trace_elements_transitions) { - Label fail; - ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail); - __ movq(rax, rdx); - __ Ret(); - __ bind(&fail); - } - - __ pop(rbx); - __ push(rdx); - __ push(rbx); // return address - __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1); -} - - -void KeyedStoreIC::GenerateTransitionElementsDoubleToObject( - MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- rbx : target map - // -- rdx : receiver - // -- rsp[0] : return address - // ----------------------------------- - // Must return the modified receiver in eax. - if (!FLAG_trace_elements_transitions) { - Label fail; - ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail); - __ movq(rax, rdx); - __ Ret(); - __ bind(&fail); - } - - __ pop(rbx); - __ push(rdx); - __ push(rbx); // return address - __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1); -} - - #undef __ @@ -1609,9 +1613,11 @@ Condition CompareIC::ComputeCondition(Token::Value op) { case Token::LT: return less; case Token::GT: - return greater; + // Reverse left and right operands to obtain ECMA-262 conversion order. + return less; case Token::LTE: - return less_equal; + // Reverse left and right operands to obtain ECMA-262 conversion order. + return greater_equal; case Token::GTE: return greater_equal; default: diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc index 38a8c18be..45aaad754 100644 --- a/deps/v8/src/x64/lithium-codegen-x64.cc +++ b/deps/v8/src/x64/lithium-codegen-x64.cc @@ -374,12 +374,6 @@ int LCodeGen::ToInteger32(LConstantOperand* op) const { } -double LCodeGen::ToDouble(LConstantOperand* op) const { - Handle<Object> value = chunk_->LookupLiteral(op); - return value->Number(); -} - - Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { Handle<Object> literal = chunk_->LookupLiteral(op); ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged()); @@ -1532,51 +1526,39 @@ inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { } +void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) { + if (right->IsConstantOperand()) { + int32_t value = ToInteger32(LConstantOperand::cast(right)); + if (left->IsRegister()) { + __ cmpl(ToRegister(left), Immediate(value)); + } else { + __ cmpl(ToOperand(left), Immediate(value)); + } + } else if (right->IsRegister()) { + __ cmpl(ToRegister(left), ToRegister(right)); + } else { + __ cmpl(ToRegister(left), ToOperand(right)); + } +} + + void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { LOperand* left = instr->InputAt(0); LOperand* right = instr->InputAt(1); int false_block = chunk_->LookupDestination(instr->false_block_id()); int true_block = chunk_->LookupDestination(instr->true_block_id()); - Condition cc = TokenToCondition(instr->op(), instr->is_double()); - if (left->IsConstantOperand() && right->IsConstantOperand()) { - // We can statically evaluate the comparison. - double left_val = ToDouble(LConstantOperand::cast(left)); - double right_val = ToDouble(LConstantOperand::cast(right)); - int next_block = - EvalComparison(instr->op(), left_val, right_val) ? true_block - : false_block; - EmitGoto(next_block); + if (instr->is_double()) { + // Don't base result on EFLAGS when a NaN is involved. Instead + // jump to the false block. 
+ __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); + __ j(parity_even, chunk_->GetAssemblyLabel(false_block)); } else { - if (instr->is_double()) { - // Don't base result on EFLAGS when a NaN is involved. Instead - // jump to the false block. - __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); - __ j(parity_even, chunk_->GetAssemblyLabel(false_block)); - } else { - int32_t value; - if (right->IsConstantOperand()) { - value = ToInteger32(LConstantOperand::cast(right)); - __ cmpl(ToRegister(left), Immediate(value)); - } else if (left->IsConstantOperand()) { - value = ToInteger32(LConstantOperand::cast(left)); - if (right->IsRegister()) { - __ cmpl(ToRegister(right), Immediate(value)); - } else { - __ cmpl(ToOperand(right), Immediate(value)); - } - // We transposed the operands. Reverse the condition. - cc = ReverseCondition(cc); - } else { - if (right->IsRegister()) { - __ cmpl(ToRegister(left), ToRegister(right)); - } else { - __ cmpl(ToRegister(left), ToOperand(right)); - } - } - } - EmitBranch(true_block, false_block, cc); + EmitCmpI(left, right); } + + Condition cc = TokenToCondition(instr->op(), instr->is_double()); + EmitBranch(true_block, false_block, cc); } @@ -1997,6 +1979,9 @@ void LCodeGen::DoCmpT(LCmpT* instr) { CallCode(ic, RelocInfo::CODE_TARGET, instr); Condition condition = TokenToCondition(op, false); + if (op == Token::GT || op == Token::LTE) { + condition = ReverseCondition(condition); + } Label true_value, done; __ testq(rax, rax); __ j(condition, &true_value, Label::kNear); @@ -2070,24 +2055,19 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { // Store the value. __ movq(Operand(address, 0), value); - if (instr->hydrogen()->NeedsWriteBarrier()) { - Label smi_store; - HType type = instr->hydrogen()->value()->type(); - if (!type.IsHeapNumber() && !type.IsString() && !type.IsNonPrimitive()) { - __ JumpIfSmi(value, &smi_store, Label::kNear); - } + Label smi_store; + __ JumpIfSmi(value, &smi_store, Label::kNear); - int offset = JSGlobalPropertyCell::kValueOffset - kHeapObjectTag; - __ lea(object, Operand(address, -offset)); - // Cells are always in the remembered set. - __ RecordWrite(object, - address, - value, - kSaveFPRegs, - OMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - __ bind(&smi_store); - } + int offset = JSGlobalPropertyCell::kValueOffset - kHeapObjectTag; + __ lea(object, Operand(address, -offset)); + // Cells are always in the remembered set. + __ RecordWrite(object, + address, + value, + kSaveFPRegs, + OMIT_REMEMBERED_SET, + OMIT_SMI_CHECK); + __ bind(&smi_store); } @@ -2114,19 +2094,10 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { Register context = ToRegister(instr->context()); Register value = ToRegister(instr->value()); __ movq(ContextOperand(context, instr->slot_index()), value); - if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? 
OMIT_SMI_CHECK : INLINE_SMI_CHECK; + if (instr->needs_write_barrier()) { int offset = Context::SlotOffset(instr->slot_index()); Register scratch = ToRegister(instr->TempAt(0)); - __ RecordWriteContextSlot(context, - offset, - value, - scratch, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteContextSlot(context, offset, value, scratch, kSaveFPRegs); } } @@ -2147,7 +2118,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result, Register object, Handle<Map> type, Handle<String> name) { - LookupResult lookup(isolate()); + LookupResult lookup; type->LookupInDescriptors(NULL, *name, &lookup); ASSERT(lookup.IsProperty() && (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION)); @@ -2590,7 +2561,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) { void LCodeGen::DoThisFunction(LThisFunction* instr) { Register result = ToRegister(instr->result()); - LoadHeapObject(result, instr->hydrogen()->closure()); + __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); } @@ -3090,36 +3061,21 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } // Do the store. - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; if (instr->is_in_object()) { __ movq(FieldOperand(object, offset), value); - if (instr->hydrogen()->NeedsWriteBarrier()) { + if (instr->needs_write_barrier()) { Register temp = ToRegister(instr->TempAt(0)); // Update the write barrier for the object for in-object properties. - __ RecordWriteField(object, - offset, - value, - temp, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteField(object, offset, value, temp, kSaveFPRegs); } } else { Register temp = ToRegister(instr->TempAt(0)); __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset)); __ movq(FieldOperand(temp, offset), value); - if (instr->hydrogen()->NeedsWriteBarrier()) { + if (instr->needs_write_barrier()) { // Update the write barrier for the properties array. // object is used as a scratch register. - __ RecordWriteField(temp, - offset, - value, - object, - kSaveFPRegs, - EMIT_REMEMBERED_SET, - check_needed); + __ RecordWriteField(temp, offset, value, object, kSaveFPRegs); } } } @@ -3226,20 +3182,12 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { } if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); - SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; // Compute address of modified element and store it into key register. 
    __ lea(key, FieldOperand(elements,
                             key,
                             times_pointer_size,
                             FixedArray::kHeaderSize));
-    __ RecordWrite(elements,
-                   key,
-                   value,
-                   kSaveFPRegs,
-                   EMIT_REMEMBERED_SET,
-                   check_needed);
+    __ RecordWrite(elements, key, value, kSaveFPRegs);
  }
}
@@ -3275,47 +3223,6 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
}


-void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
-  Register object_reg = ToRegister(instr->object());
-  Register new_map_reg = ToRegister(instr->new_map_reg());
-
-  Handle<Map> from_map = instr->original_map();
-  Handle<Map> to_map = instr->transitioned_map();
-  ElementsKind from_kind = from_map->elements_kind();
-  ElementsKind to_kind = to_map->elements_kind();
-
-  Label not_applicable;
-  __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
-  __ j(not_equal, &not_applicable);
-  __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
-  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
-    __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
-    // Write barrier.
-    ASSERT_NE(instr->temp_reg(), NULL);
-    __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
-                        ToRegister(instr->temp_reg()), kDontSaveFPRegs);
-  } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
-             to_kind == FAST_DOUBLE_ELEMENTS) {
-    Register fixed_object_reg = ToRegister(instr->temp_reg());
-    ASSERT(fixed_object_reg.is(rdx));
-    ASSERT(new_map_reg.is(rbx));
-    __ movq(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
-             RelocInfo::CODE_TARGET, instr);
-  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
-    Register fixed_object_reg = ToRegister(instr->temp_reg());
-    ASSERT(fixed_object_reg.is(rdx));
-    ASSERT(new_map_reg.is(rbx));
-    __ movq(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
-             RelocInfo::CODE_TARGET, instr);
-  } else {
-    UNREACHABLE();
-  }
-  __ bind(&not_applicable);
-}
-
-
void LCodeGen::DoStringAdd(LStringAdd* instr) {
  EmitPushTaggedOperand(instr->left());
  EmitPushTaggedOperand(instr->right());
@@ -3918,11 +3825,6 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {

void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
-  Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
-  ASSERT_EQ(2, constant_elements->length());
-  ElementsKind constant_elements_kind =
-      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
-
  // Setup the parameters to the stub/runtime call.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
@@ -3943,9 +3845,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
-        constant_elements_kind == FAST_DOUBLE_ELEMENTS
-            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
-            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
@@ -4034,7 +3934,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
-    FastNewClosureStub stub(shared_info->strict_mode_flag());
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ?
kStrictMode : kNonStrictMode); __ Push(shared_info); CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); } else { @@ -4074,11 +3975,12 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { Label* true_label = chunk_->GetAssemblyLabel(true_block); Label* false_label = chunk_->GetAssemblyLabel(false_block); - Condition final_branch_condition = - EmitTypeofIs(true_label, false_label, input, instr->type_literal()); - if (final_branch_condition != no_condition) { - EmitBranch(true_block, false_block, final_branch_condition); - } + Condition final_branch_condition = EmitTypeofIs(true_label, + false_label, + input, + instr->type_literal()); + + EmitBranch(true_block, false_block, final_branch_condition); } @@ -4146,6 +4048,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, final_branch_condition = zero; } else { + final_branch_condition = never; __ jmp(false_label); } diff --git a/deps/v8/src/x64/lithium-codegen-x64.h b/deps/v8/src/x64/lithium-codegen-x64.h index f3cb66797..106d7bb2e 100644 --- a/deps/v8/src/x64/lithium-codegen-x64.h +++ b/deps/v8/src/x64/lithium-codegen-x64.h @@ -77,7 +77,6 @@ class LCodeGen BASE_EMBEDDED { XMMRegister ToDoubleRegister(LOperand* op) const; bool IsInteger32Constant(LConstantOperand* op) const; int ToInteger32(LConstantOperand* op) const; - double ToDouble(LConstantOperand* op) const; bool IsTaggedConstant(LConstantOperand* op) const; Handle<Object> ToHandle(LConstantOperand* op) const; Operand ToOperand(LOperand* op) const; @@ -126,8 +125,8 @@ class LCodeGen BASE_EMBEDDED { bool is_done() const { return status_ == DONE; } bool is_aborted() const { return status_ == ABORTED; } - StrictModeFlag strict_mode_flag() const { - return info()->strict_mode_flag(); + int strict_mode_flag() const { + return info()->is_strict_mode() ? kStrictMode : kNonStrictMode; } LChunk* chunk() const { return chunk_; } @@ -191,8 +190,9 @@ class LCodeGen BASE_EMBEDDED { int argc, LInstruction* instr); + // Generate a direct call to a known function. Expects the function - // to be in rdi. + // to be in edi. void CallKnownFunction(Handle<JSFunction> function, int arity, LInstruction* instr, @@ -251,6 +251,7 @@ class LCodeGen BASE_EMBEDDED { static Condition TokenToCondition(Token::Value op, bool is_unsigned); void EmitGoto(int block); void EmitBranch(int left_block, int right_block, Condition cc); + void EmitCmpI(LOperand* left, LOperand* right); void EmitNumberUntagD(Register input, XMMRegister result, bool deoptimize_on_undefined, @@ -259,10 +260,8 @@ class LCodeGen BASE_EMBEDDED { // Emits optimized code for typeof x == "y". Modifies input register. // Returns the condition on which a final split to // true and false label should be made, to optimize fallthrough. - Condition EmitTypeofIs(Label* true_label, - Label* false_label, - Register input, - Handle<String> type_name); + Condition EmitTypeofIs(Label* true_label, Label* false_label, + Register input, Handle<String> type_name); // Emits optimized code for %_IsObject(x). Preserves input register. 
// Returns the condition on which a final split to diff --git a/deps/v8/src/x64/lithium-x64.cc b/deps/v8/src/x64/lithium-x64.cc index 0af2ce4fc..a67a59320 100644 --- a/deps/v8/src/x64/lithium-x64.cc +++ b/deps/v8/src/x64/lithium-x64.cc @@ -447,12 +447,6 @@ void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) { } -void LTransitionElementsKind::PrintDataTo(StringStream* stream) { - object()->PrintTo(stream); - stream->Add(" %p -> %p", *original_map(), *transitioned_map()); -} - - void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) { LInstructionGap* gap = new LInstructionGap(block); int index = -1; @@ -1402,10 +1396,12 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) { LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) { + Token::Value op = instr->token(); ASSERT(instr->left()->representation().IsTagged()); ASSERT(instr->right()->representation().IsTagged()); - LOperand* left = UseFixed(instr->left(), rdx); - LOperand* right = UseFixed(instr->right(), rax); + bool reversed = (op == Token::GT || op == Token::LTE); + LOperand* left = UseFixed(instr->left(), reversed ? rax : rdx); + LOperand* right = UseFixed(instr->right(), reversed ? rdx : rax); LCmpT* result = new LCmpT(left, right); return MarkAsCall(DefineFixed(result, rax), instr); } @@ -1417,22 +1413,15 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch( if (r.IsInteger32()) { ASSERT(instr->left()->representation().IsInteger32()); ASSERT(instr->right()->representation().IsInteger32()); - LOperand* left = UseRegisterOrConstantAtStart(instr->left()); + LOperand* left = UseRegisterAtStart(instr->left()); LOperand* right = UseOrConstantAtStart(instr->right()); return new LCmpIDAndBranch(left, right); } else { ASSERT(r.IsDouble()); ASSERT(instr->left()->representation().IsDouble()); ASSERT(instr->right()->representation().IsDouble()); - LOperand* left; - LOperand* right; - if (instr->left()->IsConstant() && instr->right()->IsConstant()) { - left = UseRegisterOrConstantAtStart(instr->left()); - right = UseRegisterOrConstantAtStart(instr->right()); - } else { - left = UseRegisterAtStart(instr->left()); - right = UseRegisterAtStart(instr->right()); - } + LOperand* left = UseRegisterAtStart(instr->left()); + LOperand* right = UseRegisterAtStart(instr->right()); return new LCmpIDAndBranch(left, right); } } @@ -1967,27 +1956,6 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) { } -LInstruction* LChunkBuilder::DoTransitionElementsKind( - HTransitionElementsKind* instr) { - if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS && - instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) { - LOperand* object = UseRegister(instr->object()); - LOperand* new_map_reg = TempRegister(); - LOperand* temp_reg = TempRegister(); - LTransitionElementsKind* result = - new LTransitionElementsKind(object, new_map_reg, temp_reg); - return DefineSameAsFirst(result); - } else { - LOperand* object = UseFixed(instr->object(), rax); - LOperand* fixed_object_reg = FixedTemp(rdx); - LOperand* new_map_reg = FixedTemp(rbx); - LTransitionElementsKind* result = - new LTransitionElementsKind(object, new_map_reg, fixed_object_reg); - return MarkAsCall(DefineFixed(result, rax), instr); - } -} - - LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { bool needs_write_barrier = instr->NeedsWriteBarrier(); diff --git a/deps/v8/src/x64/lithium-x64.h b/deps/v8/src/x64/lithium-x64.h index 20a69373c..d43a86a9a 100644 --- a/deps/v8/src/x64/lithium-x64.h +++ 
b/deps/v8/src/x64/lithium-x64.h @@ -162,7 +162,6 @@ class LCodeGen; V(ThisFunction) \ V(Throw) \ V(ToFastProperties) \ - V(TransitionElementsKind) \ V(Typeof) \ V(TypeofIsAndBranch) \ V(UnaryMathOperation) \ @@ -1261,6 +1260,7 @@ class LStoreContextSlot: public LTemplateInstruction<0, 2, 1> { LOperand* context() { return InputAt(0); } LOperand* value() { return InputAt(1); } int slot_index() { return hydrogen()->slot_index(); } + int needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); } virtual void PrintDataTo(StringStream* stream); }; @@ -1277,9 +1277,7 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> { class LThisFunction: public LTemplateInstruction<1, 0, 0> { - public: DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function") - DECLARE_HYDROGEN_ACCESSOR(ThisFunction) }; @@ -1553,6 +1551,7 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> { Handle<Object> name() const { return hydrogen()->name(); } bool is_in_object() { return hydrogen()->is_in_object(); } int offset() { return hydrogen()->offset(); } + bool needs_write_barrier() { return hydrogen()->NeedsWriteBarrier(); } Handle<Map> transition() const { return hydrogen()->transition(); } }; @@ -1572,8 +1571,7 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 2, 0> { LOperand* object() { return inputs_[0]; } LOperand* value() { return inputs_[1]; } Handle<Object> name() const { return hydrogen()->name(); } - StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); } - bool strict_mode() { return strict_mode_flag() == kStrictMode; } + bool strict_mode() { return hydrogen()->strict_mode(); } }; @@ -1662,30 +1660,6 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> { }; -class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> { - public: - LTransitionElementsKind(LOperand* object, - LOperand* new_map_temp, - LOperand* temp_reg) { - inputs_[0] = object; - temps_[0] = new_map_temp; - temps_[1] = temp_reg; - } - - DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind, - "transition-elements-kind") - DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind) - - virtual void PrintDataTo(StringStream* stream); - - LOperand* object() { return inputs_[0]; } - LOperand* new_map_reg() { return temps_[0]; } - LOperand* temp_reg() { return temps_[1]; } - Handle<Map> original_map() { return hydrogen()->original_map(); } - Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); } -}; - - class LStringAdd: public LTemplateInstruction<1, 2, 0> { public: LStringAdd(LOperand* left, LOperand* right) { diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc index e3d463400..7fe6d5821 100644 --- a/deps/v8/src/x64/macro-assembler-x64.cc +++ b/deps/v8/src/x64/macro-assembler-x64.cc @@ -55,7 +55,7 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) { Address roots_register_value = kRootRegisterBias + - reinterpret_cast<Address>(isolate->heap()->roots_array_start()); + reinterpret_cast<Address>(isolate->heap()->roots_address()); intptr_t delta = other.address() - roots_register_value; return delta; } @@ -326,40 +326,6 @@ void MacroAssembler::RecordWriteField( } -void MacroAssembler::RecordWriteArray(Register object, - Register value, - Register index, - SaveFPRegsMode save_fp, - RememberedSetAction remembered_set_action, - SmiCheck smi_check) { - // First, check if a write barrier is even needed. 
The tests below - // catch stores of Smis. - Label done; - - // Skip barrier if writing a smi. - if (smi_check == INLINE_SMI_CHECK) { - JumpIfSmi(value, &done); - } - - // Array access: calculate the destination address. Index is not a smi. - Register dst = index; - lea(dst, Operand(object, index, times_pointer_size, - FixedArray::kHeaderSize - kHeapObjectTag)); - - RecordWrite( - object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); - - bind(&done); - - // Clobber clobbered input registers when running with the debug-code flag - // turned on to provoke errors. - if (emit_debug_code()) { - movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE); - movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE); - } -} - - void MacroAssembler::RecordWrite(Register object, Register address, Register value, @@ -2351,13 +2317,6 @@ void MacroAssembler::Test(const Operand& src, Smi* source) { } -void MacroAssembler::TestBit(const Operand& src, int bits) { - int byte_offset = bits / kBitsPerByte; - int bit_in_byte = bits & (kBitsPerByte - 1); - testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); -} - - void MacroAssembler::Jump(ExternalReference ext) { LoadAddress(kScratchRegister, ext); jmp(kScratchRegister); @@ -2724,7 +2683,7 @@ void MacroAssembler::CheckFastSmiOnlyElements(Register map, void MacroAssembler::StoreNumberToDoubleElements( Register maybe_number, Register elements, - Register index, + Register key, XMMRegister xmm_scratch, Label* fail) { Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done; @@ -2745,7 +2704,7 @@ void MacroAssembler::StoreNumberToDoubleElements( bind(¬_nan); movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset)); bind(&have_double_value); - movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize), + movsd(FieldOperand(elements, key, times_8, FixedDoubleArray::kHeaderSize), xmm_scratch); jmp(&done); @@ -2768,7 +2727,7 @@ void MacroAssembler::StoreNumberToDoubleElements( // Preserve original value. SmiToInteger32(kScratchRegister, maybe_number); cvtlsi2sd(xmm_scratch, kScratchRegister); - movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize), + movsd(FieldOperand(elements, key, times_8, FixedDoubleArray::kHeaderSize), xmm_scratch); bind(&done); } @@ -2907,8 +2866,7 @@ Condition MacroAssembler::IsObjectStringType(Register heap_object, void MacroAssembler::TryGetFunctionPrototype(Register function, Register result, - Label* miss, - bool miss_on_bound_function) { + Label* miss) { // Check that the receiver isn't a smi. testl(function, Immediate(kSmiTagMask)); j(zero, miss); @@ -2917,17 +2875,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, CmpObjectType(function, JS_FUNCTION_TYPE, result); j(not_equal, miss); - if (miss_on_bound_function) { - movq(kScratchRegister, - FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); - // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte - // field). - TestBit(FieldOperand(kScratchRegister, - SharedFunctionInfo::kCompilerHintsOffset), - SharedFunctionInfo::kBoundFunction); - j(not_zero, miss); - } - // Make sure that the function has an instance prototype. Label non_instance; testb(FieldOperand(result, Map::kBitFieldOffset), @@ -3120,16 +3067,29 @@ void MacroAssembler::InvokeFunction(JSFunction* function, // You can't call a function without a valid frame. ASSERT(flag == JUMP_FUNCTION || has_frame()); + ASSERT(function->is_compiled()); // Get the function and setup the context. 
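Aside on the RecordWriteArray hunk above: the removed helper follows the usual generational write-barrier shape, in that a store of a smi needs no barrier at all, while any other value has its slot address derived from the array base plus the scaled index and recorded for the next minor GC. A minimal standalone C++ model of that decision; the tagging scheme and the StoreBuffer type are illustrative only, not V8's.

#include <cstddef>
#include <cstdint>
#include <vector>

// Toy tagged value: low bit 0 means "small integer" (smi), 1 means heap pointer.
// This mirrors the intent of INLINE_SMI_CHECK in the removed helper, not V8's layout.
using Tagged = std::uintptr_t;
inline bool IsSmi(Tagged value) { return (value & 1) == 0; }

struct StoreBuffer { std::vector<Tagged*> slots; };  // stand-in for the remembered set

// Record a store of `value` into element `index` of `elements`. Smis can never
// introduce old-to-new pointers, so the barrier is skipped for them entirely.
void RecordWriteArraySketch(Tagged* elements, std::size_t index, Tagged value,
                            StoreBuffer* buffer) {
  if (IsSmi(value)) return;            // cheap filter, as in the removed code
  Tagged* slot = elements + index;     // address of the updated element
  buffer->slots.push_back(slot);       // remember the slot for the collector
}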
Move(rdi, Handle<JSFunction>(function)); movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. - movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); - ParameterCount expected(function->shared()->formal_parameter_count()); - InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind); + if (V8::UseCrankshaft()) { + // Since Crankshaft can recompile a function, we need to load + // the Code object every time we call the function. + movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); + ParameterCount expected(function->shared()->formal_parameter_count()); + InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind); + } else { + // Invoke the cached code. + Handle<Code> code(function->code()); + ParameterCount expected(function->shared()->formal_parameter_count()); + InvokeCode(code, + expected, + actual, + RelocInfo::CODE_TARGET, + flag, + call_wrapper, + call_kind); + } } diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h index f5f81b131..7e0ba0054 100644 --- a/deps/v8/src/x64/macro-assembler-x64.h +++ b/deps/v8/src/x64/macro-assembler-x64.h @@ -256,8 +256,8 @@ class MacroAssembler: public Assembler { // Notify the garbage collector that we wrote a pointer into a fixed array. // |array| is the array being stored into, |value| is the - // object being stored. |index| is the array index represented as a non-smi. - // All registers are clobbered by the operation RecordWriteArray + // object being stored. |index| is the array index represented as a + // Smi. All registers are clobbered by the operation RecordWriteArray // filters out smis so it does not update the write barrier if the // value is a smi. void RecordWriteArray( @@ -319,9 +319,9 @@ class MacroAssembler: public Assembler { void LoadFromSafepointRegisterSlot(Register dst, Register src); void InitializeRootRegister() { - ExternalReference roots_array_start = - ExternalReference::roots_array_start(isolate()); - movq(kRootRegister, roots_array_start); + ExternalReference roots_address = + ExternalReference::roots_address(isolate()); + movq(kRootRegister, roots_address); addq(kRootRegister, Immediate(kRootRegisterBias)); } @@ -726,7 +726,6 @@ class MacroAssembler: public Assembler { void Push(Smi* smi); void Test(const Operand& dst, Smi* source); - // --------------------------------------------------------------------------- // String macros. @@ -772,9 +771,6 @@ class MacroAssembler: public Assembler { // Move if the registers are not identical. void Move(Register target, Register source); - // Bit-field support. - void TestBit(const Operand& dst, int bit_index); - // Handle support void Move(Register dst, Handle<Object> source); void Move(const Operand& dst, Handle<Object> source); @@ -864,12 +860,12 @@ class MacroAssembler: public Assembler { Label::Distance distance = Label::kFar); // Check to see if maybe_number can be stored as a double in - // FastDoubleElements. If it can, store it at the index specified by index in - // the FastDoubleElements array elements, otherwise jump to fail. Note that - // index must not be smi-tagged. + // FastDoubleElements. If it can, store it at the index specified by key in + // the FastDoubleElements array elements, otherwise jump to fail. + // Note that key must not be smi-tagged. 
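The comment block just above summarizes StoreNumberToDoubleElements: a smi is widened to a double, a heap number is copied, and NaNs are canonicalized before being written into the double-only backing store; anything else jumps to the fail label. A small self-contained sketch of that flow, with a toy number type standing in for V8's tagged values.

#include <cmath>
#include <cstddef>
#include <vector>

// Toy stand-in for a tagged number: either a small integer or a boxed double.
struct Number {
  bool is_smi;
  int smi_value;
  double heap_value;
};

// Returns false where the real code would "jump to fail"; otherwise stores a
// canonicalized double at `key` in the FAST_DOUBLE_ELEMENTS-style backing store.
bool StoreNumberToDoubleElementsSketch(const Number& n,
                                       std::vector<double>& elements,
                                       std::size_t key) {
  if (key >= elements.size()) return false;
  double value;
  if (n.is_smi) {
    value = static_cast<double>(n.smi_value);     // smi -> int -> double fast path
  } else {
    value = n.heap_value;
    if (std::isnan(value)) value = std::nan("");  // one canonical NaN bit pattern
  }
  elements[key] = value;
  return true;
}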
void StoreNumberToDoubleElements(Register maybe_number, Register elements, - Register index, + Register key, XMMRegister xmm_scratch, Label* fail); @@ -1078,8 +1074,7 @@ class MacroAssembler: public Assembler { // clobbered. void TryGetFunctionPrototype(Register function, Register result, - Label* miss, - bool miss_on_bound_function = false); + Label* miss); // Generates code for reporting that an illegal operation has // occurred. diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/x64/regexp-macro-assembler-x64.cc index 1e0cd6a38..55fabc003 100644 --- a/deps/v8/src/x64/regexp-macro-assembler-x64.cc +++ b/deps/v8/src/x64/regexp-macro-assembler-x64.cc @@ -1248,11 +1248,6 @@ int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address, frame_entry<const String*>(re_frame, kInputString) = *subject; frame_entry<const byte*>(re_frame, kInputStart) = new_address; frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length; - } else if (frame_entry<const String*>(re_frame, kInputString) != *subject) { - // Subject string might have been a ConsString that underwent - // short-circuiting during GC. That will not change start_address but - // will change pointer inside the subject handle. - frame_entry<const String*>(re_frame, kInputString) = *subject; } return 0; diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc index 8af1bf2c4..c4b2672f6 100644 --- a/deps/v8/src/x64/stub-cache-x64.cc +++ b/deps/v8/src/x64/stub-cache-x64.cc @@ -82,55 +82,7 @@ static void ProbeTable(Isolate* isolate, // must always call a backup property check that is complete. // This function is safe to call if the receiver has fast properties. // Name must be a symbol and receiver must be a heap object. -static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, - Label* miss_label, - Register receiver, - Handle<String> name, - Register r0, - Register r1) { - ASSERT(name->IsSymbol()); - Counters* counters = masm->isolate()->counters(); - __ IncrementCounter(counters->negative_lookups(), 1); - __ IncrementCounter(counters->negative_lookups_miss(), 1); - - __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset)); - - const int kInterceptorOrAccessCheckNeededMask = - (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); - - // Bail out if the receiver has a named interceptor or requires access checks. - __ testb(FieldOperand(r0, Map::kBitFieldOffset), - Immediate(kInterceptorOrAccessCheckNeededMask)); - __ j(not_zero, miss_label); - - // Check that receiver is a JSObject. - __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE); - __ j(below, miss_label); - - // Load properties array. - Register properties = r0; - __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset)); - - // Check that the properties array is a dictionary. - __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset), - Heap::kHashTableMapRootIndex); - __ j(not_equal, miss_label); - - Label done; - StringDictionaryLookupStub::GenerateNegativeLookup(masm, - miss_label, - &done, - properties, - name, - r1); - __ bind(&done); - __ DecrementCounter(counters->negative_lookups_miss(), 1); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. 
-MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup( +MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup( MacroAssembler* masm, Label* miss_label, Register receiver, @@ -166,7 +118,7 @@ MUST_USE_RESULT static MaybeObject* TryGenerateDictionaryNegativeLookup( __ j(not_equal, miss_label); Label done; - MaybeObject* result = StringDictionaryLookupStub::TryGenerateNegativeLookup( + MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup( masm, miss_label, &done, @@ -360,10 +312,8 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, // are loaded directly otherwise the property is loaded from the properties // fixed array. void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - Handle<JSObject> holder, - int index) { + Register dst, Register src, + JSObject* holder, int index) { // Adjust for the number of properties stored in the holder. index -= holder->map()->inobject_properties(); if (index < 0) { @@ -750,10 +700,15 @@ class CallInterceptorCompiler BASE_EMBEDDED { void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); - Handle<Code> code = (kind == Code::LOAD_IC) - ? masm->isolate()->builtins()->LoadIC_Miss() - : masm->isolate()->builtins()->KeyedLoadIC_Miss(); - __ Jump(code, RelocInfo::CODE_TARGET); + Code* code = NULL; + if (kind == Code::LOAD_IC) { + code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss); + } else { + code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss); + } + + Handle<Code> ic(code); + __ Jump(ic, RelocInfo::CODE_TARGET); } @@ -768,9 +723,9 @@ void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) { // Both name_reg and receiver_reg are preserved on jumps to miss_label, // but may be destroyed if store is successful. void StubCompiler::GenerateStoreField(MacroAssembler* masm, - Handle<JSObject> object, + JSObject* object, int index, - Handle<Map> transition, + Map* transition, Register receiver_reg, Register name_reg, Register scratch, @@ -793,12 +748,12 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); // Perform map transition for the receiver if necessary. - if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) { + if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) { // The properties must be extended before we can store the value. // We jump to a runtime call that extends the properties array. __ pop(scratch); // Return address. __ push(receiver_reg); - __ Push(transition); + __ Push(Handle<Map>(transition)); __ push(rax); __ push(scratch); __ TailCallExternalReference( @@ -809,10 +764,11 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, return; } - if (!transition.is_null()) { + if (transition != NULL) { // Update the map of the object; no write barrier updating is // needed because the map is never in new space. - __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset), transition); + __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset), + Handle<Map>(transition)); } // Adjust for the number of properties stored in the object. Even in the @@ -852,24 +808,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, // Generate code to check that a global property cell is empty. 
Create // the property cell at compilation time if no cell exists for the // property. -static void GenerateCheckPropertyCell(MacroAssembler* masm, - Handle<GlobalObject> global, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSGlobalPropertyCell> cell = - GlobalObject::EnsurePropertyCell(global, name); - ASSERT(cell->value()->IsTheHole()); - __ Move(scratch, cell); - __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), - masm->isolate()->factory()->the_hole_value()); - __ j(not_equal, miss); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( +MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell( MacroAssembler* masm, GlobalObject* global, String* name, @@ -889,172 +828,10 @@ MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCell( } -// Calls GenerateCheckPropertyCell for each global object in the prototype chain -// from object to (but not including) holder. -static void GenerateCheckPropertyCells(MacroAssembler* masm, - Handle<JSObject> object, - Handle<JSObject> holder, - Handle<String> name, - Register scratch, - Label* miss) { - Handle<JSObject> current = object; - while (!current.is_identical_to(holder)) { - if (current->IsGlobalObject()) { - GenerateCheckPropertyCell(masm, - Handle<GlobalObject>::cast(current), - name, - scratch, - miss); - } - current = Handle<JSObject>(JSObject::cast(current->GetPrototype())); - } -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. -MUST_USE_RESULT static MaybeObject* TryGenerateCheckPropertyCells( - MacroAssembler* masm, - JSObject* object, - JSObject* holder, - String* name, - Register scratch, - Label* miss) { - JSObject* current = object; - while (current != holder) { - if (current->IsGlobalObject()) { - // Returns a cell or a failure. - MaybeObject* result = TryGenerateCheckPropertyCell( - masm, - GlobalObject::cast(current), - name, - scratch, - miss); - if (result->IsFailure()) return result; - } - ASSERT(current->IsJSObject()); - current = JSObject::cast(current->GetPrototype()); - } - return NULL; -} - - #undef __ #define __ ACCESS_MASM((masm())) -Register StubCompiler::CheckPrototypes(Handle<JSObject> object, - Register object_reg, - Handle<JSObject> holder, - Register holder_reg, - Register scratch1, - Register scratch2, - Handle<String> name, - int save_at_depth, - Label* miss) { - // Make sure there's no overlap between holder and object registers. - ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); - ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) - && !scratch2.is(scratch1)); - - // Keep track of the current object in register reg. On the first - // iteration, reg is an alias for object_reg, on later iterations, - // it is an alias for holder_reg. - Register reg = object_reg; - int depth = 0; - - if (save_at_depth == depth) { - __ movq(Operand(rsp, kPointerSize), object_reg); - } - - // Check the maps in the prototype chain. - // Traverse the prototype chain from the object and do map checks. - Handle<JSObject> current = object; - while (!current.is_identical_to(holder)) { - ++depth; - - // Only global objects and objects that do not require access - // checks are allowed in stubs. 
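The GenerateCheckPropertyCell helpers in this region encode one invariant: a stub that skips a global object while proving a property absent is only valid while that global's property cell for the name still holds the hole. A hedged sketch of the check in plain C++; the cell and global types are hypothetical.

#include <string>
#include <unordered_map>

static const void* const kTheHole = nullptr;        // stand-in for the hole sentinel
struct PropertyCell { const void* value = kTheHole; };

struct GlobalObjectSketch {
  std::unordered_map<std::string, PropertyCell> cells;
};

// True if the stub's assumption still holds: the property was never defined
// (or was deleted), so the cell is empty and the fast path may be taken.
bool CheckPropertyCellIsEmpty(GlobalObjectSketch& global, const std::string& name) {
  PropertyCell& cell = global.cells[name];  // "create the cell at compile time if absent"
  return cell.value == kTheHole;
}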
- ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); - - Handle<JSObject> prototype(JSObject::cast(current->GetPrototype())); - if (!current->HasFastProperties() && - !current->IsJSGlobalObject() && - !current->IsJSGlobalProxy()) { - if (!name->IsSymbol()) { - name = factory()->LookupSymbol(name); - } - ASSERT(current->property_dictionary()->FindEntry(*name) == - StringDictionary::kNotFound); - - GenerateDictionaryNegativeLookup(masm(), miss, reg, name, - scratch1, scratch2); - - __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); - reg = holder_reg; // From now on the object will be in holder_reg. - __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); - } else { - bool in_new_space = heap()->InNewSpace(*prototype); - Handle<Map> current_map(current->map()); - if (in_new_space) { - // Save the map in scratch1 for later. - __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); - __ Cmp(scratch1, current_map); - } else { - __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), current_map); - } - // Branch on the result of the map check. - __ j(not_equal, miss); - // Check access rights to the global object. This has to happen after - // the map check so that we know that the object is actually a global - // object. - if (current->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch2, miss); - } - reg = holder_reg; // From now on the object will be in holder_reg. - - if (in_new_space) { - // The prototype is in new space; we cannot store a reference to it - // in the code. Load it from the map. - __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); - } else { - // The prototype is in old space; load it directly. - __ Move(reg, prototype); - } - } - - if (save_at_depth == depth) { - __ movq(Operand(rsp, kPointerSize), reg); - } - - // Go to the next object in the prototype chain. - current = prototype; - } - ASSERT(current.is_identical_to(holder)); - - // Log the check depth. - LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); - - // Check the holder map. - __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map())); - __ j(not_equal, miss); - - // Perform security check for access to the global object. - ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); - if (current->IsJSGlobalProxy()) { - __ CheckAccessGlobalProxy(reg, scratch1, miss); - } - - // If we've skipped any global objects, it's not enough to verify that - // their maps haven't changed. We also need to check that the property - // cell for the property is still empty. - GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss); - - // Return the register containing the holder. - return reg; -} - - Register StubCompiler::CheckPrototypes(JSObject* object, Register object_reg, JSObject* holder, @@ -1105,13 +882,12 @@ Register StubCompiler::CheckPrototypes(JSObject* object, ASSERT(current->property_dictionary()->FindEntry(name) == StringDictionary::kNotFound); - MaybeObject* negative_lookup = - TryGenerateDictionaryNegativeLookup(masm(), - miss, - reg, - name, - scratch1, - scratch2); + MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(), + miss, + reg, + name, + scratch1, + scratch2); if (negative_lookup->IsFailure()) { set_failure(Failure::cast(negative_lookup)); return reg; @@ -1184,34 +960,43 @@ Register StubCompiler::CheckPrototypes(JSObject* object, // If we've skipped any global objects, it's not enough to verify // that their maps haven't changed. 
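CheckPrototypes, shown above in both its handlified and raw-pointer forms, walks from the receiver to the holder and verifies that every map along the way still matches what the stub was compiled against; any mismatch falls through to the miss label. A condensed C++ model of the walk, with deliberately simplified map and object types.

#include <cstddef>

struct Map;                          // opaque "hidden class" in this sketch
struct JSObjectSketch {
  const Map* map;
  JSObjectSketch* prototype;         // nullptr terminates the chain
};

// Walk object -> ... -> holder, comparing each map against the expected ones.
// Returns false where the generated stub would jump to `miss`.
bool CheckPrototypesSketch(const JSObjectSketch* object, const JSObjectSketch* holder,
                           const Map* const* expected_maps, std::size_t count) {
  const JSObjectSketch* current = object;
  std::size_t depth = 0;
  while (current != holder) {
    if (depth >= count || current->map != expected_maps[depth]) return false;
    current = current->prototype;    // go to the next object in the chain
    if (current == nullptr) return false;
    ++depth;
  }
  // Finally, the holder's own map must match as well.
  return depth < count && holder->map == expected_maps[depth];
}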
We also need to check that the // property cell for the property is still empty. - MaybeObject* result = TryGenerateCheckPropertyCells(masm(), - object, - holder, - name, - scratch1, - miss); - if (result->IsFailure()) set_failure(Failure::cast(result)); + current = object; + while (current != holder) { + if (current->IsGlobalObject()) { + MaybeObject* cell = GenerateCheckPropertyCell(masm(), + GlobalObject::cast(current), + name, + scratch1, + miss); + if (cell->IsFailure()) { + set_failure(Failure::cast(cell)); + return reg; + } + } + current = JSObject::cast(current->GetPrototype()); + } // Return the register containing the holder. return reg; } -void StubCompiler::GenerateLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadField(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, int index, - Handle<String> name, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ JumpIfSmi(receiver, miss); // Check the prototype chain. - Register reg = CheckPrototypes( - object, receiver, holder, scratch1, scratch2, scratch3, name, miss); + Register reg = + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, scratch3, name, miss); // Get the value from the properties. GenerateFastPropertyLoad(masm(), rax, reg, holder, index); @@ -1296,24 +1081,24 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object, } -void StubCompiler::GenerateLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, +void StubCompiler::GenerateLoadConstant(JSObject* object, + JSObject* holder, Register receiver, Register scratch1, Register scratch2, Register scratch3, - Handle<Object> value, - Handle<String> name, + Object* value, + String* name, Label* miss) { // Check that the receiver isn't a smi. __ JumpIfSmi(receiver, miss); // Check that the maps haven't changed. - CheckPrototypes( - object, receiver, holder, scratch1, scratch2, scratch3, name, miss); + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, scratch3, name, miss); // Return the constant value. - __ Move(rax, value); + __ Move(rax, Handle<Object>(value)); __ ret(0); } @@ -1413,8 +1198,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, // We found FIELD property in prototype chain of interceptor's holder. // Retrieve a field from field's holder. GenerateFastPropertyLoad(masm(), rax, holder_reg, - Handle<JSObject>(lookup->holder()), - lookup->GetFieldIndex()); + lookup->holder(), lookup->GetFieldIndex()); __ ret(0); } else { // We found CALLBACKS property in prototype chain of interceptor's @@ -1460,9 +1244,9 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, } -void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) { +void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { if (kind_ == Code::KEYED_CALL_IC) { - __ Cmp(rcx, name); + __ Cmp(rcx, Handle<String>(name)); __ j(not_equal, miss); } } @@ -1521,22 +1305,11 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, } -void CallStubCompiler::GenerateMissBranch() { - Handle<Code> code = +MaybeObject* CallStubCompiler::GenerateMissBranch() { + MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), kind_, - extra_state_); - __ Jump(code, RelocInfo::CODE_TARGET); -} - - -// TODO(kmillikin): Eliminate this function when the stub cache is fully -// handlified. 
-MaybeObject* CallStubCompiler::TryGenerateMissBranch() { - MaybeObject* maybe_obj = - isolate()->stub_cache()->TryComputeCallMiss(arguments().immediate(), - kind_, - extra_state_); + extra_ic_state_); Object* obj; if (!maybe_obj->ToObject(&obj)) return maybe_obj; __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); @@ -1544,10 +1317,10 @@ MaybeObject* CallStubCompiler::TryGenerateMissBranch() { } -Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, - Handle<JSObject> holder, +MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, + JSObject* holder, int index, - Handle<String> name) { + String* name) { // ----------- S t a t e ------------- // rcx : function name // rsp[0] : return address @@ -1587,7 +1360,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, } // Invoke the function. - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION, @@ -1595,7 +1368,8 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, // Handle call cache miss. __ bind(&miss); - GenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); + if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. return GetCode(FIELD, name); @@ -1620,7 +1394,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -1714,8 +1488,8 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, // the new element is non-Smi. For now, delegate to the builtin. Label no_fast_elements_check; __ JumpIfSmi(rdi, &no_fast_elements_check); - __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); - __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar); + __ movq(rsi, FieldOperand(rdx, HeapObject::kMapOffset)); + __ CheckFastObjectElements(rsi, &call_builtin, Label::kFar); __ bind(&no_fast_elements_check); ExternalReference new_space_allocation_top = @@ -1779,11 +1553,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, } __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -1805,7 +1579,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, Label miss, return_undefined, call_builtin; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -1862,11 +1636,11 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, 1); __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. 
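For orientation, CompileArrayPushCall in the surrounding hunks builds a fast path for Array.prototype.push with a single argument: if the backing store still has spare capacity the element is stored and the length bumped in place, otherwise (growth or an elements-kind transition) the stub bails out to the generic builtin. A rough sketch of that decision in ordinary C++, with a toy array layout.

#include <cstddef>

struct JSArraySketch {
  int* elements;          // toy FAST_ELEMENTS backing store
  std::size_t length;     // current length
  std::size_t capacity;   // allocated slots in `elements`
};

// Returns the new length on the fast path, or -1 where the stub would
// tail-call the ArrayPush builtin (which reallocates or transitions).
long ArrayPushFast(JSArraySketch* array, int value) {
  if (array->length < array->capacity) {
    array->elements[array->length] = value;   // store the new element
    array->length += 1;                       // then bump the length
    return static_cast<long>(array->length);
  }
  return -1;   // no room left: delegate to the builtin
}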
- return TryGetCode(function); + return GetCode(function); } @@ -1895,12 +1669,12 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -1946,11 +1720,11 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( // Restore function name in rcx. __ Move(rcx, Handle<String>(name)); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -1979,12 +1753,12 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( Label* index_out_of_range_label = &index_out_of_range; if (kind_ == Code::CALL_IC && - (CallICBase::StringStubState::decode(extra_state_) == + (CallICBase::StringStubState::decode(extra_ic_state_) == DEFAULT_STRING_STUB)) { index_out_of_range_label = &miss; } - GenerateNameCheck(Handle<String>(name), &name_miss); + GenerateNameCheck(name, &name_miss); // Check that the maps starting from the prototype haven't changed. GenerateDirectLoadGlobalFunctionPrototype(masm(), @@ -2032,11 +1806,11 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall( // Restore function name in rcx. __ Move(rcx, Handle<String>(name)); __ bind(&name_miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2061,7 +1835,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ movq(rdx, Operand(rsp, 2 * kPointerSize)); @@ -2097,7 +1871,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( // Tail call the full function. We do not have to patch the receiver // because the function makes no use of it. __ bind(&slow); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(function, arguments(), JUMP_FUNCTION, @@ -2105,11 +1879,11 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( __ bind(&miss); // rcx: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? 
GetCode(function) : GetCode(NORMAL, name); } @@ -2143,7 +1917,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, if (!object->IsJSObject() || argc != 1) return heap()->undefined_value(); Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); if (cell == NULL) { __ movq(rdx, Operand(rsp, 2 * kPointerSize)); @@ -2214,7 +1988,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, // Tail call the full function. We do not have to patch the receiver // because the function makes no use of it. __ bind(&slow); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(function, arguments(), JUMP_FUNCTION, @@ -2222,11 +1996,11 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, __ bind(&miss); // rcx: function name. - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return (cell == NULL) ? TryGetCode(function) : TryGetCode(NORMAL, name); + return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); } @@ -2249,7 +2023,7 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( Label miss, miss_before_stack_reserved; - GenerateNameCheck(Handle<String>(name), &miss_before_stack_reserved); + GenerateNameCheck(name, &miss_before_stack_reserved); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2281,11 +2055,11 @@ MaybeObject* CallStubCompiler::CompileFastApiCall( __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); __ bind(&miss_before_stack_reserved); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2315,7 +2089,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the receiver from the stack. const int argc = arguments().immediate(); @@ -2412,7 +2186,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, UNREACHABLE(); } - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(function, arguments(), JUMP_FUNCTION, @@ -2420,11 +2194,11 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, // Handle call cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(function); + return GetCode(function); } @@ -2442,18 +2216,18 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // ----------------------------------- Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. const int argc = arguments().immediate(); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); // Get the receiver from the stack. 
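The CompileMathAbsCall stub nearby special-cases smi arguments: non-negative values are returned as-is, negative ones are negated, and the most negative value bails out to the full Math.abs because its absolute value no longer fits. A tiny illustration of that overflow hazard, using a 32-bit integer in place of a smi (the real smi width differs).

#include <cstdint>
#include <optional>

// Fast-path abs for a toy 32-bit "smi". Returns nullopt where the generated
// code would jump to the slow path and invoke the real Math.abs.
std::optional<int32_t> MathAbsFast(int32_t value) {
  if (value >= 0) return value;
  if (value == INT32_MIN) return std::nullopt;  // negation overflows: take the slow path
  return -value;
}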
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); - CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_); + CallInterceptorCompiler compiler(this, arguments(), rcx, extra_ic_state_); MaybeObject* result = compiler.Compile(masm(), object, holder, @@ -2483,7 +2257,7 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // Invoke the function. __ movq(rdi, rax); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION, @@ -2491,11 +2265,11 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, // Handle load cache miss. __ bind(&miss); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } @@ -2525,7 +2299,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object, Label miss; - GenerateNameCheck(Handle<String>(name), &miss); + GenerateNameCheck(name, &miss); // Get the number of arguments. const int argc = arguments().immediate(); @@ -2546,32 +2320,39 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object, // Jump to the cached code (tail call). Counters* counters = isolate()->counters(); __ IncrementCounter(counters->call_global_inline(), 1); + ASSERT(function->is_compiled()); ParameterCount expected(function->shared()->formal_parameter_count()); - CallKind call_kind = CallICBase::Contextual::decode(extra_state_) + CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) ? CALL_AS_FUNCTION : CALL_AS_METHOD; - // We call indirectly through the code field in the function to - // allow recompilation to take effect without changing any of the - // call sites. - __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); - __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION, - NullCallWrapper(), call_kind); - + if (V8::UseCrankshaft()) { + // TODO(kasperl): For now, we always call indirectly through the + // code field in the function to allow recompilation to take effect + // without changing any of the call sites. + __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); + __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION, + NullCallWrapper(), call_kind); + } else { + Handle<Code> code(function->code()); + __ InvokeCode(code, expected, arguments(), + RelocInfo::CODE_TARGET, JUMP_FUNCTION, + NullCallWrapper(), call_kind); + } // Handle call cache miss. __ bind(&miss); __ IncrementCounter(counters->call_global_inline_miss(), 1); - MaybeObject* maybe_result = TryGenerateMissBranch(); + MaybeObject* maybe_result = GenerateMissBranch(); if (maybe_result->IsFailure()) return maybe_result; // Return the generated code. - return TryGetCode(NORMAL, name); + return GetCode(NORMAL, name); } -Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, +MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, int index, - Handle<Map> transition, - Handle<String> name) { + Map* transition, + String* name) { // ----------- S t a t e ------------- // -- rax : value // -- rcx : name @@ -2581,7 +2362,12 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, Label miss; // Generate store field code. Preserves receiver and name on jump to miss. 
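GenerateStoreField, shared by the named and keyed store stubs here, handles two cases: if the store needs a map transition but the object has no unused property slots left, it punts to a runtime call that extends the properties backing store; otherwise it installs the transition map (no write barrier, since maps never live in new space per the comment above) and writes the value. A compact C++ model of that branching, with hypothetical types.

#include <cstddef>
#include <vector>

struct MapSketch { int unused_property_fields; };

struct ObjectSketch {
  const MapSketch* map;
  std::vector<double> fields;   // toy stand-in for in-object/backing-store properties
};

enum class StoreResult { kStored, kNeedsRuntimeExtension };

StoreResult StoreFieldSketch(ObjectSketch* object, std::size_t index, double value,
                             const MapSketch* transition) {
  if (transition != nullptr && object->map->unused_property_fields == 0) {
    // The properties array must be extended first; the stub tail-calls the runtime.
    return StoreResult::kNeedsRuntimeExtension;
  }
  if (transition != nullptr) {
    object->map = transition;   // update the map before storing the value
  }
  if (index >= object->fields.size()) object->fields.resize(index + 1);
  object->fields[index] = value;
  return StoreResult::kStored;
}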
- GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss); + GenerateStoreField(masm(), + object, + index, + transition, + rdx, rcx, rbx, + &miss); // Handle store cache miss. __ bind(&miss); @@ -2589,14 +2375,13 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name); + return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); } -Handle<Code> StoreStubCompiler::CompileStoreCallback( - Handle<JSObject> object, - Handle<AccessorInfo> callback, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, + AccessorInfo* callback, + String* name) { // ----------- S t a t e ------------- // -- rax : value // -- rcx : name @@ -2624,7 +2409,7 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback( __ pop(rbx); // remove the return address __ push(rdx); // receiver - __ Push(callback); // callback info + __ Push(Handle<AccessorInfo>(callback)); // callback info __ push(rcx); // name __ push(rax); // value __ push(rbx); // restore return address @@ -2644,9 +2429,8 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback( } -Handle<Code> StoreStubCompiler::CompileStoreInterceptor( - Handle<JSObject> receiver, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, + String* name) { // ----------- S t a t e ------------- // -- rax : value // -- rcx : name @@ -2694,10 +2478,9 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor( } -Handle<Code> StoreStubCompiler::CompileStoreGlobal( - Handle<GlobalObject> object, - Handle<JSGlobalPropertyCell> cell, - Handle<String> name) { +MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, + JSGlobalPropertyCell* cell, + String* name) { // ----------- S t a t e ------------- // -- rax : value // -- rcx : name @@ -2712,7 +2495,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal( __ j(not_equal, &miss); // Compute the cell operand to use. - __ Move(rbx, cell); + __ Move(rbx, Handle<JSGlobalPropertyCell>(cell)); Operand cell_operand = FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset); // Check that the value in the cell is not the hole. If it is, this @@ -2756,10 +2539,10 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal( } -Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, +MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, int index, - Handle<Map> transition, - Handle<String> name) { + Map* transition, + String* name) { // ----------- S t a t e ------------- // -- rax : value // -- rcx : key @@ -2772,11 +2555,16 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, __ IncrementCounter(counters->keyed_store_field(), 1); // Check that the name has not changed. - __ Cmp(rcx, name); + __ Cmp(rcx, Handle<String>(name)); __ j(not_equal, &miss); // Generate store field code. Preserves receiver and name on jump to miss. - GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss); + GenerateStoreField(masm(), + object, + index, + transition, + rdx, rcx, rbx, + &miss); // Handle store cache miss. __ bind(&miss); @@ -2785,38 +2573,40 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(transition.is_null() ? 
FIELD : MAP_TRANSITION, name); + return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); } -Handle<Code> KeyedStoreStubCompiler::CompileStoreElement( - Handle<Map> receiver_map) { +MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) { // ----------- S t a t e ------------- // -- rax : value // -- rcx : key // -- rdx : receiver // -- rsp[0] : return address // ----------------------------------- - + Code* stub; ElementsKind elements_kind = receiver_map->elements_kind(); bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE; - Handle<Code> stub = - KeyedStoreElementStub(is_js_array, elements_kind).GetCode(); - - __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK); + MaybeObject* maybe_stub = + KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode(); + if (!maybe_stub->To(&stub)) return maybe_stub; + __ DispatchMap(rdx, + Handle<Map>(receiver_map), + Handle<Code>(stub), + DO_SMI_CHECK); Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss(); __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string()); + return GetCode(NORMAL, NULL); } -Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( - MapHandleList* receiver_maps, - CodeHandleList* handler_stubs, - MapHandleList* transitioned_maps) { +MaybeObject* KeyedStoreStubCompiler::CompileStorePolymorphic( + MapList* receiver_maps, + CodeList* handler_stubs, + MapList* transitioned_maps) { // ----------- S t a t e ------------- // -- rax : value // -- rcx : key @@ -2830,14 +2620,17 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( int receiver_count = receiver_maps->length(); for (int i = 0; i < receiver_count; ++i) { // Check map and tail call if there's a match - __ Cmp(rdi, receiver_maps->at(i)); - if (transitioned_maps->at(i).is_null()) { - __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET); + Handle<Map> map(receiver_maps->at(i)); + __ Cmp(rdi, map); + if (transitioned_maps->at(i) == NULL) { + __ j(equal, Handle<Code>(handler_stubs->at(i)), RelocInfo::CODE_TARGET); } else { Label next_map; __ j(not_equal, &next_map, Label::kNear); - __ movq(rbx, transitioned_maps->at(i), RelocInfo::EMBEDDED_OBJECT); - __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET); + __ movq(rbx, + Handle<Map>(transitioned_maps->at(i)), + RelocInfo::EMBEDDED_OBJECT); + __ jmp(Handle<Code>(handler_stubs->at(i)), RelocInfo::CODE_TARGET); __ bind(&next_map); } } @@ -2847,13 +2640,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC); + return GetCode(NORMAL, NULL, MEGAMORPHIC); } -Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, - Handle<JSObject> object, - Handle<JSObject> last) { +MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, + JSObject* object, + JSObject* last) { // ----------- S t a t e ------------- // -- rax : receiver // -- rcx : name @@ -2872,8 +2665,15 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, // If the last object in the prototype chain is a global object, // check that the global property cell is empty. 
if (last->IsGlobalObject()) { - GenerateCheckPropertyCell( - masm(), Handle<GlobalObject>::cast(last), name, rdx, &miss); + MaybeObject* cell = GenerateCheckPropertyCell(masm(), + GlobalObject::cast(last), + name, + rdx, + &miss); + if (cell->IsFailure()) { + miss.Unuse(); + return cell; + } } // Return undefined if maps of the full prototype chain are still the @@ -2885,14 +2685,14 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return GetCode(NONEXISTENT, factory()->empty_string()); + return GetCode(NONEXISTENT, heap()->empty_string()); } -Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object, - Handle<JSObject> holder, +MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, + JSObject* holder, int index, - Handle<String> name) { + String* name) { // ----------- S t a t e ------------- // -- rax : receiver // -- rcx : name @@ -2931,14 +2731,14 @@ MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object, - Handle<JSObject> holder, - Handle<Object> value, - Handle<String> name) { +MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, + JSObject* holder, + Object* value, + String* name) { // ----------- S t a t e ------------- // -- rax : receiver // -- rcx : name @@ -2965,7 +2765,7 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, // ----------------------------------- Label miss; - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); // TODO(368): Compile in the whole chain: all the interceptors in @@ -2985,16 +2785,15 @@ MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, GenerateLoadMiss(masm(), Code::LOAD_IC); // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } -Handle<Code> LoadStubCompiler::CompileLoadGlobal( - Handle<JSObject> object, - Handle<GlobalObject> holder, - Handle<JSGlobalPropertyCell> cell, - Handle<String> name, - bool is_dont_delete) { +MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object, + GlobalObject* holder, + JSGlobalPropertyCell* cell, + String* name, + bool is_dont_delete) { // ----------- S t a t e ------------- // -- rax : receiver // -- rcx : name @@ -3005,7 +2804,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( // If the object is the holder then we know that it's a global // object which can only happen for contextual loads. In this case, // the receiver cannot be a smi. - if (!object.is_identical_to(holder)) { + if (object != holder) { __ JumpIfSmi(rax, &miss); } @@ -3013,7 +2812,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss); // Get the value from the cell. - __ Move(rbx, cell); + __ Move(rbx, Handle<JSGlobalPropertyCell>(cell)); __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset)); // Check for deleted property if property can actually be deleted. 
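CompileLoadGlobal, continued below, reads the value straight out of the global's property cell and, unless the property is marked DontDelete, checks that the cell does not hold the hole, which would mean the property was deleted after the stub was compiled. A minimal sketch of that load, with an illustrative cell type.

#include <optional>

struct GlobalCell { const void* value; };
static const void* const kTheHoleValue = nullptr;   // stand-in for the hole

// Returns the cell's value, or nullopt where the stub would jump to miss.
std::optional<const void*> LoadGlobalSketch(const GlobalCell& cell,
                                            bool is_dont_delete) {
  const void* value = cell.value;
  if (!is_dont_delete && value == kTheHoleValue) {
    return std::nullopt;    // property was deleted: fall back to the IC miss
  }
  return value;
}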
@@ -3039,9 +2838,9 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, +MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, + JSObject* receiver, + JSObject* holder, int index) { // ----------- S t a t e ------------- // -- rax : key @@ -3054,7 +2853,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, __ IncrementCounter(counters->keyed_load_field(), 1); // Check that the name has not changed. - __ Cmp(rax, name); + __ Cmp(rax, Handle<String>(name)); __ j(not_equal, &miss); GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss); @@ -3100,15 +2899,14 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. - return TryGetCode(CALLBACKS, name); + return GetCode(CALLBACKS, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( - Handle<String> name, - Handle<JSObject> receiver, - Handle<JSObject> holder, - Handle<Object> value) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, + JSObject* receiver, + JSObject* holder, + Object* value) { // ----------- S t a t e ------------- // -- rax : key // -- rdx : receiver @@ -3151,7 +2949,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ Cmp(rax, Handle<String>(name)); __ j(not_equal, &miss); - LookupResult lookup(isolate()); + LookupResult lookup; LookupPostInterceptor(holder, name, &lookup); GenerateLoadInterceptor(receiver, holder, @@ -3168,12 +2966,11 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. - return TryGetCode(INTERCEPTOR, name); + return GetCode(INTERCEPTOR, name); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { // ----------- S t a t e ------------- // -- rax : key // -- rdx : receiver @@ -3185,7 +2982,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( __ IncrementCounter(counters->keyed_load_array_length(), 1); // Check that the name has not changed. - __ Cmp(rax, name); + __ Cmp(rax, Handle<String>(name)); __ j(not_equal, &miss); GenerateLoadArrayLength(masm(), rdx, rcx, &miss); @@ -3198,8 +2995,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { // ----------- S t a t e ------------- // -- rax : key // -- rdx : receiver @@ -3211,7 +3007,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( __ IncrementCounter(counters->keyed_load_string_length(), 1); // Check that the name has not changed. 
- __ Cmp(rax, name); + __ Cmp(rax, Handle<String>(name)); __ j(not_equal, &miss); GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true); @@ -3224,8 +3020,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( - Handle<String> name) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { // ----------- S t a t e ------------- // -- rax : key // -- rdx : receiver @@ -3237,7 +3032,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( __ IncrementCounter(counters->keyed_load_function_prototype(), 1); // Check that the name has not changed. - __ Cmp(rax, name); + __ Cmp(rax, Handle<String>(name)); __ j(not_equal, &miss); GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss); @@ -3250,29 +3045,32 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( } -Handle<Code> KeyedLoadStubCompiler::CompileLoadElement( - Handle<Map> receiver_map) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) { // ----------- S t a t e ------------- // -- rax : key // -- rdx : receiver // -- rsp[0] : return address // ----------------------------------- + Code* stub; ElementsKind elements_kind = receiver_map->elements_kind(); - Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode(); - - __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK); + MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode(); + if (!maybe_stub->To(&stub)) return maybe_stub; + __ DispatchMap(rdx, + Handle<Map>(receiver_map), + Handle<Code>(stub), + DO_SMI_CHECK); Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss(); __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. - return GetCode(NORMAL, factory()->empty_string()); + return GetCode(NORMAL, NULL); } -Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic( - MapHandleList* receiver_maps, - CodeHandleList* handler_ics) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadPolymorphic( + MapList* receiver_maps, + CodeList* handler_ics) { // ----------- S t a t e ------------- // -- rax : key // -- rdx : receiver @@ -3286,15 +3084,18 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic( int receiver_count = receiver_maps->length(); for (int current = 0; current < receiver_count; ++current) { // Check map and tail call if there's a match - __ Cmp(map_reg, receiver_maps->at(current)); - __ j(equal, handler_ics->at(current), RelocInfo::CODE_TARGET); + Handle<Map> map(receiver_maps->at(current)); + __ Cmp(map_reg, map); + __ j(equal, + Handle<Code>(handler_ics->at(current)), + RelocInfo::CODE_TARGET); } __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. 
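CompileLoadPolymorphic just above (and CompileStorePolymorphic earlier) emits a chain of map comparisons: the receiver's map is checked against each recorded map and the first match tail-calls the corresponding handler IC, while no match falls through to the generic miss. Roughly, in C++ terms, with hypothetical handler signatures.

#include <cstddef>

struct Map;
using HandlerIC = void (*)(void* receiver);

struct MapHandlerPair { const Map* map; HandlerIC handler; };

// First matching map wins; otherwise dispatch to the miss handler, just as the
// generated stub jumps to KeyedLoadIC_Miss.
void DispatchPolymorphic(void* receiver, const Map* receiver_map,
                         const MapHandlerPair* pairs, std::size_t count,
                         HandlerIC miss) {
  for (std::size_t i = 0; i < count; ++i) {
    if (pairs[i].map == receiver_map) {
      pairs[i].handler(receiver);
      return;
    }
  }
  miss(receiver);
}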
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC); + return GetCode(NORMAL, NULL, MEGAMORPHIC); } diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status index 7161345ec..759f69f33 100644 --- a/deps/v8/test/cctest/cctest.status +++ b/deps/v8/test/cctest/cctest.status @@ -52,6 +52,9 @@ test-profile-generator/RecordStackTraceAtStartProfiling: PASS || FAIL # We do not yet shrink weak maps after they have been emptied by the GC test-weakmaps/Shrinking: FAIL +# NewGC: BUG(1717) +test-api/OutOfMemoryNested: PASS || TIMEOUT + ############################################################################## [ $arch == arm ] diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc index 5081a648b..167c4cd15 100644 --- a/deps/v8/test/cctest/test-api.cc +++ b/deps/v8/test/cctest/test-api.cc @@ -5438,109 +5438,67 @@ static int StrNCmp16(uint16_t* a, uint16_t* b, int n) { THREADED_TEST(StringWrite) { - LocalContext context; v8::HandleScope scope; v8::Handle<String> str = v8_str("abcde"); // abc<Icelandic eth><Unicode snowman>. v8::Handle<String> str2 = v8_str("abc\303\260\342\230\203"); - const int kStride = 4; // Must match stride in for loops in JS below. - CompileRun( - "var left = '';" - "for (var i = 0; i < 0xd800; i += 4) {" - " left = left + String.fromCharCode(i);" - "}"); - CompileRun( - "var right = '';" - "for (var i = 0; i < 0xd800; i += 4) {" - " right = String.fromCharCode(i) + right;" - "}"); - v8::Handle<v8::Object> global = Context::GetCurrent()->Global(); - Handle<String> left_tree = global->Get(v8_str("left")).As<String>(); - Handle<String> right_tree = global->Get(v8_str("right")).As<String>(); CHECK_EQ(5, str2->Length()); - CHECK_EQ(0xd800 / kStride, left_tree->Length()); - CHECK_EQ(0xd800 / kStride, right_tree->Length()); char buf[100]; - char utf8buf[0xd800 * 3]; + char utf8buf[100]; uint16_t wbuf[100]; int len; int charlen; - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, sizeof(utf8buf), &charlen); CHECK_EQ(9, len); CHECK_EQ(5, charlen); CHECK_EQ(0, strcmp(utf8buf, "abc\303\260\342\230\203")); - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, 8, &charlen); CHECK_EQ(8, len); CHECK_EQ(5, charlen); CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\342\230\203\1", 9)); - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, 7, &charlen); CHECK_EQ(5, len); CHECK_EQ(4, charlen); CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\1", 5)); - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, 6, &charlen); CHECK_EQ(5, len); CHECK_EQ(4, charlen); CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\1", 5)); - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, 5, &charlen); CHECK_EQ(5, len); CHECK_EQ(4, charlen); CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\1", 5)); - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, 4, &charlen); CHECK_EQ(3, len); CHECK_EQ(3, charlen); CHECK_EQ(0, strncmp(utf8buf, "abc\1", 4)); - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, 3, &charlen); CHECK_EQ(3, len); CHECK_EQ(3, charlen); CHECK_EQ(0, strncmp(utf8buf, "abc\1", 4)); - memset(utf8buf, 0x1, 1000); + memset(utf8buf, 0x1, sizeof(utf8buf)); len = str2->WriteUtf8(utf8buf, 2, &charlen); CHECK_EQ(2, len); CHECK_EQ(2, charlen); 
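The StringWrite test being trimmed here exercises String::WriteUtf8 with progressively smaller buffers on "abc" plus U+00F0 and U+2603 (three one-byte characters, one two-byte, one three-byte): the write never splits a multi-byte sequence, which is why a 7-byte buffer still only receives 5 bytes / 4 characters. The capacity accounting behind that behaviour looks roughly like the sketch below (helper names are illustrative).

#include <cstddef>
#include <string>

// Emit code points into `out` until the next whole character no longer fits.
// Returns bytes written; `chars_written` counts complete characters, mirroring
// the (len, charlen) pair the test checks. For U"abc\u00F0\u2603" and capacity 7
// this yields len 5 and chars 4, matching the CHECKs above.
std::size_t WriteUtf8Truncated(const std::u32string& str, char* out,
                               std::size_t capacity, std::size_t* chars_written) {
  std::size_t len = 0, chars = 0;
  for (char32_t cp : str) {
    std::size_t need = cp < 0x80 ? 1 : cp < 0x800 ? 2 : cp < 0x10000 ? 3 : 4;
    if (len + need > capacity) break;          // never split a multi-byte sequence
    if (need == 1) {
      out[len] = static_cast<char>(cp);
    } else if (need == 2) {
      out[len] = static_cast<char>(0xC0 | (cp >> 6));
      out[len + 1] = static_cast<char>(0x80 | (cp & 0x3F));
    } else if (need == 3) {
      out[len] = static_cast<char>(0xE0 | (cp >> 12));
      out[len + 1] = static_cast<char>(0x80 | ((cp >> 6) & 0x3F));
      out[len + 2] = static_cast<char>(0x80 | (cp & 0x3F));
    } else {
      out[len] = static_cast<char>(0xF0 | (cp >> 18));
      out[len + 1] = static_cast<char>(0x80 | ((cp >> 12) & 0x3F));
      out[len + 2] = static_cast<char>(0x80 | ((cp >> 6) & 0x3F));
      out[len + 3] = static_cast<char>(0x80 | (cp & 0x3F));
    }
    len += need;
    ++chars;
  }
  *chars_written = chars;
  return len;
}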
CHECK_EQ(0, strncmp(utf8buf, "ab\1", 3)); - memset(utf8buf, 0x1, sizeof(utf8buf)); - len = left_tree->Utf8Length(); - int utf8_expected = - (0x80 + (0x800 - 0x80) * 2 + (0xd800 - 0x800) * 3) / kStride; - CHECK_EQ(utf8_expected, len); - len = left_tree->WriteUtf8(utf8buf, utf8_expected, &charlen); - CHECK_EQ(utf8_expected, len); - CHECK_EQ(0xd800 / kStride, charlen); - CHECK_EQ(0xed, static_cast<unsigned char>(utf8buf[utf8_expected - 3])); - CHECK_EQ(0x9f, static_cast<unsigned char>(utf8buf[utf8_expected - 2])); - CHECK_EQ(0xc0 - kStride, - static_cast<unsigned char>(utf8buf[utf8_expected - 1])); - CHECK_EQ(1, utf8buf[utf8_expected]); - - memset(utf8buf, 0x1, sizeof(utf8buf)); - len = right_tree->Utf8Length(); - CHECK_EQ(utf8_expected, len); - len = right_tree->WriteUtf8(utf8buf, utf8_expected, &charlen); - CHECK_EQ(utf8_expected, len); - CHECK_EQ(0xd800 / kStride, charlen); - CHECK_EQ(0xed, static_cast<unsigned char>(utf8buf[0])); - CHECK_EQ(0x9f, static_cast<unsigned char>(utf8buf[1])); - CHECK_EQ(0xc0 - kStride, static_cast<unsigned char>(utf8buf[2])); - CHECK_EQ(1, utf8buf[utf8_expected]); - memset(buf, 0x1, sizeof(buf)); memset(wbuf, 0x1, sizeof(wbuf)); len = str->WriteAscii(buf); @@ -11482,7 +11440,6 @@ static void MorphAString(i::String* string, // Test that we can still flatten a string if the components it is built up // from have been turned into 16 bit strings in the mean time. THREADED_TEST(MorphCompositeStringTest) { - char utf_buffer[129]; const char* c_string = "Now is the time for all good men" " to come to the aid of the party"; uint16_t* two_byte_string = AsciiToTwoByteString(c_string); @@ -11511,17 +11468,6 @@ THREADED_TEST(MorphCompositeStringTest) { MorphAString(*v8::Utils::OpenHandle(*lhs), &ascii_resource, &uc16_resource); MorphAString(*v8::Utils::OpenHandle(*rhs), &ascii_resource, &uc16_resource); - // This should UTF-8 without flattening, since everything is ASCII. - Handle<String> cons = v8_compile("cons")->Run().As<String>(); - CHECK_EQ(128, cons->Utf8Length()); - int nchars = -1; - CHECK_EQ(129, cons->WriteUtf8(utf_buffer, -1, &nchars)); - CHECK_EQ(128, nchars); - CHECK_EQ(0, strcmp( - utf_buffer, - "Now is the time for all good men to come to the aid of the party" - "Now is the time for all good men to come to the aid of the party")); - // Now do some stuff to make sure the strings are flattened, etc. CompileRun( "/[^a-z]/.test(cons);" diff --git a/deps/v8/test/cctest/test-debug.cc b/deps/v8/test/cctest/test-debug.cc index cf723bafb..de60d4999 100644 --- a/deps/v8/test/cctest/test-debug.cc +++ b/deps/v8/test/cctest/test-debug.cc @@ -409,8 +409,11 @@ Handle<FixedArray> GetDebuggedFunctions() { static Handle<Code> ComputeCallDebugBreak(int argc) { - return Isolate::Current()->stub_cache()->ComputeCallDebugBreak(argc, - Code::CALL_IC); + CALL_HEAP_FUNCTION( + v8::internal::Isolate::Current(), + v8::internal::Isolate::Current()->stub_cache()->ComputeCallDebugBreak( + argc, Code::CALL_IC), + Code); } @@ -422,8 +425,8 @@ void CheckDebuggerUnloaded(bool check_functions) { CHECK_EQ(NULL, Isolate::Current()->debug()->debug_info_list_); // Collect garbage to ensure weak handles are cleared. - HEAP->CollectAllGarbage(Heap::kNoGCFlags); - HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); + HEAP->CollectAllGarbage(i::Heap::kNoGCFlags); + HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask); // Iterate the head and check that there are no debugger related objects left. 
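MorphCompositeStringTest, trimmed above, builds `cons` as a two-part cons (rope) string and then morphs the leaves from ASCII to two-byte; the assertions removed by this revert checked that such a string could be written as UTF-8 without flattening it first. The underlying structure is a simple binary rope, sketched below with ordinary C++ types.

#include <memory>
#include <string>

// A minimal cons-string (rope): either a flat leaf or a pair of children.
struct ConsStringSketch {
  std::string leaf;                              // used when both children are null
  std::shared_ptr<ConsStringSketch> first, second;

  static std::shared_ptr<ConsStringSketch> Leaf(std::string s) {
    auto node = std::make_shared<ConsStringSketch>();
    node->leaf = std::move(s);
    return node;
  }
  static std::shared_ptr<ConsStringSketch> Cons(std::shared_ptr<ConsStringSketch> a,
                                                std::shared_ptr<ConsStringSketch> b) {
    auto node = std::make_shared<ConsStringSketch>();
    node->first = std::move(a);
    node->second = std::move(b);
    return node;
  }
};

// Flattening walks the tree and produces one contiguous string, which is what
// the test later forces with the regexp/indexOf calls on `cons`.
std::string Flatten(const ConsStringSketch& s) {
  if (!s.first) return s.leaf;
  std::string result = Flatten(*s.first);
  if (s.second) result += Flatten(*s.second);
  return result;
}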
HeapIterator iterator; diff --git a/deps/v8/test/cctest/test-dictionary.cc b/deps/v8/test/cctest/test-dictionary.cc index 793e228a9..15a854b36 100644 --- a/deps/v8/test/cctest/test-dictionary.cc +++ b/deps/v8/test/cctest/test-dictionary.cc @@ -38,7 +38,6 @@ using namespace v8::internal; - TEST(ObjectHashTable) { v8::HandleScope scope; LocalContext context; @@ -67,8 +66,7 @@ TEST(ObjectHashTable) { CHECK_EQ(table->NumberOfDeletedElements(), 1); CHECK_EQ(table->Lookup(*a), HEAP->undefined_value()); - // Keys should map back to their respective values and also should get - // an identity hash code generated. + // Keys should map back to their respective values. for (int i = 0; i < 100; i++) { Handle<JSObject> key = FACTORY->NewJSArray(7); Handle<JSObject> value = FACTORY->NewJSArray(11); @@ -76,67 +74,12 @@ TEST(ObjectHashTable) { CHECK_EQ(table->NumberOfElements(), i + 1); CHECK_NE(table->FindEntry(*key), ObjectHashTable::kNotFound); CHECK_EQ(table->Lookup(*key), *value); - CHECK(key->GetIdentityHash(OMIT_CREATION)->ToObjectChecked()->IsSmi()); - } - - // Keys never added to the map which already have an identity hash - // code should not be found. - for (int i = 0; i < 100; i++) { - Handle<JSObject> key = FACTORY->NewJSArray(7); - CHECK(key->GetIdentityHash(ALLOW_CREATION)->ToObjectChecked()->IsSmi()); - CHECK_EQ(table->FindEntry(*key), ObjectHashTable::kNotFound); - CHECK_EQ(table->Lookup(*key), HEAP->undefined_value()); - CHECK(key->GetIdentityHash(OMIT_CREATION)->ToObjectChecked()->IsSmi()); } - // Keys that don't have an identity hash should not be found and also - // should not get an identity hash code generated. - for (int i = 0; i < 100; i++) { - Handle<JSObject> key = FACTORY->NewJSArray(7); - CHECK_EQ(table->Lookup(*key), HEAP->undefined_value()); - CHECK_EQ(key->GetIdentityHash(OMIT_CREATION), HEAP->undefined_value()); + // Keys never added to the map should not be found. + for (int i = 0; i < 1000; i++) { + Handle<JSObject> o = FACTORY->NewJSArray(100); + CHECK_EQ(table->FindEntry(*o), ObjectHashTable::kNotFound); + CHECK_EQ(table->Lookup(*o), HEAP->undefined_value()); } } - - -#ifdef DEBUG -TEST(ObjectHashSetCausesGC) { - v8::HandleScope scope; - LocalContext context; - Handle<ObjectHashSet> table = FACTORY->NewObjectHashSet(1); - Handle<JSObject> key = FACTORY->NewJSArray(0); - - // Simulate a full heap so that generating an identity hash code - // in subsequent calls will request GC. - FLAG_gc_interval = 0; - - // Calling Contains() should not cause GC ever. - CHECK(!table->Contains(*key)); - - // Calling Remove() should not cause GC ever. - CHECK(!table->Remove(*key)->IsFailure()); - - // Calling Add() should request GC by returning a failure. - CHECK(table->Add(*key)->IsRetryAfterGC()); -} -#endif - - -#ifdef DEBUG -TEST(ObjectHashTableCausesGC) { - v8::HandleScope scope; - LocalContext context; - Handle<ObjectHashTable> table = FACTORY->NewObjectHashTable(1); - Handle<JSObject> key = FACTORY->NewJSArray(0); - - // Simulate a full heap so that generating an identity hash code - // in subsequent calls will request GC. - FLAG_gc_interval = 0; - - // Calling Lookup() should not cause GC ever. - CHECK(table->Lookup(*key)->IsUndefined()); - - // Calling Put() should request GC by returning a failure. 
- CHECK(table->Put(*key, *key)->IsRetryAfterGC()); -} -#endif diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc index 87e7a7d0f..d695d7438 100644 --- a/deps/v8/test/cctest/test-heap-profiler.cc +++ b/deps/v8/test/cctest/test-heap-profiler.cc @@ -252,28 +252,6 @@ TEST(HeapSnapshotHeapNumbers) { CHECK_EQ(v8::HeapGraphNode::kHeapNumber, b->GetType()); } -TEST(HeapSnapshotSlicedString) { - v8::HandleScope scope; - LocalContext env; - CompileRun( - "parent_string = \"123456789.123456789.123456789.123456789.123456789." - "123456789.123456789.123456789.123456789.123456789." - "123456789.123456789.123456789.123456789.123456789." - "123456789.123456789.123456789.123456789.123456789.\";" - "child_string = parent_string.slice(100);"); - const v8::HeapSnapshot* snapshot = - v8::HeapProfiler::TakeSnapshot(v8_str("strings")); - const v8::HeapGraphNode* global = GetGlobalObject(snapshot); - const v8::HeapGraphNode* parent_string = - GetProperty(global, v8::HeapGraphEdge::kShortcut, "parent_string"); - CHECK_NE(NULL, parent_string); - const v8::HeapGraphNode* child_string = - GetProperty(global, v8::HeapGraphEdge::kShortcut, "child_string"); - CHECK_NE(NULL, child_string); - const v8::HeapGraphNode* parent = - GetProperty(child_string, v8::HeapGraphEdge::kInternal, "parent"); - CHECK_EQ(parent_string, parent); -} TEST(HeapSnapshotInternalReferences) { v8::HandleScope scope; diff --git a/deps/v8/test/cctest/test-parsing.cc b/deps/v8/test/cctest/test-parsing.cc index 8cfd5f71f..8f217e6cd 100755 --- a/deps/v8/test/cctest/test-parsing.cc +++ b/deps/v8/test/cctest/test-parsing.cc @@ -260,11 +260,10 @@ TEST(StandAlonePreParser) { i::JavaScriptScanner scanner(i::Isolate::Current()->unicode_cache()); scanner.Initialize(&stream); - int flags = i::kAllowLazy | i::kAllowNativesSyntax; v8::preparser::PreParser::PreParseResult result = v8::preparser::PreParser::PreParseProgram(&scanner, &log, - flags, + true, stack_limit); CHECK_EQ(v8::preparser::PreParser::kPreParseSuccess, result); i::ScriptDataImpl data(log.ExtractData()); @@ -273,43 +272,6 @@ TEST(StandAlonePreParser) { } -TEST(StandAlonePreParserNoNatives) { - v8::V8::Initialize(); - - int marker; - i::Isolate::Current()->stack_guard()->SetStackLimit( - reinterpret_cast<uintptr_t>(&marker) - 128 * 1024); - - const char* programs[] = { - "%ArgleBargle(glop);", - "var x = %_IsSmi(42);", - NULL - }; - - uintptr_t stack_limit = i::Isolate::Current()->stack_guard()->real_climit(); - for (int i = 0; programs[i]; i++) { - const char* program = programs[i]; - i::Utf8ToUC16CharacterStream stream( - reinterpret_cast<const i::byte*>(program), - static_cast<unsigned>(strlen(program))); - i::CompleteParserRecorder log; - i::JavaScriptScanner scanner(i::Isolate::Current()->unicode_cache()); - scanner.Initialize(&stream); - - // Flags don't allow natives syntax. - v8::preparser::PreParser::PreParseResult result = - v8::preparser::PreParser::PreParseProgram(&scanner, - &log, - i::kAllowLazy, - stack_limit); - CHECK_EQ(v8::preparser::PreParser::kPreParseSuccess, result); - i::ScriptDataImpl data(log.ExtractData()); - // Data contains syntax error. - CHECK(data.has_error()); - } -} - - TEST(RegressChromium62639) { v8::V8::Initialize(); @@ -744,135 +706,3 @@ TEST(RegExpScanning) { TestScanRegExp("/=/", "="); TestScanRegExp("/=?/", "=?"); } - - -TEST(ScopePositions) { - // Test the parser for correctly setting the start and end positions - // of a scope. 
We check the scope positions of exactly one scope - // nested in the global scope of a program. 'inner source' is the - // source code that determines the part of the source belonging - // to the nested scope. 'outer_prefix' and 'outer_suffix' are - // parts of the source that belong to the global scope. - struct SourceData { - const char* outer_prefix; - const char* inner_source; - const char* outer_suffix; - i::ScopeType scope_type; - }; - - const SourceData source_data[] = { - { " with ({}) ", "{ block; }", " more;", i::WITH_SCOPE }, - { " with ({}) ", "{ block; }", "; more;", i::WITH_SCOPE }, - { " with ({}) ", "{\n" - " block;\n" - " }", "\n" - " more;", i::WITH_SCOPE }, - { " with ({}) ", "statement;", " more;", i::WITH_SCOPE }, - { " with ({}) ", "statement", "\n" - " more;", i::WITH_SCOPE }, - { " with ({})\n" - " ", "statement;", "\n" - " more;", i::WITH_SCOPE }, - { " try {} catch ", "(e) { block; }", " more;", i::CATCH_SCOPE }, - { " try {} catch ", "(e) { block; }", "; more;", i::CATCH_SCOPE }, - { " try {} catch ", "(e) {\n" - " block;\n" - " }", "\n" - " more;", i::CATCH_SCOPE }, - { " try {} catch ", "(e) { block; }", " finally { block; } more;", - i::CATCH_SCOPE }, - { " start;\n" - " ", "{ let block; }", " more;", i::BLOCK_SCOPE }, - { " start;\n" - " ", "{ let block; }", "; more;", i::BLOCK_SCOPE }, - { " start;\n" - " ", "{\n" - " let block;\n" - " }", "\n" - " more;", i::BLOCK_SCOPE }, - { " start;\n" - " function fun", "(a,b) { infunction; }", " more;", - i::FUNCTION_SCOPE }, - { " start;\n" - " function fun", "(a,b) {\n" - " infunction;\n" - " }", "\n" - " more;", i::FUNCTION_SCOPE }, - { " (function fun", "(a,b) { infunction; }", ")();", - i::FUNCTION_SCOPE }, - { " for ", "(let x = 1 ; x < 10; ++ x) { block; }", " more;", - i::BLOCK_SCOPE }, - { " for ", "(let x = 1 ; x < 10; ++ x) { block; }", "; more;", - i::BLOCK_SCOPE }, - { " for ", "(let x = 1 ; x < 10; ++ x) {\n" - " block;\n" - " }", "\n" - " more;", i::BLOCK_SCOPE }, - { " for ", "(let x = 1 ; x < 10; ++ x) statement;", " more;", - i::BLOCK_SCOPE }, - { " for ", "(let x = 1 ; x < 10; ++ x) statement", "\n" - " more;", i::BLOCK_SCOPE }, - { " for ", "(let x = 1 ; x < 10; ++ x)\n" - " statement;", "\n" - " more;", i::BLOCK_SCOPE }, - { " for ", "(let x in {}) { block; }", " more;", i::BLOCK_SCOPE }, - { " for ", "(let x in {}) { block; }", "; more;", i::BLOCK_SCOPE }, - { " for ", "(let x in {}) {\n" - " block;\n" - " }", "\n" - " more;", i::BLOCK_SCOPE }, - { " for ", "(let x in {}) statement;", " more;", i::BLOCK_SCOPE }, - { " for ", "(let x in {}) statement", "\n" - " more;", i::BLOCK_SCOPE }, - { " for ", "(let x in {})\n" - " statement;", "\n" - " more;", i::BLOCK_SCOPE }, - { NULL, NULL, NULL, i::EVAL_SCOPE } - }; - - v8::HandleScope handles; - v8::Persistent<v8::Context> context = v8::Context::New(); - v8::Context::Scope context_scope(context); - - int marker; - i::Isolate::Current()->stack_guard()->SetStackLimit( - reinterpret_cast<uintptr_t>(&marker) - 128 * 1024); - - for (int i = 0; source_data[i].outer_prefix; i++) { - int kPrefixLen = i::StrLength(source_data[i].outer_prefix); - int kInnerLen = i::StrLength(source_data[i].inner_source); - int kSuffixLen = i::StrLength(source_data[i].outer_suffix); - int kProgramSize = kPrefixLen + kInnerLen + kSuffixLen; - i::Vector<char> program = i::Vector<char>::New(kProgramSize + 1); - int length; - length = i::OS::SNPrintF(program, "%s%s%s", - source_data[i].outer_prefix, - source_data[i].inner_source, - source_data[i].outer_suffix); - ASSERT(length == 
kProgramSize); - - // Parse program source. - i::Handle<i::String> source( - FACTORY->NewStringFromAscii(i::CStrVector(program.start()))); - i::Handle<i::Script> script = FACTORY->NewScript(source); - i::Parser parser(script, false, NULL, NULL); - parser.SetHarmonyScoping(true); - i::FunctionLiteral* function = - parser.ParseProgram(source, true, i::kNonStrictMode); - ASSERT(function != NULL); - - // Check scope types and positions. - i::Scope* scope = function->scope(); - CHECK(scope->is_global_scope()); - CHECK_EQ(scope->start_position(), 0); - CHECK_EQ(scope->end_position(), kProgramSize); - CHECK_EQ(scope->inner_scopes()->length(), 1); - - i::Scope* inner_scope = scope->inner_scopes()->at(0); - CHECK_EQ(inner_scope->type(), source_data[i].scope_type); - CHECK_EQ(inner_scope->start_position(), kPrefixLen); - // The end position of a token is one position after the last - // character belonging to that token. - CHECK_EQ(inner_scope->end_position(), kPrefixLen + kInnerLen); - } -} diff --git a/deps/v8/test/cctest/test-serialize.cc b/deps/v8/test/cctest/test-serialize.cc index b5c1a0976..cccd2eec0 100644 --- a/deps/v8/test/cctest/test-serialize.cc +++ b/deps/v8/test/cctest/test-serialize.cc @@ -130,8 +130,7 @@ TEST(ExternalReferenceEncoder) { encoder.Encode( ExternalReference::new_space_start(isolate).address())); CHECK_EQ(make_code(UNCLASSIFIED, 3), - encoder.Encode( - ExternalReference::roots_array_start(isolate).address())); + encoder.Encode(ExternalReference::roots_address(isolate).address())); } diff --git a/deps/v8/test/mjsunit/apply.js b/deps/v8/test/mjsunit/apply.js index 413ee937c..c166110df 100644 --- a/deps/v8/test/mjsunit/apply.js +++ b/deps/v8/test/mjsunit/apply.js @@ -190,10 +190,3 @@ assertEquals("morseper", "moreseper-prime"); delete(Array.prototype["1"]); - -// Check correct handling of non-array argument lists. -assertSame(this, f0.apply(this, {}), "non-array-1"); -assertSame(this, f0.apply(this, { length:1 }), "non-array-2"); -assertEquals(void 0, f1.apply(this, { length:1 }), "non-array-3"); -assertEquals(void 0, f1.apply(this, { 0:"foo" }), "non-array-4"); -assertEquals("foo", f1.apply(this, { length:1, 0:"foo" }), "non-array-5"); diff --git a/deps/v8/test/mjsunit/array-literal-transitions.js b/deps/v8/test/mjsunit/array-literal-transitions.js deleted file mode 100644 index 321340c4b..000000000 --- a/deps/v8/test/mjsunit/array-literal-transitions.js +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc -// Test element kind of objects. -// Since --smi-only-arrays affects builtins, its default setting at compile -// time sticks if built with snapshot. If --smi-only-arrays is deactivated -// by default, only a no-snapshot build actually has smi-only arrays enabled -// in this test case. Depending on whether smi-only arrays are actually -// enabled, this test takes the appropriate code path to check smi-only arrays. - -support_smi_only_arrays = %HasFastSmiOnlyElements(new Array()); - -// IC and Crankshaft support for smi-only elements in dynamic array literals. -function get(foo) { return foo; } // Used to generate dynamic values. - -function array_literal_test() { - var a0 = [1, 2, 3]; - assertTrue(%HasFastSmiOnlyElements(a0)); - var a1 = [get(1), get(2), get(3)]; - assertTrue(%HasFastSmiOnlyElements(a1)); - - var b0 = [1, 2, get("three")]; - assertTrue(%HasFastElements(b0)); - var b1 = [get(1), get(2), get("three")]; - assertTrue(%HasFastElements(b1)); - - var c0 = [1, 2, get(3.5)]; - assertTrue(%HasFastDoubleElements(c0)); - assertEquals(3.5, c0[2]); - assertEquals(2, c0[1]); - assertEquals(1, c0[0]); - - var c1 = [1, 2, 3.5]; - assertTrue(%HasFastDoubleElements(c1)); - assertEquals(3.5, c1[2]); - assertEquals(2, c1[1]); - assertEquals(1, c1[0]); - - var c2 = [get(1), get(2), get(3.5)]; - assertTrue(%HasFastDoubleElements(c2)); - assertEquals(3.5, c2[2]); - assertEquals(2, c2[1]); - assertEquals(1, c2[0]); - - var object = new Object(); - var d0 = [1, 2, object]; - assertTrue(%HasFastElements(d0)); - assertEquals(object, d0[2]); - assertEquals(2, d0[1]); - assertEquals(1, d0[0]); - - var e0 = [1, 2, 3.5]; - assertTrue(%HasFastDoubleElements(e0)); - assertEquals(3.5, e0[2]); - assertEquals(2, e0[1]); - assertEquals(1, e0[0]); - - var f0 = [1, 2, [1, 2]]; - assertTrue(%HasFastElements(f0)); - assertEquals([1,2], f0[2]); - assertEquals(2, f0[1]); - assertEquals(1, f0[0]); -} - -if (support_smi_only_arrays) { - for (var i = 0; i < 3; i++) { - array_literal_test(); - } - %OptimizeFunctionOnNextCall(array_literal_test); - array_literal_test(); - - function test_large_literal() { - - function d() { - gc(); - return 2.5; - } - - function o() { - gc(); - return new Object(); - } - - large = - [ 0, 1, 2, 3, 4, 5, d(), d(), d(), d(), d(), d(), o(), o(), o(), o() ]; - assertFalse(%HasDictionaryElements(large)); - assertFalse(%HasFastSmiOnlyElements(large)); - assertFalse(%HasFastDoubleElements(large)); - assertTrue(%HasFastElements(large)); - assertEquals(large, - [0, 1, 2, 3, 4, 5, 2.5, 2.5, 2.5, 2.5, 2.5, 2.5, - new Object(), new Object(), new Object(), new Object()]); - } - - for (var i = 0; i < 3; i++) { - test_large_literal(); - } - %OptimizeFunctionOnNextCall(test_large_literal); - test_large_literal(); -} diff --git a/deps/v8/test/mjsunit/compiler/compare.js b/deps/v8/test/mjsunit/compiler/compare.js index 460b0ab00..3f9608700 100644 --- a/deps/v8/test/mjsunit/compiler/compare.js +++ 
b/deps/v8/test/mjsunit/compiler/compare.js @@ -83,9 +83,9 @@ function TestNonPrimitive(order, f) { } TestNonPrimitive("xy", MaxLT); -TestNonPrimitive("xy", MaxLE); +TestNonPrimitive("yx", MaxLE); TestNonPrimitive("xy", MaxGE); -TestNonPrimitive("xy", MaxGT); +TestNonPrimitive("yx", MaxGT); // Test compare in case of aliased registers. function CmpX(x) { if (x == x) return 42; } diff --git a/deps/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js b/deps/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js deleted file mode 100644 index d82c690ad..000000000 --- a/deps/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Test deoptimization after inlined call. - -function bar(a, b) {try { return a; } finally { } } - -function test_context() { - function foo(x) { return 42; } - var s, t; - for (var i = 0x7ff00000; i < 0x80000000; i++) { - bar(t = foo(i) ? bar(42 + i - i) : bar(0), s = i + t); - } - return s; -} -assertEquals(0x7fffffff + 42, test_context()); - - -function value_context() { - function foo(x) { return 42; } - var s, t; - for (var i = 0x7ff00000; i < 0x80000000; i++) { - bar(t = foo(i), s = i + t); - } - return s; -} -assertEquals(0x7fffffff + 42, value_context()); - - -function effect_context() { - function foo(x) { return 42; } - var s, t; - for (var i = 0x7ff00000; i < 0x80000000; i++) { - bar(foo(i), s = i + 42); - } - return s; -} -assertEquals(0x7fffffff + 42, effect_context()); diff --git a/deps/v8/test/mjsunit/compiler/strict-recompile.js b/deps/v8/test/mjsunit/compiler/strict-recompile.js deleted file mode 100644 index 96e8bcab7..000000000 --- a/deps/v8/test/mjsunit/compiler/strict-recompile.js +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. 
-// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --allow-natives-syntax - -function foo() { - try { - var o = {}; - Object.defineProperty(o, 'x', {value: 12, writable: false}); - o.x = 13; - } catch(e) { - return true; - } - return false; -} - -assertFalse(foo()); - -function do_eval(str) { - "use strict"; - return eval(str); -} - -var eval_foo = do_eval('(' + foo + ')'); -for (var i = 0; i < 5; i++) assertTrue(eval_foo()); -%OptimizeFunctionOnNextCall(eval_foo); -assertTrue(eval_foo()); diff --git a/deps/v8/test/mjsunit/compiler/regress-inline-callfunctionstub.js b/deps/v8/test/mjsunit/cyclic-error-to-string.js index a39d26df0..2502b5340 100644 --- a/deps/v8/test/mjsunit/compiler/regress-inline-callfunctionstub.js +++ b/deps/v8/test/mjsunit/cyclic-error-to-string.js @@ -25,22 +25,22 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --allow-natives-syntax +// Test printing of cyclic errors which return the empty string for +// compatibility with Safari and Firefox. -// Test inlined of calls-as-function two levels deep. -function f() { return 42; } +var e = new Error(); +assertEquals('Error', e + ''); -var o = {g : function () { return f(); } } -function main(func) { - var v=0; - for (var i=0; i<1; i++) { - if (func()) v = 42; - } -} - -main(o.g); -main(o.g); -main(o.g); -%OptimizeFunctionOnNextCall(main); -main(o.g); +e = new Error(); +e.name = e; +e.message = e; +e.stack = e; +e.arguments = e; +assertEquals(': ', e + ''); +e = new Error(); +e.name = [ e ]; +e.message = [ e ]; +e.stack = [ e ]; +e.arguments = [ e ]; +assertEquals(': ', e + ''); diff --git a/deps/v8/test/mjsunit/debug-scopes.js b/deps/v8/test/mjsunit/debug-scopes.js index 0788a55b0..1c23b0bf9 100644 --- a/deps/v8/test/mjsunit/debug-scopes.js +++ b/deps/v8/test/mjsunit/debug-scopes.js @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2008 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -25,7 +25,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --expose-debug-as debug --allow-natives-syntax +// Flags: --expose-debug-as debug // The functions used for testing backtraces. They are at the top to make the // testing of source line/column easier. @@ -439,26 +439,6 @@ with(with_object) { EndTest(); -// With block in function that is marked for optimization while being executed. -BeginTest("With 7"); - -function with_7() { - with({}) { - %OptimizeFunctionOnNextCall(with_7); - debugger; - } -} - -listener_delegate = function(exec_state) { - CheckScopeChain([debug.ScopeType.With, - debug.ScopeType.Local, - debug.ScopeType.Global], exec_state); - CheckScopeContent({}, 0, exec_state); -}; -with_7(); -EndTest(); - - // Simple closure formed by returning an inner function referering the outer // functions arguments. BeginTest("Closure 1"); @@ -970,28 +950,6 @@ try { EndTest(); -// Catch block in function that is marked for optimization while being executed. -BeginTest("Catch block 7"); -function catch_block_7() { - %OptimizeFunctionOnNextCall(catch_block_7); - try { - throw 'Exception'; - } catch (e) { - debugger; - } -}; - - -listener_delegate = function(exec_state) { - CheckScopeChain([debug.ScopeType.Catch, - debug.ScopeType.Local, - debug.ScopeType.Global], exec_state); - CheckScopeContent({e:'Exception'}, 0, exec_state); -}; -catch_block_7(); -EndTest(); - - assertEquals(begin_test_count, break_count, 'one or more tests did not enter the debugger'); assertEquals(begin_test_count, end_test_count, diff --git a/deps/v8/test/mjsunit/debug-step-3.js b/deps/v8/test/mjsunit/debug-step-3.js deleted file mode 100644 index ad036678e..000000000 --- a/deps/v8/test/mjsunit/debug-step-3.js +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --expose-debug-as debug - -// This test tests that full code compiled without debug break slots -// is recompiled with debug break slots when debugging is started. - -// Get the Debug object exposed from the debug context global object. -Debug = debug.Debug - -var bp; -var done = false; -var step_count = 0; -var set_bp = false - -// Debug event listener which steps until the global variable done is true. -function listener(event, exec_state, event_data, data) { - if (event == Debug.DebugEvent.Break) { - if (!done) exec_state.prepareStep(Debug.StepAction.StepNext); - step_count++; - } -}; - -// Set the global variables state to prpare the stepping test. -function prepare_step_test() { - done = false; - step_count = 0; -} - -// Test function to step through. -function f() { - var a = 0; - if (set_bp) { bp = Debug.setBreakPoint(f, 3); } - var i = 1; - var j = 2; - done = true; -}; - -prepare_step_test(); -f(); - -// Add the debug event listener. -Debug.setListener(listener); - -// Make f set a breakpoint with an activation on the stack. -prepare_step_test(); -set_bp = true; -f(); -// TODO(1782): Fix issue to bring back this assert. -//assertEquals(4, step_count); -Debug.clearBreakPoint(bp); - -// Set a breakpoint on the first var statement (line 1). -set_bp = false; -bp = Debug.setBreakPoint(f, 3); - -// Step through the function ensuring that the var statements are hit as well. -prepare_step_test(); -f(); -// TODO(1782): Fix issue to bring back this assert. -//assertEquals(4, step_count); - -// Clear the breakpoint and check that no stepping happens. -Debug.clearBreakPoint(bp); -prepare_step_test(); -f(); -assertEquals(0, step_count); - -// Get rid of the debug event listener. -Debug.setListener(null); diff --git a/deps/v8/test/mjsunit/element-kind.js b/deps/v8/test/mjsunit/element-kind.js new file mode 100644 index 000000000..46fd8f567 --- /dev/null +++ b/deps/v8/test/mjsunit/element-kind.js @@ -0,0 +1,261 @@ +// Copyright 2011 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --allow-natives-syntax --smi-only-arrays +// Test element kind of objects. +// Since --smi-only-arrays affects builtins, its default setting at compile +// time sticks if built with snapshot. If --smi-only-arrays is deactivated +// by default, only a no-snapshot build actually has smi-only arrays enabled +// in this test case. Depending on whether smi-only arrays are actually +// enabled, this test takes the appropriate code path to check smi-only arrays. + + +support_smi_only_arrays = %HasFastSmiOnlyElements([]); + +if (support_smi_only_arrays) { + print("Tests include smi-only arrays."); +} else { + print("Tests do NOT include smi-only arrays."); +} + +var element_kind = { + fast_smi_only_elements : 0, + fast_elements : 1, + fast_double_elements : 2, + dictionary_elements : 3, + external_byte_elements : 4, + external_unsigned_byte_elements : 5, + external_short_elements : 6, + external_unsigned_short_elements : 7, + external_int_elements : 8, + external_unsigned_int_elements : 9, + external_float_elements : 10, + external_double_elements : 11, + external_pixel_elements : 12 +} + +// We expect an object to only be of one element kind. +function assertKind(expected, obj) { + if (support_smi_only_arrays) { + assertEquals(expected == element_kind.fast_smi_only_elements, + %HasFastSmiOnlyElements(obj)); + assertEquals(expected == element_kind.fast_elements, + %HasFastElements(obj)); + } else { + assertEquals(expected == element_kind.fast_elements || + expected == element_kind.fast_smi_only_elements, + %HasFastElements(obj)); + } + assertEquals(expected == element_kind.fast_double_elements, + %HasFastDoubleElements(obj)); + assertEquals(expected == element_kind.dictionary_elements, + %HasDictionaryElements(obj)); + assertEquals(expected == element_kind.external_byte_elements, + %HasExternalByteElements(obj)); + assertEquals(expected == element_kind.external_unsigned_byte_elements, + %HasExternalUnsignedByteElements(obj)); + assertEquals(expected == element_kind.external_short_elements, + %HasExternalShortElements(obj)); + assertEquals(expected == element_kind.external_unsigned_short_elements, + %HasExternalUnsignedShortElements(obj)); + assertEquals(expected == element_kind.external_int_elements, + %HasExternalIntElements(obj)); + assertEquals(expected == element_kind.external_unsigned_int_elements, + %HasExternalUnsignedIntElements(obj)); + assertEquals(expected == element_kind.external_float_elements, + %HasExternalFloatElements(obj)); + assertEquals(expected == element_kind.external_double_elements, + %HasExternalDoubleElements(obj)); + assertEquals(expected == element_kind.external_pixel_elements, + %HasExternalPixelElements(obj)); + // every external kind is also an external array + assertEquals(expected >= element_kind.external_byte_elements, + %HasExternalArrayElements(obj)); +} + +var me = {}; +assertKind(element_kind.fast_elements, me); +me.dance = 0xD15C0; +me.drink = 0xC0C0A; +assertKind(element_kind.fast_elements, me); + +var too = 
[1,2,3]; +assertKind(element_kind.fast_smi_only_elements, too); +too.dance = 0xD15C0; +too.drink = 0xC0C0A; +assertKind(element_kind.fast_smi_only_elements, too); + +// Make sure the element kind transitions from smionly when a non-smi is stored. +var you = new Array(); +assertKind(element_kind.fast_smi_only_elements, you); +for (var i = 0; i < 1337; i++) { + var val = i; + if (i == 1336) { + assertKind(element_kind.fast_smi_only_elements, you); + val = new Object(); + } + you[i] = val; +} +assertKind(element_kind.fast_elements, you); + +assertKind(element_kind.dictionary_elements, new Array(0xDECAF)); + +var fast_double_array = new Array(0xDECAF); +for (var i = 0; i < 0xDECAF; i++) fast_double_array[i] = i / 2; +assertKind(element_kind.fast_double_elements, fast_double_array); + +assertKind(element_kind.external_byte_elements, new Int8Array(9001)); +assertKind(element_kind.external_unsigned_byte_elements, new Uint8Array(007)); +assertKind(element_kind.external_short_elements, new Int16Array(666)); +assertKind(element_kind.external_unsigned_short_elements, new Uint16Array(42)); +assertKind(element_kind.external_int_elements, new Int32Array(0xF)); +assertKind(element_kind.external_unsigned_int_elements, new Uint32Array(23)); +assertKind(element_kind.external_float_elements, new Float32Array(7)); +assertKind(element_kind.external_double_elements, new Float64Array(0)); +assertKind(element_kind.external_pixel_elements, new PixelArray(512)); + +// Crankshaft support for smi-only array elements. +function monomorphic(array) { + for (var i = 0; i < 3; i++) { + array[i] = i + 10; + } + assertKind(element_kind.fast_smi_only_elements, array); + for (var i = 0; i < 3; i++) { + var a = array[i]; + assertEquals(i + 10, a); + } +} +var smi_only = [1, 2, 3]; +for (var i = 0; i < 3; i++) monomorphic(smi_only); +%OptimizeFunctionOnNextCall(monomorphic); +monomorphic(smi_only); +function polymorphic(array, expected_kind) { + array[1] = 42; + assertKind(expected_kind, array); + var a = array[1]; + assertEquals(42, a); +} +var smis = [1, 2, 3]; +var strings = ["one", "two", "three"]; +var doubles = [0, 0, 0]; doubles[0] = 1.5; doubles[1] = 2.5; doubles[2] = 3.5; +assertKind(support_smi_only_arrays + ? element_kind.fast_double_elements + : element_kind.fast_elements, + doubles); +for (var i = 0; i < 3; i++) { + polymorphic(smis, element_kind.fast_smi_only_elements); + polymorphic(strings, element_kind.fast_elements); + polymorphic(doubles, support_smi_only_arrays + ? element_kind.fast_double_elements + : element_kind.fast_elements); +} +%OptimizeFunctionOnNextCall(polymorphic); +polymorphic(smis, element_kind.fast_smi_only_elements); +polymorphic(strings, element_kind.fast_elements); +polymorphic(doubles, support_smi_only_arrays + ? element_kind.fast_double_elements + : element_kind.fast_elements); + +// Crankshaft support for smi-only elements in dynamic array literals. +function get(foo) { return foo; } // Used to generate dynamic values. + +function crankshaft_test() { + var a = [get(1), get(2), get(3)]; + assertKind(element_kind.fast_smi_only_elements, a); + var b = [get(1), get(2), get("three")]; + assertKind(element_kind.fast_elements, b); + var c = [get(1), get(2), get(3.5)]; + // The full code generator doesn't support conversion to fast_double_elements + // yet. Crankshaft does, but only with --smi-only-arrays support. 
+ if ((%GetOptimizationStatus(crankshaft_test) & 1) && + support_smi_only_arrays) { + assertKind(element_kind.fast_double_elements, c); + } else { + assertKind(element_kind.fast_elements, c); + } +} +for (var i = 0; i < 3; i++) { + crankshaft_test(); +} +%OptimizeFunctionOnNextCall(crankshaft_test); +crankshaft_test(); + +// Elements_kind transitions for arrays. + +// A map can have three different elements_kind transitions: SMI->DOUBLE, +// DOUBLE->OBJECT, and SMI->OBJECT. No matter in which order these three are +// created, they must always end up with the same FAST map. + +// This test is meaningless without FAST_SMI_ONLY_ELEMENTS. +if (support_smi_only_arrays) { + // Preparation: create one pair of identical objects for each case. + var a = [1, 2, 3]; + var b = [1, 2, 3]; + assertTrue(%HaveSameMap(a, b)); + assertKind(element_kind.fast_smi_only_elements, a); + var c = [1, 2, 3]; + c["case2"] = true; + var d = [1, 2, 3]; + d["case2"] = true; + assertTrue(%HaveSameMap(c, d)); + assertFalse(%HaveSameMap(a, c)); + assertKind(element_kind.fast_smi_only_elements, c); + var e = [1, 2, 3]; + e["case3"] = true; + var f = [1, 2, 3]; + f["case3"] = true; + assertTrue(%HaveSameMap(e, f)); + assertFalse(%HaveSameMap(a, e)); + assertFalse(%HaveSameMap(c, e)); + assertKind(element_kind.fast_smi_only_elements, e); + // Case 1: SMI->DOUBLE, DOUBLE->OBJECT, SMI->OBJECT. + a[0] = 1.5; + assertKind(element_kind.fast_double_elements, a); + a[0] = "foo"; + assertKind(element_kind.fast_elements, a); + b[0] = "bar"; + assertTrue(%HaveSameMap(a, b)); + // Case 2: SMI->DOUBLE, SMI->OBJECT, DOUBLE->OBJECT. + c[0] = 1.5; + assertKind(element_kind.fast_double_elements, c); + assertFalse(%HaveSameMap(c, d)); + d[0] = "foo"; + assertKind(element_kind.fast_elements, d); + assertFalse(%HaveSameMap(c, d)); + c[0] = "bar"; + assertTrue(%HaveSameMap(c, d)); + // Case 3: SMI->OBJECT, SMI->DOUBLE, DOUBLE->OBJECT. + e[0] = "foo"; + assertKind(element_kind.fast_elements, e); + assertFalse(%HaveSameMap(e, f)); + f[0] = 1.5; + assertKind(element_kind.fast_double_elements, f); + assertFalse(%HaveSameMap(e, f)); + f[0] = "bar"; + assertKind(element_kind.fast_elements, f); + assertTrue(%HaveSameMap(e, f)); +} diff --git a/deps/v8/test/mjsunit/elements-kind.js b/deps/v8/test/mjsunit/elements-kind.js deleted file mode 100644 index cfd47c778..000000000 --- a/deps/v8/test/mjsunit/elements-kind.js +++ /dev/null @@ -1,309 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc - -// Test element kind of objects. -// Since --smi-only-arrays affects builtins, its default setting at compile -// time sticks if built with snapshot. If --smi-only-arrays is deactivated -// by default, only a no-snapshot build actually has smi-only arrays enabled -// in this test case. Depending on whether smi-only arrays are actually -// enabled, this test takes the appropriate code path to check smi-only arrays. - -support_smi_only_arrays = %HasFastSmiOnlyElements([]); - -if (support_smi_only_arrays) { - print("Tests include smi-only arrays."); -} else { - print("Tests do NOT include smi-only arrays."); -} - -var elements_kind = { - fast_smi_only : 'fast smi only elements', - fast : 'fast elements', - fast_double : 'fast double elements', - dictionary : 'dictionary elements', - external_byte : 'external byte elements', - external_unsigned_byte : 'external unsigned byte elements', - external_short : 'external short elements', - external_unsigned_short : 'external unsigned short elements', - external_int : 'external int elements', - external_unsigned_int : 'external unsigned int elements', - external_float : 'external float elements', - external_double : 'external double elements', - external_pixel : 'external pixel elements' -} - -function getKind(obj) { - if (%HasFastSmiOnlyElements(obj)) return elements_kind.fast_smi_only; - if (%HasFastElements(obj)) return elements_kind.fast; - if (%HasFastDoubleElements(obj)) return elements_kind.fast_double; - if (%HasDictionaryElements(obj)) return elements_kind.dictionary; - // Every external kind is also an external array. 
- assertTrue(%HasExternalArrayElements(obj)); - if (%HasExternalByteElements(obj)) { - return elements_kind.external_byte; - } - if (%HasExternalUnsignedByteElements(obj)) { - return elements_kind.external_unsigned_byte; - } - if (%HasExternalShortElements(obj)) { - return elements_kind.external_short; - } - if (%HasExternalUnsignedShortElements(obj)) { - return elements_kind.external_unsigned_short; - } - if (%HasExternalIntElements(obj)) { - return elements_kind.external_int; - } - if (%HasExternalUnsignedIntElements(obj)) { - return elements_kind.external_unsigned_int; - } - if (%HasExternalFloatElements(obj)) { - return elements_kind.external_float; - } - if (%HasExternalDoubleElements(obj)) { - return elements_kind.external_double; - } - if (%HasExternalPixelElements(obj)) { - return elements_kind.external_pixel; - } -} - -function assertKind(expected, obj, name_opt) { - if (!support_smi_only_arrays && - expected == elements_kind.fast_smi_only) { - expected = elements_kind.fast; - } - assertEquals(expected, getKind(obj), name_opt); -} - -var me = {}; -assertKind(elements_kind.fast, me); -me.dance = 0xD15C0; -me.drink = 0xC0C0A; -assertKind(elements_kind.fast, me); - -var too = [1,2,3]; -assertKind(elements_kind.fast_smi_only, too); -too.dance = 0xD15C0; -too.drink = 0xC0C0A; -assertKind(elements_kind.fast_smi_only, too); - -// Make sure the element kind transitions from smionly when a non-smi is stored. -var you = new Array(); -assertKind(elements_kind.fast_smi_only, you); -for (var i = 0; i < 1337; i++) { - var val = i; - if (i == 1336) { - assertKind(elements_kind.fast_smi_only, you); - val = new Object(); - } - you[i] = val; -} -assertKind(elements_kind.fast, you); - -assertKind(elements_kind.dictionary, new Array(0xDECAF)); - -var fast_double_array = new Array(0xDECAF); -for (var i = 0; i < 0xDECAF; i++) fast_double_array[i] = i / 2; -assertKind(elements_kind.fast_double, fast_double_array); - -assertKind(elements_kind.external_byte, new Int8Array(9001)); -assertKind(elements_kind.external_unsigned_byte, new Uint8Array(007)); -assertKind(elements_kind.external_short, new Int16Array(666)); -assertKind(elements_kind.external_unsigned_short, new Uint16Array(42)); -assertKind(elements_kind.external_int, new Int32Array(0xF)); -assertKind(elements_kind.external_unsigned_int, new Uint32Array(23)); -assertKind(elements_kind.external_float, new Float32Array(7)); -assertKind(elements_kind.external_double, new Float64Array(0)); -assertKind(elements_kind.external_pixel, new PixelArray(512)); - -// Crankshaft support for smi-only array elements. -function monomorphic(array) { - for (var i = 0; i < 3; i++) { - array[i] = i + 10; - } - assertKind(elements_kind.fast_smi_only, array); - for (var i = 0; i < 3; i++) { - var a = array[i]; - assertEquals(i + 10, a); - } -} -var smi_only = [1, 2, 3]; -for (var i = 0; i < 3; i++) monomorphic(smi_only); -%OptimizeFunctionOnNextCall(monomorphic); -monomorphic(smi_only); - -if (support_smi_only_arrays) { - function construct_smis() { - var a = [0, 0, 0]; - a[0] = 0; // Send the COW array map to the steak house. - assertKind(elements_kind.fast_smi_only, a); - return a; - } - function construct_doubles() { - var a = construct_smis(); - a[0] = 1.5; - assertKind(elements_kind.fast_double, a); - return a; - } - function construct_objects() { - var a = construct_smis(); - a[0] = "one"; - assertKind(elements_kind.fast, a); - return a; - } - - // Test crankshafted transition SMI->DOUBLE. 
- function convert_to_double(array) { - array[1] = 2.5; - assertKind(elements_kind.fast_double, array); - assertEquals(2.5, array[1]); - } - var smis = construct_smis(); - for (var i = 0; i < 3; i++) convert_to_double(smis); - %OptimizeFunctionOnNextCall(convert_to_double); - smis = construct_smis(); - convert_to_double(smis); - // Test crankshafted transitions SMI->FAST and DOUBLE->FAST. - function convert_to_fast(array) { - array[1] = "two"; - assertKind(elements_kind.fast, array); - assertEquals("two", array[1]); - } - smis = construct_smis(); - for (var i = 0; i < 3; i++) convert_to_fast(smis); - var doubles = construct_doubles(); - for (var i = 0; i < 3; i++) convert_to_fast(doubles); - smis = construct_smis(); - doubles = construct_doubles(); - %OptimizeFunctionOnNextCall(convert_to_fast); - convert_to_fast(smis); - convert_to_fast(doubles); - // Test transition chain SMI->DOUBLE->FAST (crankshafted function will - // transition to FAST directly). - function convert_mixed(array, value, kind) { - array[1] = value; - assertKind(kind, array); - assertEquals(value, array[1]); - } - smis = construct_smis(); - for (var i = 0; i < 3; i++) { - convert_mixed(smis, 1.5, elements_kind.fast_double); - } - doubles = construct_doubles(); - for (var i = 0; i < 3; i++) { - convert_mixed(doubles, "three", elements_kind.fast); - } - smis = construct_smis(); - doubles = construct_doubles(); - %OptimizeFunctionOnNextCall(convert_mixed); - convert_mixed(smis, 1, elements_kind.fast); - convert_mixed(doubles, 1, elements_kind.fast); - assertTrue(%HaveSameMap(smis, doubles)); -} - -// Crankshaft support for smi-only elements in dynamic array literals. -function get(foo) { return foo; } // Used to generate dynamic values. - -function crankshaft_test() { - var a = [get(1), get(2), get(3)]; - assertKind(elements_kind.fast_smi_only, a); - var b = [get(1), get(2), get("three")]; - assertKind(elements_kind.fast, b); - var c = [get(1), get(2), get(3.5)]; - if (support_smi_only_arrays) { - assertKind(elements_kind.fast_double, c); - } else { - assertKind(elements_kind.fast, c); - } -} -for (var i = 0; i < 3; i++) { - crankshaft_test(); -} -%OptimizeFunctionOnNextCall(crankshaft_test); -crankshaft_test(); - -// Elements_kind transitions for arrays. - -// A map can have three different elements_kind transitions: SMI->DOUBLE, -// DOUBLE->OBJECT, and SMI->OBJECT. No matter in which order these three are -// created, they must always end up with the same FAST map. - -// This test is meaningless without FAST_SMI_ONLY_ELEMENTS. -if (support_smi_only_arrays) { - // Preparation: create one pair of identical objects for each case. - var a = [1, 2, 3]; - var b = [1, 2, 3]; - assertTrue(%HaveSameMap(a, b)); - assertKind(elements_kind.fast_smi_only, a); - var c = [1, 2, 3]; - c["case2"] = true; - var d = [1, 2, 3]; - d["case2"] = true; - assertTrue(%HaveSameMap(c, d)); - assertFalse(%HaveSameMap(a, c)); - assertKind(elements_kind.fast_smi_only, c); - var e = [1, 2, 3]; - e["case3"] = true; - var f = [1, 2, 3]; - f["case3"] = true; - assertTrue(%HaveSameMap(e, f)); - assertFalse(%HaveSameMap(a, e)); - assertFalse(%HaveSameMap(c, e)); - assertKind(elements_kind.fast_smi_only, e); - // Case 1: SMI->DOUBLE, DOUBLE->OBJECT, SMI->OBJECT. - a[0] = 1.5; - assertKind(elements_kind.fast_double, a); - a[0] = "foo"; - assertKind(elements_kind.fast, a); - b[0] = "bar"; - assertTrue(%HaveSameMap(a, b)); - // Case 2: SMI->DOUBLE, SMI->OBJECT, DOUBLE->OBJECT. 
- c[0] = 1.5; - assertKind(elements_kind.fast_double, c); - assertFalse(%HaveSameMap(c, d)); - d[0] = "foo"; - assertKind(elements_kind.fast, d); - assertFalse(%HaveSameMap(c, d)); - c[0] = "bar"; - assertTrue(%HaveSameMap(c, d)); - // Case 3: SMI->OBJECT, SMI->DOUBLE, DOUBLE->OBJECT. - e[0] = "foo"; - assertKind(elements_kind.fast, e); - assertFalse(%HaveSameMap(e, f)); - f[0] = 1.5; - assertKind(elements_kind.fast_double, f); - assertFalse(%HaveSameMap(e, f)); - f[0] = "bar"; - assertKind(elements_kind.fast, f); - assertTrue(%HaveSameMap(e, f)); -} - -// Throw away type information in the ICs for next stress run. -gc(); diff --git a/deps/v8/test/mjsunit/elements-transition.js b/deps/v8/test/mjsunit/elements-transition.js deleted file mode 100644 index 5f6cc4fa3..000000000 --- a/deps/v8/test/mjsunit/elements-transition.js +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --allow-natives-syntax --smi-only-arrays - -support_smi_only_arrays = %HasFastSmiOnlyElements([]); - -if (support_smi_only_arrays) { - function test(test_double, test_object, set, length) { - // We apply the same operations to two identical arrays. The first array - // triggers an IC miss, upon which the conversion stub is generated, but the - // actual conversion is done in runtime. The second array, arriving at - // the previously patched IC, is then converted using the conversion stub. - var array_1 = new Array(length); - var array_2 = new Array(length); - - assertTrue(%HasFastSmiOnlyElements(array_1)); - assertTrue(%HasFastSmiOnlyElements(array_2)); - for (var i = 0; i < length; i++) { - if (i == length - 5 && test_double) { - // Trigger conversion to fast double elements at length-5. - set(array_1, i, 0.5); - set(array_2, i, 0.5); - assertTrue(%HasFastDoubleElements(array_1)); - assertTrue(%HasFastDoubleElements(array_2)); - } else if (i == length - 3 && test_object) { - // Trigger conversion to fast object elements at length-3. 
- set(array_1, i, 'object'); - set(array_2, i, 'object'); - assertTrue(%HasFastElements(array_1)); - assertTrue(%HasFastElements(array_2)); - } else if (i != length - 7) { - // Set the element to an integer but leave a hole at length-7. - set(array_1, i, 2*i+1); - set(array_2, i, 2*i+1); - } - } - - for (var i = 0; i < length; i++) { - if (i == length - 5 && test_double) { - assertEquals(0.5, array_1[i]); - assertEquals(0.5, array_2[i]); - } else if (i == length - 3 && test_object) { - assertEquals('object', array_1[i]); - assertEquals('object', array_2[i]); - } else if (i != length - 7) { - assertEquals(2*i+1, array_1[i]); - assertEquals(2*i+1, array_2[i]); - } else { - assertEquals(undefined, array_1[i]); - assertEquals(undefined, array_2[i]); - } - } - - assertEquals(length, array_1.length); - assertEquals(length, array_2.length); - } - - test(false, false, function(a,i,v){ a[i] = v; }, 20); - test(true, false, function(a,i,v){ a[i] = v; }, 20); - test(false, true, function(a,i,v){ a[i] = v; }, 20); - test(true, true, function(a,i,v){ a[i] = v; }, 20); - - test(false, false, function(a,i,v){ a[i] = v; }, 10000); - test(true, false, function(a,i,v){ a[i] = v; }, 10000); - test(false, true, function(a,i,v){ a[i] = v; }, 10000); - test(true, true, function(a,i,v){ a[i] = v; }, 10000); - - // Check COW arrays - function get_cow() { return [1, 2, 3]; } - - function transition(x) { x[0] = 1.5; } - - var ignore = get_cow(); - transition(ignore); // Handled by runtime. - var a = get_cow(); - var b = get_cow(); - transition(a); // Handled by IC. - assertEquals(1.5, a[0]); - assertEquals(1, b[0]); -} else { - print("Test skipped because smi only arrays are not supported."); -}
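Taken together, the element-kind tests above (the added element-kind.js and the removed elements-kind.js / elements-transition.js) pin down the same transition lattice: an array literal of small integers starts with smi-only elements, storing a heap number moves it to double elements, storing any other value moves it to plain fast (object) elements, and every ordering of those transitions must converge on the same FAST map. A condensed sketch of that invariant, illustrative only; it mirrors "Case 1" of the removed test and, like those tests, only observes real smi-only behaviour in a shell run with --allow-natives-syntax --smi-only-arrays:

var a = [1, 2, 3];   // starts with fast smi-only elements (when smi-only arrays are enabled)
a[0] = 1.5;          // SMI -> DOUBLE
a[0] = "foo";        // DOUBLE -> OBJECT, i.e. plain fast elements
var b = [1, 2, 3];
b[0] = "bar";        // SMI -> OBJECT in a single step
// %HaveSameMap(a, b) is expected to be true: both orderings end on the same FAST map.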
\ No newline at end of file diff --git a/deps/v8/test/mjsunit/error-tostring.js b/deps/v8/test/mjsunit/error-tostring.js deleted file mode 100644 index a28564144..000000000 --- a/deps/v8/test/mjsunit/error-tostring.js +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -// Test default string representation of an Error object. - -var e = new Error(); -assertEquals('Error', e.toString()); - - -// Test printing of cyclic errors which return the empty string for -// compatibility with Safari and Firefox. - -e = new Error(); -e.name = e; -e.message = e; -e.stack = "Does not occur in output"; -e.arguments = "Does not occur in output"; -e.type = "Does not occur in output"; -assertEquals('', e.toString()); - -e = new Error(); -e.name = [ e ]; -e.message = [ e ]; -e.stack = "Does not occur in output"; -e.arguments = "Does not occur in output"; -e.type = "Does not occur in output"; -assertEquals('', e.toString()); - - -// Test the sequence in which getters and toString operations are called -// on a given Error object. Verify the produced string representation. - -function testErrorToString(nameValue, messageValue) { - var seq = []; - var e = { - get name() { - seq.push(1); - return (nameValue === undefined) ? nameValue : { - toString: function() { seq.push(2); return nameValue; } - }; - }, - get message() { - seq.push(3); - return (messageValue === undefined) ? 
messageValue : { - toString: function() { seq.push(4); return messageValue; } - }; - } - }; - var string = Error.prototype.toString.call(e); - return [string,seq]; -} - -assertEquals(["Error",[1,3]], testErrorToString(undefined, undefined)); -assertEquals(["e1",[1,2,3]], testErrorToString("e1", undefined)); -assertEquals(["e1: null",[1,2,3,4]], testErrorToString("e1", null)); -assertEquals(["e1",[1,2,3,4]], testErrorToString("e1", "")); -assertEquals(["Error: e2",[1,3,4]], testErrorToString(undefined, "e2")); -assertEquals(["null: e2",[1,2,3,4]], testErrorToString(null, "e2")); -assertEquals(["e2",[1,2,3,4]], testErrorToString("", "e2")); -assertEquals(["e1: e2",[1,2,3,4]], testErrorToString("e1", "e2")); diff --git a/deps/v8/test/mjsunit/function-bind.js b/deps/v8/test/mjsunit/function-bind.js index 4a8f2d2a6..e9d02213e 100644 --- a/deps/v8/test/mjsunit/function-bind.js +++ b/deps/v8/test/mjsunit/function-bind.js @@ -29,31 +29,29 @@ // Simple tests. function foo(x, y, z) { - return [this, arguments.length, x]; + return x + y + z; } -assertEquals(3, foo.length); - var f = foo.bind(foo); -assertEquals([foo, 3, 1], f(1, 2, 3)); +assertEquals(3, f(1, 1, 1)); assertEquals(3, f.length); -f = foo.bind(foo, 1); -assertEquals([foo, 3, 1], f(2, 3)); +f = foo.bind(foo, 2); +assertEquals(4, f(1, 1)); assertEquals(2, f.length); -f = foo.bind(foo, 1, 2); -assertEquals([foo, 3, 1], f(3)); +f = foo.bind(foo, 2, 2); +assertEquals(5, f(1)); assertEquals(1, f.length); -f = foo.bind(foo, 1, 2, 3); -assertEquals([foo, 3, 1], f()); +f = foo.bind(foo, 2, 2, 2); +assertEquals(6, f()); assertEquals(0, f.length); // Test that length works correctly even if more than the actual number // of arguments are given when binding. f = foo.bind(foo, 1, 2, 3, 4, 5, 6, 7, 8, 9); -assertEquals([foo, 9, 1], f()); +assertEquals(6, f()); assertEquals(0, f.length); // Use a different bound object. @@ -80,97 +78,64 @@ assertEquals(0, f.length); // When only giving the thisArg, any number of binds should have // the same effect. f = foo.bind(foo); -assertEquals([foo, 3, 1], f(1, 2, 3)); - -var not_foo = {}; -f = foo.bind(foo).bind(not_foo).bind(not_foo).bind(not_foo); -assertEquals([foo, 3, 1], f(1, 2, 3)); +assertEquals(3, f(1, 1, 1)); +f = foo.bind(foo).bind(foo).bind(foo).bind(foo); +assertEquals(3, f(1, 1, 1)); assertEquals(3, f.length); // Giving bound parameters should work at any place in the chain. -f = foo.bind(foo, 1).bind(not_foo).bind(not_foo).bind(not_foo); -assertEquals([foo, 3, 1], f(2, 3)); +f = foo.bind(foo, 1).bind(foo).bind(foo).bind(foo); +assertEquals(3, f(1, 1)); assertEquals(2, f.length); -f = foo.bind(foo).bind(not_foo, 1).bind(not_foo).bind(not_foo); -assertEquals([foo, 3, 1], f(2, 3)); +f = foo.bind(foo).bind(foo, 1).bind(foo).bind(foo); +assertEquals(3, f(1, 1)); assertEquals(2, f.length); -f = foo.bind(foo).bind(not_foo).bind(not_foo,1 ).bind(not_foo); -assertEquals([foo, 3, 1], f(2, 3)); +f = foo.bind(foo).bind(foo).bind(foo,1 ).bind(foo); +assertEquals(3, f(1, 1)); assertEquals(2, f.length); -f = foo.bind(foo).bind(not_foo).bind(not_foo).bind(not_foo, 1); -assertEquals([foo, 3, 1], f(2, 3)); +f = foo.bind(foo).bind(foo).bind(foo).bind(foo, 1); +assertEquals(3, f(1, 1)); assertEquals(2, f.length); -// Several parameters can be given, and given in different bind invocations. 
-f = foo.bind(foo, 1, 2).bind(not_foo).bind(not_foo).bind(not_foo); -assertEquals([foo, 3, 1], f(3)); -assertEquals(1, f.length); - -f = foo.bind(foo).bind(not_foo, 1, 2).bind(not_foo).bind(not_foo); -assertEquals([foo, 3, 1], f(1)); +// Several parameters can be given, and given in different bind invokations. +f = foo.bind(foo, 1, 1).bind(foo).bind(foo).bind(foo); +assertEquals(3, f(1)); assertEquals(1, f.length); -f = foo.bind(foo).bind(not_foo, 1, 2).bind(not_foo).bind(not_foo); -assertEquals([foo, 3, 1], f(3)); +f = foo.bind(foo).bind(foo, 1, 1).bind(foo).bind(foo); +assertEquals(3, f(1)); assertEquals(1, f.length); -f = foo.bind(foo).bind(not_foo).bind(not_foo, 1, 2).bind(not_foo); -assertEquals([foo, 3, 1], f(1)); +f = foo.bind(foo).bind(foo, 1, 1).bind(foo).bind(foo); +assertEquals(3, f(1)); assertEquals(1, f.length); -f = foo.bind(foo).bind(not_foo).bind(not_foo).bind(not_foo, 1, 2); -assertEquals([foo, 3, 1], f(3)); +f = foo.bind(foo).bind(foo).bind(foo, 1, 1).bind(foo); +assertEquals(3, f(1)); assertEquals(1, f.length); -f = foo.bind(foo, 1).bind(not_foo, 2).bind(not_foo).bind(not_foo); -assertEquals([foo, 3, 1], f(3)); +f = foo.bind(foo).bind(foo).bind(foo).bind(foo, 1, 1); +assertEquals(3, f(1)); assertEquals(1, f.length); -f = foo.bind(foo, 1).bind(not_foo).bind(not_foo, 2).bind(not_foo); -assertEquals([foo, 3, 1], f(3)); +f = foo.bind(foo, 1).bind(foo, 1).bind(foo).bind(foo); +assertEquals(3, f(1)); assertEquals(1, f.length); -f = foo.bind(foo, 1).bind(not_foo).bind(not_foo).bind(not_foo, 2); -assertEquals([foo, 3, 1], f(3)); +f = foo.bind(foo, 1).bind(foo).bind(foo, 1).bind(foo); +assertEquals(3, f(1)); assertEquals(1, f.length); -f = foo.bind(foo).bind(not_foo, 1).bind(not_foo).bind(not_foo, 2); -assertEquals([foo, 3, 1], f(3)); +f = foo.bind(foo, 1).bind(foo).bind(foo).bind(foo, 1); +assertEquals(3, f(1)); assertEquals(1, f.length); -// The wrong number of arguments can be given to bound functions too. -f = foo.bind(foo); -assertEquals(3, f.length); -assertEquals([foo, 0, undefined], f()); -assertEquals([foo, 1, 1], f(1)); -assertEquals([foo, 2, 1], f(1, 2)); -assertEquals([foo, 3, 1], f(1, 2, 3)); -assertEquals([foo, 4, 1], f(1, 2, 3, 4)); - -f = foo.bind(foo, 1); -assertEquals(2, f.length); -assertEquals([foo, 1, 1], f()); -assertEquals([foo, 2, 1], f(2)); -assertEquals([foo, 3, 1], f(2, 3)); -assertEquals([foo, 4, 1], f(2, 3, 4)); - -f = foo.bind(foo, 1, 2); +f = foo.bind(foo).bind(foo, 1).bind(foo).bind(foo, 1); +assertEquals(3, f(1)); assertEquals(1, f.length); -assertEquals([foo, 2, 1], f()); -assertEquals([foo, 3, 1], f(3)); -assertEquals([foo, 4, 1], f(3, 4)); - -f = foo.bind(foo, 1, 2, 3); -assertEquals(0, f.length); -assertEquals([foo, 3, 1], f()); -assertEquals([foo, 4, 1], f(4)); - -f = foo.bind(foo, 1, 2, 3, 4); -assertEquals(0, f.length); -assertEquals([foo, 4, 1], f()); // Test constructor calls. @@ -206,91 +171,13 @@ assertEquals(3, obj2.z); // Test bind chains when used as a constructor. + f = bar.bind(bar, 1).bind(bar, 2).bind(bar, 3); obj2 = new f(); assertEquals(1, obj2.x); assertEquals(2, obj2.y); assertEquals(3, obj2.z); -// Test obj2 is instanceof both bar and f. +// Test instanceof obj2 is bar, not f. assertTrue(obj2 instanceof bar); -assertTrue(obj2 instanceof f); - -// This-args are not relevant to instanceof. -f = bar.bind(foo.prototype, 1). - bind(String.prototype, 2). 
- bind(Function.prototype, 3); -var obj3 = new f(); -assertTrue(obj3 instanceof bar); -assertTrue(obj3 instanceof f); -assertFalse(obj3 instanceof foo); -assertFalse(obj3 instanceof Function); -assertFalse(obj3 instanceof String); - -// thisArg is converted to object. -f = foo.bind(undefined); -assertEquals([this, 0, undefined], f()); - -f = foo.bind(null); -assertEquals([this, 0, undefined], f()); - -f = foo.bind(42); -assertEquals([Object(42), 0, undefined], f()); - -f = foo.bind("foo"); -assertEquals([Object("foo"), 0, undefined], f()); - -f = foo.bind(true); -assertEquals([Object(true), 0, undefined], f()); - -// Strict functions don't convert thisArg. -function soo(x, y, z) { - "use strict"; - return [this, arguments.length, x]; -} - -var s = soo.bind(undefined); -assertEquals([undefined, 0, undefined], s()); - -s = soo.bind(null); -assertEquals([null, 0, undefined], s()); - -s = soo.bind(42); -assertEquals([42, 0, undefined], s()); - -s = soo.bind("foo"); -assertEquals(["foo", 0, undefined], s()); - -s = soo.bind(true); -assertEquals([true, 0, undefined], s()); - -// Test that .arguments and .caller are poisoned according to the ES5 spec. - -// Check that property descriptors are correct (unconfigurable, unenumerable, -// and both get and set is the ThrowTypeError function). -var cdesc = Object.getOwnPropertyDescriptor(f, "caller"); -var adesc = Object.getOwnPropertyDescriptor(f, "arguments"); - -assertFalse(cdesc.enumerable); -assertFalse(cdesc.configurable); - -assertFalse(adesc.enumerable); -assertFalse(adesc.configurable); - -assertSame(cdesc.get, cdesc.set); -assertSame(cdesc.get, adesc.get); -assertSame(cdesc.get, adesc.set); - -assertTrue(cdesc.get instanceof Function); -assertEquals(0, cdesc.get.length); -assertThrows(cdesc.get, TypeError); - -assertThrows(function() { return f.caller; }, TypeError); -assertThrows(function() { f.caller = 42; }, TypeError); -assertThrows(function() { return f.arguments; }, TypeError); -assertThrows(function() { f.arguments = 42; }, TypeError); - -// Shouldn't throw. Accessing the functions caller must throw if -// the caller is strict and the callee isn't. A bound function is built-in, -// but not considered strict. -(function foo() { return foo.caller; }).bind()(); +assertFalse(obj2 instanceof f); diff --git a/deps/v8/test/mjsunit/harmony/block-conflicts.js b/deps/v8/test/mjsunit/harmony/block-conflicts.js index e27d6a1d3..8b171f171 100644 --- a/deps/v8/test/mjsunit/harmony/block-conflicts.js +++ b/deps/v8/test/mjsunit/harmony/block-conflicts.js @@ -80,11 +80,6 @@ var letbinds = [ "let x", "let x = function() {}", "let x, y", "let y, x", - "const x = 0", - "const x = undefined", - "const x = function() {}", - "const x = 2, y = 3", - "const y = 4, x = 5", ]; var varbinds = [ "var x", "var x = 0", diff --git a/deps/v8/test/mjsunit/harmony/block-for.js b/deps/v8/test/mjsunit/harmony/block-for.js deleted file mode 100644 index 1f68037a2..000000000 --- a/deps/v8/test/mjsunit/harmony/block-for.js +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --harmony-scoping - -function props(x) { - var array = []; - for (let p in x) array.push(p); - return array.sort(); -} - -assertEquals(0, props({}).length); -assertEquals(1, props({x:1}).length); -assertEquals(2, props({x:1, y:2}).length); - -assertArrayEquals(["x"], props({x:1})); -assertArrayEquals(["x", "y"], props({x:1, y:2})); -assertArrayEquals(["x", "y", "zoom"], props({x:1, y:2, zoom:3})); - -assertEquals(0, props([]).length); -assertEquals(1, props([1]).length); -assertEquals(2, props([1,2]).length); - -assertArrayEquals(["0"], props([1])); -assertArrayEquals(["0", "1"], props([1,2])); -assertArrayEquals(["0", "1", "2"], props([1,2,3])); - -var o = {}; -var a = []; -let i = "outer_i"; -let s = "outer_s"; -for (let i = 0x0020; i < 0x01ff; i+=2) { - let s = 'char:' + String.fromCharCode(i); - a.push(s); - o[s] = i; -} -assertArrayEquals(a, props(o)); -assertEquals(i, "outer_i"); -assertEquals(s, "outer_s"); - -var a = []; -assertEquals(0, props(a).length); -a[Math.pow(2,30)-1] = 0; -assertEquals(1, props(a).length); -a[Math.pow(2,31)-1] = 0; -assertEquals(2, props(a).length); -a[1] = 0; -assertEquals(3, props(a).length); - -var result = ''; -for (let p in {a : [0], b : 1}) { result += p; } -assertEquals('ab', result); - -var result = ''; -for (let p in {a : {v:1}, b : 1}) { result += p; } -assertEquals('ab', result); - -var result = ''; -for (let p in { get a() {}, b : 1}) { result += p; } -assertEquals('ab', result); - -var result = ''; -for (let p in { get a() {}, set a(x) {}, b : 1}) { result += p; } -assertEquals('ab', result); - - -// Check that there is exactly one variable without initializer -// in a for-in statement with let variables. -assertThrows("function foo() { for (let in {}) { } }", SyntaxError); -assertThrows("function foo() { for (let x = 3 in {}) { } }", SyntaxError); -assertThrows("function foo() { for (let x, y in {}) { } }", SyntaxError); -assertThrows("function foo() { for (let x = 3, y in {}) { } }", SyntaxError); -assertThrows("function foo() { for (let x, y = 4 in {}) { } }", SyntaxError); -assertThrows("function foo() { for (let x = 3, y = 4 in {}) { } }", SyntaxError); - - -// In a normal for statement the iteration variable is not -// freshly allocated for each iteration. 
-function closures1() { - let a = []; - for (let i = 0; i < 5; ++i) { - a.push(function () { return i; }); - } - for (let j = 0; j < 5; ++j) { - assertEquals(5, a[j]()); - } -} -closures1(); - - -function closures2() { - let a = [], b = []; - for (let i = 0, j = 10; i < 5; ++i, ++j) { - a.push(function () { return i; }); - b.push(function () { return j; }); - } - for (let k = 0; k < 5; ++k) { - assertEquals(5, a[k]()); - assertEquals(15, b[k]()); - } -} -closures2(); - - -// In a for-in statement the iteration variable is fresh -// for earch iteration. -function closures3(x) { - let a = []; - for (let p in x) { - a.push(function () { return p; }); - } - let k = 0; - for (let q in x) { - assertEquals(q, a[k]()); - ++k; - } -} -closures3({a : [0], b : 1, c : {v : 1}, get d() {}, set e(x) {}}); diff --git a/deps/v8/test/mjsunit/harmony/block-let-declaration.js b/deps/v8/test/mjsunit/harmony/block-let-declaration.js index a1acc28da..7f3264f25 100644 --- a/deps/v8/test/mjsunit/harmony/block-let-declaration.js +++ b/deps/v8/test/mjsunit/harmony/block-let-declaration.js @@ -32,18 +32,15 @@ // Global let x; let y = 2; -const z = 4; // Block local { let y; let x = 3; - const z = 5; } assertEquals(undefined, x); assertEquals(2,y); -assertEquals(4,z); if (true) { let y; @@ -61,7 +58,7 @@ function TestLocalDoesNotThrow(str) { assertDoesNotThrow("(function(){" + str + "})()"); } -// Test let declarations in statement positions. +// Test let declarations statement positions. TestLocalThrows("if (true) let x;", SyntaxError); TestLocalThrows("if (true) {} else let x;", SyntaxError); TestLocalThrows("do let x; while (false)", SyntaxError); @@ -71,32 +68,7 @@ TestLocalThrows("for (;false;) let x;", SyntaxError); TestLocalThrows("switch (true) { case true: let x; }", SyntaxError); TestLocalThrows("switch (true) { default: let x; }", SyntaxError); -// Test const declarations with initialisers in statement positions. -TestLocalThrows("if (true) const x = 1;", SyntaxError); -TestLocalThrows("if (true) {} else const x = 1;", SyntaxError); -TestLocalThrows("do const x = 1; while (false)", SyntaxError); -TestLocalThrows("while (false) const x = 1;", SyntaxError); -TestLocalThrows("label: const x = 1;", SyntaxError); -TestLocalThrows("for (;false;) const x = 1;", SyntaxError); -TestLocalThrows("switch (true) { case true: const x = 1; }", SyntaxError); -TestLocalThrows("switch (true) { default: const x = 1; }", SyntaxError); - -// Test const declarations without initialisers. -TestLocalThrows("const x;", SyntaxError); -TestLocalThrows("const x = 1, y;", SyntaxError); -TestLocalThrows("const x, y = 1;", SyntaxError); - -// Test const declarations without initialisers in statement positions. -TestLocalThrows("if (true) const x;", SyntaxError); -TestLocalThrows("if (true) {} else const x;", SyntaxError); -TestLocalThrows("do const x; while (false)", SyntaxError); -TestLocalThrows("while (false) const x;", SyntaxError); -TestLocalThrows("label: const x;", SyntaxError); -TestLocalThrows("for (;false;) const x;", SyntaxError); -TestLocalThrows("switch (true) { case true: const x; }", SyntaxError); -TestLocalThrows("switch (true) { default: const x; }", SyntaxError); - -// Test var declarations in statement positions. +// Test var declarations statement positions. TestLocalDoesNotThrow("if (true) var x;"); TestLocalDoesNotThrow("if (true) {} else var x;"); TestLocalDoesNotThrow("do var x; while (false)"); @@ -121,15 +93,24 @@ function f() { { function g1() { } } + // Non-strict statement positions. 
+ if (true) function g2() { } + if (true) {} else function g3() { } + do function g4() { } while (false) + while (false) function g5() { } + label: function g6() { } + for (;false;) function g7() { } + switch (true) { case true: function g8() { } } + switch (true) { default: function g9() { } } } f(); // Test function declarations in statement position in strict mode. -TestLocalThrows("function f() { if (true) function g() {}", SyntaxError); -TestLocalThrows("function f() { if (true) {} else function g() {}", SyntaxError); -TestLocalThrows("function f() { do function g() {} while (false)", SyntaxError); -TestLocalThrows("function f() { while (false) function g() {}", SyntaxError); -TestLocalThrows("function f() { label: function g() {}", SyntaxError); -TestLocalThrows("function f() { for (;false;) function g() {}", SyntaxError); -TestLocalThrows("function f() { switch (true) { case true: function g() {} }", SyntaxError); -TestLocalThrows("function f() { switch (true) { default: function g() {} }", SyntaxError); +TestLocalThrows("function f() { 'use strict'; if (true) function g() {}", SyntaxError); +TestLocalThrows("function f() { 'use strict'; if (true) {} else function g() {}", SyntaxError); +TestLocalThrows("function f() { 'use strict'; do function g() {} while (false)", SyntaxError); +TestLocalThrows("function f() { 'use strict'; while (false) function g() {}", SyntaxError); +TestLocalThrows("function f() { 'use strict'; label: function g() {}", SyntaxError); +TestLocalThrows("function f() { 'use strict'; for (;false;) function g() {}", SyntaxError); +TestLocalThrows("function f() { 'use strict'; switch (true) { case true: function g() {} }", SyntaxError); +TestLocalThrows("function f() { 'use strict'; switch (true) { default: function g() {} }", SyntaxError); diff --git a/deps/v8/test/mjsunit/harmony/block-let-semantics.js b/deps/v8/test/mjsunit/harmony/block-let-semantics.js index f45b72ff0..94020a4ca 100644 --- a/deps/v8/test/mjsunit/harmony/block-let-semantics.js +++ b/deps/v8/test/mjsunit/harmony/block-let-semantics.js @@ -61,7 +61,6 @@ TestAll('let x = x + 1'); TestAll('let x = x += 1'); TestAll('let x = x++'); TestAll('let x = ++x'); -TestAll('const x = x + 1'); // Use before initialization in prior statement. TestAll('x + 1; let x;'); @@ -69,21 +68,18 @@ TestAll('x = 1; let x;'); TestAll('x += 1; let x;'); TestAll('++x; let x;'); TestAll('x++; let x;'); -TestAll('let y = x; const x = 1;'); TestAll('f(); let x; function f() { return x + 1; }'); TestAll('f(); let x; function f() { x = 1; }'); TestAll('f(); let x; function f() { x += 1; }'); TestAll('f(); let x; function f() { ++x; }'); TestAll('f(); let x; function f() { x++; }'); -TestAll('f(); const x = 1; function f() { return x; }'); TestAll('f()(); let x; function f() { return function() { return x + 1; } }'); TestAll('f()(); let x; function f() { return function() { x = 1; } }'); TestAll('f()(); let x; function f() { return function() { x += 1; } }'); TestAll('f()(); let x; function f() { return function() { ++x; } }'); TestAll('f()(); let x; function f() { return function() { x++; } }'); -TestAll('f()(); const x = 1; function f() { return function() { return x; } }'); // Use before initialization with a dynamic lookup. TestAll('eval("x + 1;"); let x;'); @@ -91,7 +87,6 @@ TestAll('eval("x = 1;"); let x;'); TestAll('eval("x += 1;"); let x;'); TestAll('eval("++x;"); let x;'); TestAll('eval("x++;"); let x;'); -TestAll('eval("x"); const x = 1;'); // Use before initialization with check for eval-shadowed bindings. 
TestAll('function f() { eval("var y = 2;"); x + 1; }; f(); let x;'); @@ -144,31 +139,10 @@ function f2() { function h() { return b + c; } - let c = 3; + let b = 3; } assertEquals(5, n()); - - { - o = i; - function i() { - return d; - } - let d = 4; - } - assertEquals(4, o()); - - try { - throw 5; - } catch(e) { - p = j; - function j() { - return e + f; - } - let f = 6; - } - assertEquals(11, p()); } -f2(); // Test that resolution of let bound variables works with scopes that call eval. function outer() { diff --git a/deps/v8/test/mjsunit/harmony/block-scoping.js b/deps/v8/test/mjsunit/harmony/block-scoping.js index 0d0526afa..c70b3b6ea 100644 --- a/deps/v8/test/mjsunit/harmony/block-scoping.js +++ b/deps/v8/test/mjsunit/harmony/block-scoping.js @@ -44,16 +44,12 @@ f1(); function f2(one) { var x = one + 1; let y = one + 2; - const u = one + 4; { let z = one + 3; - const v = one + 5; assertEquals(1, eval('one')); assertEquals(2, eval('x')); assertEquals(3, eval('y')); assertEquals(4, eval('z')); - assertEquals(5, eval('u')); - assertEquals(6, eval('v')); } } f2(1); @@ -63,17 +59,12 @@ f2(1); function f3(one) { var x = one + 1; let y = one + 2; - const u = one + 4; { let z = one + 3; - const v = one + 5; assertEquals(1, one); assertEquals(2, x); assertEquals(3, y); assertEquals(4, z); - assertEquals(5, u); - assertEquals(6, v); - } } f3(1); @@ -83,17 +74,13 @@ f3(1); function f4(one) { var x = one + 1; let y = one + 2; - const u = one + 4; { let z = one + 3; - const v = one + 5; function f() { assertEquals(1, eval('one')); assertEquals(2, eval('x')); assertEquals(3, eval('y')); assertEquals(4, eval('z')); - assertEquals(5, eval('u')); - assertEquals(6, eval('v')); }; } } @@ -104,17 +91,13 @@ f4(1); function f5(one) { var x = one + 1; let y = one + 2; - const u = one + 4; { let z = one + 3; - const v = one + 5; function f() { assertEquals(1, one); assertEquals(2, x); assertEquals(3, y); assertEquals(4, z); - assertEquals(5, u); - assertEquals(6, v); }; } } @@ -124,10 +107,8 @@ f5(1); // Return from block. 
function f6() { let x = 1; - const u = 3; { let y = 2; - const v = 4; return x + y; } } @@ -139,26 +120,13 @@ function f7(a) { let b = 1; var c = 1; var d = 1; - const e = 1; - { // let variables shadowing argument, let, const and var variables + { // let variables shadowing argument, let and var variables let a = 2; let b = 2; let c = 2; - let e = 2; - assertEquals(2,a); - assertEquals(2,b); - assertEquals(2,c); - assertEquals(2,e); - } - { // const variables shadowing argument, let, const and var variables - const a = 2; - const b = 2; - const c = 2; - const e = 2; assertEquals(2,a); assertEquals(2,b); assertEquals(2,c); - assertEquals(2,e); } try { throw 'stuff1'; @@ -188,12 +156,6 @@ function f7(a) { } catch (c) { // catch variable shadowing var variable assertEquals('stuff3',c); - { - // const variable shadowing catch variable - const c = 3; - assertEquals(3,c); - } - assertEquals('stuff3',c); try { throw 'stuff4'; } catch(c) { @@ -216,16 +178,14 @@ function f7(a) { c = 2; } assertEquals(1,c); - (function(a,b,c,e) { - // arguments shadowing argument, let, const and var variable + (function(a,b,c) { + // arguments shadowing argument, let and var variable a = 2; b = 2; c = 2; - e = 2; assertEquals(2,a); assertEquals(2,b); assertEquals(2,c); - assertEquals(2,e); // var variable shadowing var variable var d = 2; })(1,1); @@ -233,30 +193,24 @@ function f7(a) { assertEquals(1,b); assertEquals(1,c); assertEquals(1,d); - assertEquals(1,e); } f7(1); -// Ensure let and const variables are block local -// and var variables function local. +// Ensure let variables are block local and var variables function local. function f8() { var let_accessors = []; var var_accessors = []; - var const_accessors = []; for (var i = 0; i < 10; i++) { let x = i; var y = i; - const z = i; let_accessors[i] = function() { return x; } var_accessors[i] = function() { return y; } - const_accessors[i] = function() { return z; } } for (var j = 0; j < 10; j++) { y = j + 10; assertEquals(j, let_accessors[j]()); assertEquals(y, var_accessors[j]()); - assertEquals(j, const_accessors[j]()); } } f8(); diff --git a/deps/v8/test/mjsunit/harmony/collections.js b/deps/v8/test/mjsunit/harmony/collections.js deleted file mode 100644 index 1ad1c6f34..000000000 --- a/deps/v8/test/mjsunit/harmony/collections.js +++ /dev/null @@ -1,273 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --harmony-collections --expose-gc - - -// Test valid getter and setter calls on Sets. -function TestValidSetCalls(m) { - assertDoesNotThrow(function () { m.add(new Object) }); - assertDoesNotThrow(function () { m.has(new Object) }); - assertDoesNotThrow(function () { m.delete(new Object) }); -} -TestValidSetCalls(new Set); - - -// Test valid getter and setter calls on Maps and WeakMaps -function TestValidMapCalls(m) { - assertDoesNotThrow(function () { m.get(new Object) }); - assertDoesNotThrow(function () { m.set(new Object) }); - assertDoesNotThrow(function () { m.has(new Object) }); - assertDoesNotThrow(function () { m.delete(new Object) }); -} -TestValidMapCalls(new Map); -TestValidMapCalls(new WeakMap); - - -// Test invalid getter and setter calls for WeakMap only -function TestInvalidCalls(m) { - assertThrows(function () { m.get(undefined) }, TypeError); - assertThrows(function () { m.set(undefined, 0) }, TypeError); - assertThrows(function () { m.get(0) }, TypeError); - assertThrows(function () { m.set(0, 0) }, TypeError); - assertThrows(function () { m.get('a-key') }, TypeError); - assertThrows(function () { m.set('a-key', 0) }, TypeError); -} -TestInvalidCalls(new WeakMap); - - -// Test expected behavior for Sets -function TestSet(set, key) { - assertFalse(set.has(key)); - set.add(key); - assertTrue(set.has(key)); - set.delete(key); - assertFalse(set.has(key)); -} -function TestSetBehavior(set) { - for (i = 0; i < 20; i++) { - TestSet(set, new Object); - } -} -TestSet(new Set, 23); -TestSet(new Set, 'foo'); -TestSetBehavior(new Set); - - -// Test expected mapping behavior for Maps and WeakMaps -function TestMapping(map, key, value) { - map.set(key, value); - assertSame(value, map.get(key)); -} -function TestMapBehavior1(m) { - TestMapping(m, new Object, 23); - TestMapping(m, new Object, 'the-value'); - TestMapping(m, new Object, new Object); -} -TestMapBehavior1(new Map); -TestMapBehavior1(new WeakMap); - - -// Test expected mapping behavior for Maps only -function TestMapBehavior2(m) { - for (var i = 0; i < 20; i++) { - TestMapping(m, i, new Object); - TestMapping(m, i / 10, new Object); - TestMapping(m, 'key-' + i, new Object); - } - var keys = [ +0, -0, +Infinity, -Infinity, true, false ]; - for (var i = 0; i < keys.length; i++) { - TestMapping(m, keys[i], new Object); - } -} -TestMapBehavior2(new Map); - - -// Test expected querying behavior of Maps and WeakMaps -function TestQuery(m) { - var key = new Object; - TestMapping(m, key, 'to-be-present'); - assertTrue(m.has(key)); - assertFalse(m.has(new Object)); - TestMapping(m, key, undefined); - assertFalse(m.has(key)); - assertFalse(m.has(new Object)); -} -TestQuery(new Map); -TestQuery(new WeakMap); - - -// Test expected deletion behavior of Maps and WeakMaps -function TestDelete(m) { - var key = new Object; - TestMapping(m, key, 'to-be-deleted'); - assertTrue(m.delete(key)); - assertFalse(m.delete(key)); - assertFalse(m.delete(new Object)); - assertSame(m.get(key), undefined); -} 
-TestDelete(new Map); -TestDelete(new WeakMap); - - -// Test GC of Maps and WeakMaps with entry -function TestGC1(m) { - var key = new Object; - m.set(key, 'not-collected'); - gc(); - assertSame('not-collected', m.get(key)); -} -TestGC1(new Map); -TestGC1(new WeakMap); - - -// Test GC of Maps and WeakMaps with chained entries -function TestGC2(m) { - var head = new Object; - for (key = head, i = 0; i < 10; i++, key = m.get(key)) { - m.set(key, new Object); - } - gc(); - var count = 0; - for (key = head; key != undefined; key = m.get(key)) { - count++; - } - assertEquals(11, count); -} -TestGC2(new Map); -TestGC2(new WeakMap); - - -// Test property attribute [[Enumerable]] -function TestEnumerable(func) { - function props(x) { - var array = []; - for (var p in x) array.push(p); - return array.sort(); - } - assertArrayEquals([], props(func)); - assertArrayEquals([], props(func.prototype)); - assertArrayEquals([], props(new func())); -} -TestEnumerable(Set); -TestEnumerable(Map); -TestEnumerable(WeakMap); - - -// Test arbitrary properties on Maps and WeakMaps -function TestArbitrary(m) { - function TestProperty(map, property, value) { - map[property] = value; - assertEquals(value, map[property]); - } - for (i = 0; i < 20; i++) { - TestProperty(m, i, 'val' + i); - TestProperty(m, 'foo' + i, 'bar' + i); - } - TestMapping(m, new Object, 'foobar'); -} -TestArbitrary(new Map); -TestArbitrary(new WeakMap); - - -// Test direct constructor call -assertTrue(Set() instanceof Set); -assertTrue(Map() instanceof Map); -assertTrue(WeakMap() instanceof WeakMap); - - -// Test whether NaN values as keys are treated correctly. -var s = new Set; -assertFalse(s.has(NaN)); -assertFalse(s.has(NaN + 1)); -assertFalse(s.has(23)); -s.add(NaN); -assertTrue(s.has(NaN)); -assertTrue(s.has(NaN + 1)); -assertFalse(s.has(23)); -var m = new Map; -assertFalse(m.has(NaN)); -assertFalse(m.has(NaN + 1)); -assertFalse(m.has(23)); -m.set(NaN, 'a-value'); -assertTrue(m.has(NaN)); -assertTrue(m.has(NaN + 1)); -assertFalse(m.has(23)); - - -// Test some common JavaScript idioms for Sets -var s = new Set; -assertTrue(s instanceof Set); -assertTrue(Set.prototype.add instanceof Function) -assertTrue(Set.prototype.has instanceof Function) -assertTrue(Set.prototype.delete instanceof Function) - - -// Test some common JavaScript idioms for Maps -var m = new Map; -assertTrue(m instanceof Map); -assertTrue(Map.prototype.set instanceof Function) -assertTrue(Map.prototype.get instanceof Function) -assertTrue(Map.prototype.has instanceof Function) -assertTrue(Map.prototype.delete instanceof Function) - - -// Test some common JavaScript idioms for WeakMaps -var m = new WeakMap; -assertTrue(m instanceof WeakMap); -assertTrue(WeakMap.prototype.set instanceof Function) -assertTrue(WeakMap.prototype.get instanceof Function) -assertTrue(WeakMap.prototype.has instanceof Function) -assertTrue(WeakMap.prototype.delete instanceof Function) - - -// Regression test for WeakMap prototype. -assertTrue(WeakMap.prototype.constructor === WeakMap) -assertTrue(Object.getPrototypeOf(WeakMap.prototype) === Object.prototype) - - -// Regression test for issue 1617: The prototype of the WeakMap constructor -// needs to be unique (i.e. different from the one of the Object constructor). 
-assertFalse(WeakMap.prototype === Object.prototype); -var o = Object.create({}); -assertFalse("get" in o); -assertFalse("set" in o); -assertEquals(undefined, o.get); -assertEquals(undefined, o.set); -var o = Object.create({}, { myValue: { - value: 10, - enumerable: false, - configurable: true, - writable: true -}}); -assertEquals(10, o.myValue); - - -// Stress Test -// There is a proposed stress-test available at the es-discuss mailing list -// which cannot be reasonably automated. Check it out by hand if you like: -// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
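An illustrative sketch, separate from the diff itself: the deleted collections.js above exercises the then-experimental --harmony-collections API. The core behaviour it covers looks roughly like this in standard ES2015+ collections, runnable without any flags; flag-era quirks such as calling Set() without new are not reproduced.

var key = {};

var s = new Set();
s.add(key);
console.log(s.has(key));      // true
s.delete(key);
console.log(s.has(key));      // false

var m = new Map();
m.set(NaN, 'a-value');
console.log(m.has(NaN));      // true -- NaN matches itself as a Map key
console.log(m.has(NaN + 1));  // true -- NaN + 1 is still NaN

var wm = new WeakMap();
wm.set(key, 123);
console.log(wm.get(key));     // 123
// wm.set('a-key', 1) throws a TypeError: WeakMap keys must be objects.
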
\ No newline at end of file diff --git a/deps/v8/test/mjsunit/harmony/debug-blockscopes.js b/deps/v8/test/mjsunit/harmony/debug-blockscopes.js index 4c49d9a4b..020f52774 100644 --- a/deps/v8/test/mjsunit/harmony/debug-blockscopes.js +++ b/deps/v8/test/mjsunit/harmony/debug-blockscopes.js @@ -464,112 +464,3 @@ listener_delegate = function(exec_state) { }; closure_1(1)(); EndTest(); - - -// Simple for-in loop over the keys of an object. -BeginTest("For loop 1"); - -function for_loop_1() { - for (let x in {y:undefined}) { - debugger; - } -} - -listener_delegate = function(exec_state) { - CheckScopeChain([debug.ScopeType.Block, - debug.ScopeType.Local, - debug.ScopeType.Global], exec_state); - CheckScopeContent({x:'y'}, 0, exec_state); - // The function scope contains a temporary iteration variable. - CheckScopeContent({x:'y'}, 1, exec_state); -}; -for_loop_1(); -EndTest(); - - -// For-in loop over the keys of an object with a block scoped let variable -// shadowing the iteration variable. -BeginTest("For loop 2"); - -function for_loop_2() { - for (let x in {y:undefined}) { - let x = 3; - debugger; - } -} - -listener_delegate = function(exec_state) { - CheckScopeChain([debug.ScopeType.Block, - debug.ScopeType.Block, - debug.ScopeType.Local, - debug.ScopeType.Global], exec_state); - CheckScopeContent({x:3}, 0, exec_state); - CheckScopeContent({x:'y'}, 1, exec_state); - // The function scope contains a temporary iteration variable. - CheckScopeContent({x:'y'}, 2, exec_state); -}; -for_loop_2(); -EndTest(); - - -// Simple for loop. -BeginTest("For loop 3"); - -function for_loop_3() { - for (let x = 3; x < 4; ++x) { - debugger; - } -} - -listener_delegate = function(exec_state) { - CheckScopeChain([debug.ScopeType.Block, - debug.ScopeType.Local, - debug.ScopeType.Global], exec_state); - CheckScopeContent({x:3}, 0, exec_state); - CheckScopeContent({}, 1, exec_state); -}; -for_loop_3(); -EndTest(); - - -// For loop with a block scoped let variable shadowing the iteration variable. -BeginTest("For loop 4"); - -function for_loop_4() { - for (let x = 3; x < 4; ++x) { - let x = 5; - debugger; - } -} - -listener_delegate = function(exec_state) { - CheckScopeChain([debug.ScopeType.Block, - debug.ScopeType.Block, - debug.ScopeType.Local, - debug.ScopeType.Global], exec_state); - CheckScopeContent({x:5}, 0, exec_state); - CheckScopeContent({x:3}, 1, exec_state); - CheckScopeContent({}, 2, exec_state); -}; -for_loop_4(); -EndTest(); - - -// For loop with two variable declarations. -BeginTest("For loop 5"); - -function for_loop_5() { - for (let x = 3, y = 5; x < 4; ++x) { - debugger; - } -} - -listener_delegate = function(exec_state) { - CheckScopeChain([debug.ScopeType.Block, - debug.ScopeType.Local, - debug.ScopeType.Global], exec_state); - CheckScopeContent({x:3,y:5}, 0, exec_state); - CheckScopeContent({}, 1, exec_state); -}; -for_loop_5(); -EndTest(); diff --git a/deps/v8/test/mjsunit/harmony/proxies-for.js b/deps/v8/test/mjsunit/harmony/proxies-for.js deleted file mode 100644 index 3d419c6dc..000000000 --- a/deps/v8/test/mjsunit/harmony/proxies-for.js +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --harmony-proxies - - -// Helper. - -function TestWithProxies(test, x, y, z) { - test(Proxy.create, x, y, z) - test(function(h) {return Proxy.createFunction(h, function() {})}, x, y, z) -} - - -// Iterate over a proxy. - -function TestForIn(properties, handler) { - TestWithProxies(TestForIn2, properties, handler) -} - -function TestForIn2(create, properties, handler) { - var p = create(handler) - var found = [] - for (var x in p) found.push(x) - assertArrayEquals(properties, found) -} - -TestForIn(["0", "a"], { - enumerate: function() { return [0, "a"] } -}) - -TestForIn(["null", "a"], { - enumerate: function() { return this.enumerate2() }, - enumerate2: function() { return [null, "a"] } -}) - -TestForIn(["b", "d"], { - getPropertyNames: function() { return ["a", "b", "c", "d", "e"] }, - getPropertyDescriptor: function(k) { - switch (k) { - case "a": return {enumerable: false, value: "3"}; - case "b": return {enumerable: true, get get() {}}; - case "c": return {value: 4}; - case "d": return {get enumerable() { return true }}; - default: return undefined; - } - } -}) - -TestForIn(["b", "a", "0", "c"], Proxy.create({ - get: function(pr, pk) { - return function() { return ["b", "a", 0, "c"] } - } -})) - - - -// Iterate over an object with a proxy prototype. - -function TestForInDerived(properties, handler) { - TestWithProxies(TestForInDerived2, properties, handler) -} - -function TestForInDerived2(create, properties, handler) { - var p = create(handler) - var o = Object.create(p) - o.z = 0 - var found = [] - for (var x in o) found.push(x) - assertArrayEquals(["z"].concat(properties), found) - - var oo = Object.create(o) - oo.y = 0 - var found = [] - for (var x in oo) found.push(x) - assertArrayEquals(["y", "z"].concat(properties), found) -} - -TestForInDerived(["0", "a"], { - enumerate: function() { return [0, "a"] }, - getPropertyDescriptor: function(k) { - return k == "0" || k == "a" ? {} : undefined - } -}) - -TestForInDerived(["null", "a"], { - enumerate: function() { return this.enumerate2() }, - enumerate2: function() { return [null, "a"] }, - getPropertyDescriptor: function(k) { - return k == "null" || k == "a" ? 
{} : undefined - } -}) - -TestForInDerived(["b", "d"], { - getPropertyNames: function() { return ["a", "b", "c", "d", "e"] }, - getPropertyDescriptor: function(k) { - switch (k) { - case "a": return {enumerable: false, value: "3"}; - case "b": return {enumerable: true, get get() {}}; - case "c": return {value: 4}; - case "d": return {get enumerable() { return true }}; - default: return undefined; - } - } -}) - - - -// Throw exception in enumerate trap. - -function TestForInThrow(handler) { - TestWithProxies(TestForInThrow2, handler) -} - -function TestForInThrow2(create, handler) { - var p = create(handler) - var o = Object.create(p) - assertThrows(function(){ for (var x in p) {} }, "myexn") - assertThrows(function(){ for (var x in o) {} }, "myexn") -} - -TestForInThrow({ - enumerate: function() { throw "myexn" } -}) - -TestForInThrow({ - enumerate: function() { return this.enumerate2() }, - enumerate2: function() { throw "myexn" } -}) - -TestForInThrow({ - getPropertyNames: function() { throw "myexn" } -}) - -TestForInThrow({ - getPropertyNames: function() { return ["a"] }, - getPropertyDescriptor: function() { throw "myexn" } -}) - -TestForInThrow(Proxy.create({ - get: function(pr, pk) { - return function() { throw "myexn" } - } -})) diff --git a/deps/v8/test/mjsunit/harmony/proxies-function.js b/deps/v8/test/mjsunit/harmony/proxies-function.js index 6a88d19b3..541bca8cc 100644 --- a/deps/v8/test/mjsunit/harmony/proxies-function.js +++ b/deps/v8/test/mjsunit/harmony/proxies-function.js @@ -38,13 +38,6 @@ function CreateFrozen(handler, callTrap, constructTrap) { } -// Ensures that checking the "length" property of a function proxy doesn't -// crash due to lack of a [[Get]] method. -var handler = { - get : function(r, n) { return n == "length" ? 2 : undefined } -} - - // Calling (call, Function.prototype.call, Function.prototype.apply, // Function.prototype.bind). @@ -53,167 +46,81 @@ var receiver function TestCall(isStrict, callTrap) { assertEquals(42, callTrap(5, 37)) - assertEquals(isStrict ? undefined : global_object, receiver) - - var handler = { - get: function(r, k) { - return k == "length" ? 2 : Function.prototype[k] - } - } - var f = Proxy.createFunction(handler, callTrap) + // TODO(rossberg): unrelated bug: this does not succeed for optimized code: + // assertEquals(isStrict ? undefined : global_object, receiver) + var f = Proxy.createFunction({}, callTrap) receiver = 333 assertEquals(42, f(11, 31)) assertEquals(isStrict ? undefined : global_object, receiver) - var o = {f: f} - receiver = 333 - assertEquals(42, o.f(10, 32)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, o["f"](9, 33)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, (1, o).f(8, 34)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, (1, o)["f"](7, 35)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, f.call(o, 32, 10)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, f.call(null, 33, 9)) - assertSame(isStrict ? null : global_object, receiver) - receiver = 333 - assertEquals(44, f.call(2, 21, 23)) - assertSame(2, receiver.valueOf()) - receiver = 333 + var o = {} assertEquals(42, Function.prototype.call.call(f, o, 20, 22)) - assertSame(o, receiver) - receiver = 333 + assertEquals(o, receiver) assertEquals(43, Function.prototype.call.call(f, null, 20, 23)) - assertSame(isStrict ? null : global_object, receiver) + assertEquals(isStrict ? 
null : global_object, receiver) assertEquals(44, Function.prototype.call.call(f, 2, 21, 23)) assertEquals(2, receiver.valueOf()) receiver = 333 - assertEquals(32, f.apply(o, [16, 16])) - assertSame(o, receiver) - receiver = 333 assertEquals(32, Function.prototype.apply.call(f, o, [17, 15])) - assertSame(o, receiver) - + assertEquals(o, receiver) var ff = Function.prototype.bind.call(f, o, 12) - assertTrue(ff.length <= 1) // TODO(rossberg): Not spec'ed yet, be lax. receiver = 333 assertEquals(42, ff(30)) - assertSame(o, receiver) + assertEquals(o, receiver) receiver = 333 assertEquals(32, Function.prototype.apply.call(ff, {}, [20])) - assertSame(o, receiver) - - var fff = Function.prototype.bind.call(ff, o, 30) - assertEquals(0, fff.length) - receiver = 333 - assertEquals(42, fff()) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, Function.prototype.call.call(fff, {})) - assertSame(o, receiver) + assertEquals(o, receiver) var f = CreateFrozen({}, callTrap) receiver = 333 assertEquals(42, f(11, 31)) - assertSame(isStrict ? undefined : global_object, receiver) - var o = {f: f} - receiver = 333 - assertEquals(42, o.f(10, 32)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, o["f"](9, 33)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, (1, o).f(8, 34)) - assertSame(o, receiver) - receiver = 333 - assertEquals(42, (1, o)["f"](7, 35)) - assertSame(o, receiver) + // TODO(rossberg): unrelated bug: this does not succeed for optimized code. + // assertEquals(isStrict ? undefined : global, receiver) receiver = 333 assertEquals(42, Function.prototype.call.call(f, o, 20, 22)) - assertSame(o, receiver) + assertEquals(o, receiver) receiver = 333 assertEquals(32, Function.prototype.apply.call(f, o, [17, 15])) - assertSame(o, receiver) + assertEquals(o, receiver) receiver = 333 assertEquals(42, ff(30)) - assertSame(o, receiver) + assertEquals(o, receiver) receiver = 333 assertEquals(32, Function.prototype.apply.call(ff, {}, [20])) - assertSame(o, receiver) + assertEquals(o, receiver) } TestCall(false, function(x, y) { - receiver = this - return x + y + receiver = this; return x + y }) TestCall(true, function(x, y) { - "use strict" - receiver = this - return x + y -}) - -TestCall(false, function() { - receiver = this; return arguments[0] + arguments[1] + "use strict"; + receiver = this; return x + y }) -TestCall(false, Proxy.createFunction(handler, function(x, y) { - receiver = this - return x + y +TestCall(false, Proxy.createFunction({}, function(x, y) { + receiver = this; return x + y })) -TestCall(true, Proxy.createFunction(handler, function(x, y) { - "use strict" - receiver = this - return x + y +TestCall(true, Proxy.createFunction({}, function(x, y) { + "use strict"; + receiver = this; return x + y })) -TestCall(false, CreateFrozen(handler, function(x, y) { - receiver = this - return x + y +TestCall(false, CreateFrozen({}, function(x, y) { + receiver = this; return x + y })) - -// Using intrinsics as call traps. - -function TestCallIntrinsic(type, callTrap) { - var f = Proxy.createFunction({}, callTrap) - var x = f() - assertTrue(typeof x == type) -} - -TestCallIntrinsic("boolean", Boolean) -TestCallIntrinsic("number", Number) -TestCallIntrinsic("string", String) -TestCallIntrinsic("object", Object) -TestCallIntrinsic("function", Function) - - - -// Throwing from call trap. 
- function TestCallThrow(callTrap) { var f = Proxy.createFunction({}, callTrap) assertThrows(function(){ f(11) }, "myexn") - assertThrows(function(){ ({x: f}).x(11) }, "myexn") - assertThrows(function(){ ({x: f})["x"](11) }, "myexn") assertThrows(function(){ Function.prototype.call.call(f, {}, 2) }, "myexn") assertThrows(function(){ Function.prototype.apply.call(f, {}, [1]) }, "myexn") var f = CreateFrozen({}, callTrap) assertThrows(function(){ f(11) }, "myexn") - assertThrows(function(){ ({x: f}).x(11) }, "myexn") - assertThrows(function(){ ({x: f})["x"](11) }, "myexn") assertThrows(function(){ Function.prototype.call.call(f, {}, 2) }, "myexn") assertThrows(function(){ Function.prototype.apply.call(f, {}, [1]) }, "myexn") } @@ -230,48 +137,24 @@ var prototype = {} var receiver var handlerWithPrototype = { - fix: function() { return { prototype: { value: prototype } }; }, - get: function(r, n) { - if (n == "length") return 2; - assertEquals("prototype", n); - return prototype; - } + fix: function() { return {prototype: prototype} }, + get: function(r, n) { assertEquals("prototype", n); return prototype } } var handlerSansPrototype = { - fix: function() { return { length: { value: 2 } } }, - get: function(r, n) { - if (n == "length") return 2; - assertEquals("prototype", n); - return undefined; - } -} - -function ReturnUndef(x, y) { - "use strict"; - receiver = this; - this.sum = x + y; -} - -function ReturnThis(x, y) { - "use strict"; - receiver = this; - this.sum = x + y; - return this; -} - -function ReturnNew(x, y) { - "use strict"; - receiver = this; - return {sum: x + y}; + fix: function() { return {} }, + get: function(r, n) { assertEquals("prototype", n); return undefined } } +function ReturnUndef(x, y) { "use strict"; receiver = this; this.sum = x + y } +function ReturnThis(x, y) { "use strict"; receiver = this; this.sum = x + y; return this } +function ReturnNew(x, y) { "use strict"; receiver = this; return {sum: x + y} } function ReturnNewWithProto(x, y) { "use strict"; receiver = this; - var result = Object.create(prototype); - result.sum = x + y; - return result; + var result = Object.create(prototype) + result.sum = x + y + return result } function TestConstruct(proto, constructTrap) { @@ -282,13 +165,15 @@ function TestConstruct(proto, constructTrap) { function TestConstruct2(proto, constructTrap, handler) { var f = Proxy.createFunction(handler, function() {}, constructTrap) var o = new f(11, 31) - assertEquals(undefined, receiver) + // TODO(rossberg): doesn't hold, due to unrelated bug. + // assertEquals(undefined, receiver) assertEquals(42, o.sum) assertSame(proto, Object.getPrototypeOf(o)) var f = CreateFrozen(handler, function() {}, constructTrap) var o = new f(11, 32) - assertEquals(undefined, receiver) + // TODO(rossberg): doesn't hold, due to unrelated bug. 
+ // assertEquals(undefined, receiver) assertEquals(43, o.sum) assertSame(proto, Object.getPrototypeOf(o)) } @@ -296,16 +181,13 @@ function TestConstruct2(proto, constructTrap, handler) { TestConstruct(Object.prototype, ReturnNew) TestConstruct(prototype, ReturnNewWithProto) -TestConstruct(Object.prototype, Proxy.createFunction(handler, ReturnNew)) -TestConstruct(prototype, Proxy.createFunction(handler, ReturnNewWithProto)) - -TestConstruct(Object.prototype, CreateFrozen(handler, ReturnNew)) -TestConstruct(prototype, CreateFrozen(handler, ReturnNewWithProto)) +TestConstruct(Object.prototype, Proxy.createFunction({}, ReturnNew)) +TestConstruct(prototype, Proxy.createFunction({}, ReturnNewWithProto)) +TestConstruct(Object.prototype, CreateFrozen({}, ReturnNew)) +TestConstruct(prototype, CreateFrozen({}, ReturnNewWithProto)) -// Construction with derived construct trap. - function TestConstructFromCall(proto, returnsThis, callTrap) { TestConstructFromCall2(proto, returnsThis, callTrap, handlerWithPrototype) TestConstructFromCall2(proto, returnsThis, callTrap, handlerSansPrototype) @@ -330,14 +212,10 @@ TestConstructFromCall(Object.prototype, true, ReturnThis) TestConstructFromCall(Object.prototype, false, ReturnNew) TestConstructFromCall(prototype, false, ReturnNewWithProto) -TestConstructFromCall(Object.prototype, true, - Proxy.createFunction(handler, ReturnUndef)) -TestConstructFromCall(Object.prototype, true, - Proxy.createFunction(handler, ReturnThis)) -TestConstructFromCall(Object.prototype, false, - Proxy.createFunction(handler, ReturnNew)) -TestConstructFromCall(prototype, false, - Proxy.createFunction(handler, ReturnNewWithProto)) +TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnUndef)) +TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnThis)) +TestConstructFromCall(Object.prototype, false, Proxy.createFunction({}, ReturnNew)) +TestConstructFromCall(prototype, false, Proxy.createFunction({}, ReturnNewWithProto)) TestConstructFromCall(Object.prototype, true, CreateFrozen({}, ReturnUndef)) TestConstructFromCall(Object.prototype, true, CreateFrozen({}, ReturnThis)) @@ -354,44 +232,26 @@ TestConstructFromCall(prototype, true, ReturnThis) TestConstructFromCall(Object.prototype, false, ReturnNew) TestConstructFromCall(prototype, false, ReturnNewWithProto) -TestConstructFromCall(Object.prototype, true, - Proxy.createFunction(handler, ReturnUndef)) -TestConstructFromCall(Object.prototype, true, - Proxy.createFunction(handler, ReturnThis)) -TestConstructFromCall(Object.prototype, false, - Proxy.createFunction(handler, ReturnNew)) -TestConstructFromCall(prototype, false, - Proxy.createFunction(handler, ReturnNewWithProto)) - -TestConstructFromCall(prototype, true, - Proxy.createFunction(handlerWithPrototype, ReturnUndef)) -TestConstructFromCall(prototype, true, - Proxy.createFunction(handlerWithPrototype, ReturnThis)) -TestConstructFromCall(Object.prototype, false, - Proxy.createFunction(handlerWithPrototype, ReturnNew)) -TestConstructFromCall(prototype, false, - Proxy.createFunction(handlerWithPrototype, - ReturnNewWithProto)) - -TestConstructFromCall(prototype, true, - CreateFrozen(handlerWithPrototype, ReturnUndef)) -TestConstructFromCall(prototype, true, - CreateFrozen(handlerWithPrototype, ReturnThis)) -TestConstructFromCall(Object.prototype, false, - CreateFrozen(handlerWithPrototype, ReturnNew)) -TestConstructFromCall(prototype, false, - CreateFrozen(handlerWithPrototype, ReturnNewWithProto)) - - - -// Throwing from the construct 
trap. +TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnUndef)) +TestConstructFromCall(Object.prototype, true, Proxy.createFunction({}, ReturnThis)) +TestConstructFromCall(Object.prototype, false, Proxy.createFunction({}, ReturnNew)) +TestConstructFromCall(prototype, false, Proxy.createFunction({}, ReturnNewWithProto)) + +TestConstructFromCall(prototype, true, Proxy.createFunction(handlerWithPrototype, ReturnUndef)) +TestConstructFromCall(prototype, true, Proxy.createFunction(handlerWithPrototype, ReturnThis)) +TestConstructFromCall(Object.prototype, false, Proxy.createFunction(handlerWithPrototype, ReturnNew)) +TestConstructFromCall(prototype, false, Proxy.createFunction(handlerWithPrototype, ReturnNewWithProto)) + +TestConstructFromCall(prototype, true, CreateFrozen(handlerWithPrototype, ReturnUndef)) +TestConstructFromCall(prototype, true, CreateFrozen(handlerWithPrototype, ReturnThis)) +TestConstructFromCall(Object.prototype, false, CreateFrozen(handlerWithPrototype, ReturnNew)) +TestConstructFromCall(prototype, false, CreateFrozen(handlerWithPrototype, ReturnNewWithProto)) + function TestConstructThrow(trap) { - TestConstructThrow2(Proxy.createFunction({ fix: function() {return {};} }, - trap)) - TestConstructThrow2(Proxy.createFunction({ fix: function() {return {};} }, - function() {}, - trap)) + TestConstructThrow2(Proxy.createFunction({fix: function() {return {}}}, trap)) + TestConstructThrow2(Proxy.createFunction({fix: function() {return {}}}, + function() {}, trap)) } function TestConstructThrow2(f) { @@ -406,13 +266,13 @@ TestConstructThrow(CreateFrozen({}, function() { throw "myexn" })) -// Using function proxies as getters and setters. +// Getters and setters. var value var receiver function TestAccessorCall(getterCallTrap, setterCallTrap) { - var handler = { fix: function() { return {} } } + var handler = {fix: function() { return {} }} var pgetter = Proxy.createFunction(handler, getterCallTrap) var psetter = Proxy.createFunction(handler, setterCallTrap) diff --git a/deps/v8/test/mjsunit/harmony/proxies-hash.js b/deps/v8/test/mjsunit/harmony/proxies-hash.js index abfc0f5f0..2bf183013 100644 --- a/deps/v8/test/mjsunit/harmony/proxies-hash.js +++ b/deps/v8/test/mjsunit/harmony/proxies-hash.js @@ -25,98 +25,42 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Flags: --harmony-proxies --harmony-collections +// Flags: --harmony-proxies --harmony-weakmaps // Helper. -function TestWithProxies(test, construct, handler) { - test(construct, handler, Proxy.create) - test(construct, handler, function(h) { - return Proxy.createFunction(h, function() {}) - }) +function TestWithProxies(test, handler) { + test(handler, Proxy.create) + test(handler, function(h) {return Proxy.createFunction(h, function() {})}) } -// Sets. +// Weak maps. 
-function TestSet(construct, fix) { - TestWithProxies(TestSet2, construct, fix) +function TestWeakMap(fix) { + TestWithProxies(TestWeakMap2, fix) } -function TestSet2(construct, fix, create) { +function TestWeakMap2(fix, create) { var handler = {fix: function() { return {} }} var p1 = create(handler) var p2 = create(handler) var p3 = create(handler) fix(p3) - var s = construct(); - s.add(p1); - s.add(p2); - assertTrue(s.has(p1)); - assertTrue(s.has(p2)); - assertFalse(s.has(p3)); - - fix(p1) - fix(p2) - assertTrue(s.has(p1)); - assertTrue(s.has(p2)); - assertFalse(s.has(p3)); - - s.delete(p2); - assertTrue(s.has(p1)); - assertFalse(s.has(p2)); - assertFalse(s.has(p3)); -} - -TestSet(Set, Object.seal) -TestSet(Set, Object.freeze) -TestSet(Set, Object.preventExtensions) - - -// Maps and weak maps. - -function TestMap(construct, fix) { - TestWithProxies(TestMap2, construct, fix) -} - -function TestMap2(construct, fix, create) { - var handler = {fix: function() { return {} }} - var p1 = create(handler) - var p2 = create(handler) - var p3 = create(handler) - fix(p3) - - var m = construct(); + var m = new WeakMap m.set(p1, 123); m.set(p2, 321); - assertTrue(m.has(p1)); - assertTrue(m.has(p2)); - assertFalse(m.has(p3)); assertSame(123, m.get(p1)); assertSame(321, m.get(p2)); fix(p1) fix(p2) - assertTrue(m.has(p1)); - assertTrue(m.has(p2)); - assertFalse(m.has(p3)); assertSame(123, m.get(p1)); assertSame(321, m.get(p2)); - - m.delete(p2); - assertTrue(m.has(p1)); - assertFalse(m.has(p2)); - assertFalse(m.has(p3)); - assertSame(123, m.get(p1)); - assertSame(undefined, m.get(p2)); } -TestMap(Map, Object.seal) -TestMap(Map, Object.freeze) -TestMap(Map, Object.preventExtensions) - -TestMap(WeakMap, Object.seal) -TestMap(WeakMap, Object.freeze) -TestMap(WeakMap, Object.preventExtensions) +TestWeakMap(Object.seal) +TestWeakMap(Object.freeze) +TestWeakMap(Object.preventExtensions) diff --git a/deps/v8/test/mjsunit/harmony/proxies.js b/deps/v8/test/mjsunit/harmony/proxies.js index 1ce7a32d8..ad8d86a5d 100644 --- a/deps/v8/test/mjsunit/harmony/proxies.js +++ b/deps/v8/test/mjsunit/harmony/proxies.js @@ -28,6 +28,9 @@ // Flags: --harmony-proxies +// TODO(rossberg): for-in not implemented on proxies. + + // Helper. 
function TestWithProxies(test, x, y, z) { @@ -135,10 +138,6 @@ function TestGet2(create, handler) { assertEquals("b", key) assertEquals(42, p[99]) assertEquals("99", key) - assertEquals(42, (function(n) { return p[n] })("c")) - assertEquals("c", key) - assertEquals(42, (function(n) { return p[n] })(101)) - assertEquals("101", key) var o = Object.create(p, {x: {value: 88}}) assertEquals(42, o.a) @@ -149,11 +148,6 @@ function TestGet2(create, handler) { assertEquals("99", key) assertEquals(88, o.x) assertEquals(88, o["x"]) - assertEquals(42, (function(n) { return o[n] })("c")) - assertEquals("c", key) - assertEquals(42, (function(n) { return o[n] })(101)) - assertEquals("101", key) - assertEquals(88, (function(n) { return o[n] })("x")) } TestGet({ @@ -207,10 +201,6 @@ function TestGetCall2(create, handler) { assertEquals(55, p[101].call(p)) assertEquals(55, p.withargs(45, 5)) assertEquals(55, p.withargs.call(p, 11, 22)) - assertEquals(55, (function(n) { return p[n]() })("f")) - assertEquals(55, (function(n) { return p[n].call(p) })("f")) - assertEquals(55, (function(n) { return p[n](15, 20) })("withargs")) - assertEquals(55, (function(n) { return p[n].call(p, 13, 21) })("withargs")) assertEquals("6655", "66" + p) // calls p.toString var o = Object.create(p, {g: {value: function(x) { return x + 88 }}}) @@ -226,13 +216,6 @@ function TestGetCall2(create, handler) { assertEquals(90, o.g(2)) assertEquals(91, o.g.call(o, 3)) assertEquals(92, o.g.call(p, 4)) - assertEquals(55, (function(n) { return o[n]() })("f")) - assertEquals(55, (function(n) { return o[n].call(o) })("f")) - assertEquals(55, (function(n) { return o[n](15, 20) })("withargs")) - assertEquals(55, (function(n) { return o[n].call(o, 13, 21) })("withargs")) - assertEquals(93, (function(n) { return o[n](5) })("g")) - assertEquals(94, (function(n) { return o[n].call(o, 6) })("g")) - assertEquals(95, (function(n) { return o[n].call(p, 7) })("g")) assertEquals("6655", "66" + o) // calls o.toString } @@ -299,15 +282,14 @@ function TestGetThrow2(create, handler) { assertThrows(function(){ p.a }, "myexn") assertThrows(function(){ p["b"] }, "myexn") assertThrows(function(){ p[3] }, "myexn") - assertThrows(function(){ (function(n) { p[n] })("c") }, "myexn") - assertThrows(function(){ (function(n) { p[n] })(99) }, "myexn") var o = Object.create(p, {x: {value: 88}, '4': {value: 89}}) assertThrows(function(){ o.a }, "myexn") assertThrows(function(){ o["b"] }, "myexn") assertThrows(function(){ o[3] }, "myexn") - assertThrows(function(){ (function(n) { o[n] })("c") }, "myexn") - assertThrows(function(){ (function(n) { o[n] })(99) }, "myexn") + assertEquals(88, o.x) + assertEquals(88, o["x"]) + assertEquals(89, o[4]) } TestGetThrow({ @@ -371,13 +353,6 @@ function TestSet2(create, handler) { assertEquals(44, p[77] = 44) assertEquals("77", key) assertEquals(44, val) - - assertEquals(45, (function(n) { return p[n] = 45 })("c")) - assertEquals("c", key) - assertEquals(45, val) - assertEquals(46, (function(n) { return p[n] = 46 })(99)) - assertEquals("99", key) - assertEquals(46, val) } TestSet({ @@ -459,8 +434,6 @@ function TestSetThrow2(create, handler) { assertThrows(function(){ p.a = 42 }, "myexn") assertThrows(function(){ p["b"] = 42 }, "myexn") assertThrows(function(){ p[22] = 42 }, "myexn") - assertThrows(function(){ (function(n) { p[n] = 45 })("c") }, "myexn") - assertThrows(function(){ (function(n) { p[n] = 46 })(99) }, "myexn") } TestSetThrow({ @@ -746,17 +719,17 @@ function TestDefine2(create, handler) { assertEquals("zzz", key) 
assertEquals(0, Object.getOwnPropertyNames(desc).length) - var d = create({ - get: function(r, k) { return (k === "value") ? 77 : void 0 }, - getOwnPropertyNames: function() { return ["value"] }, - enumerate: function() { return ["value"] } - }) - assertEquals(1, Object.getOwnPropertyNames(d).length) - assertEquals(77, d.value) - assertEquals(p, Object.defineProperty(p, "p", d)) - assertEquals("p", key) - assertEquals(1, Object.getOwnPropertyNames(desc).length) - assertEquals(77, desc.value) +// TODO(rossberg): This test requires for-in on proxies. +// var d = create({ +// get: function(r, k) { return (k === "value") ? 77 : void 0 }, +// getOwnPropertyNames: function() { return ["value"] } +// }) +// assertEquals(1, Object.getOwnPropertyNames(d).length) +// assertEquals(77, d.value) +// assertEquals(p, Object.defineProperty(p, "p", d)) +// assertEquals("p", key) +// assertEquals(1, Object.getOwnPropertyNames(desc).length) +// assertEquals(77, desc.value) var props = { '11': {}, @@ -801,16 +774,17 @@ function TestDefineThrow2(create, handler) { assertThrows(function(){ Object.defineProperty(p, "a", {value: 44})}, "myexn") assertThrows(function(){ Object.defineProperty(p, 0, {value: 44})}, "myexn") - var d1 = create({ - get: function(r, k) { throw "myexn" }, - getOwnPropertyNames: function() { return ["value"] } - }) - assertThrows(function(){ Object.defineProperty(p, "p", d1) }, "myexn") - var d2 = create({ - get: function(r, k) { return 77 }, - getOwnPropertyNames: function() { throw "myexn" } - }) - assertThrows(function(){ Object.defineProperty(p, "p", d2) }, "myexn") +// TODO(rossberg): These tests require for-in on proxies. +// var d1 = create({ +// get: function(r, k) { throw "myexn" }, +// getOwnPropertyNames: function() { return ["value"] } +// }) +// assertThrows(function(){ Object.defineProperty(p, "p", d1) }, "myexn") +// var d2 = create({ +// get: function(r, k) { return 77 }, +// getOwnPropertyNames: function() { throw "myexn" } +// }) +// assertThrows(function(){ Object.defineProperty(p, "p", d2) }, "myexn") var props = {bla: {get value() { throw "otherexn" }}} assertThrows(function(){ Object.defineProperties(p, props) }, "otherexn") @@ -1494,7 +1468,7 @@ function TestPrototype() { var p1 = Proxy.create({}) var p2 = Proxy.create({}, o1) var p3 = Proxy.create({}, p2) - var p4 = Proxy.create({}, null) + var p4 = Proxy.create({}, 666) var o2 = Object.create(p3) assertSame(Object.getPrototypeOf(o1), Object.prototype) @@ -1632,9 +1606,7 @@ TestKeys(["[object Object]"], { TestKeys(["a", "0"], { getOwnPropertyNames: function() { return ["a", 23, "zz", "", 0] }, - getOwnPropertyDescriptor: function(k) { - return k == "" ? undefined : {enumerable: k.length == 1} - } + getOwnPropertyDescriptor: function(k) { return {enumerable: k.length == 1} } }) TestKeys(["23", "zz", ""], { @@ -1648,12 +1620,10 @@ TestKeys(["23", "zz", ""], { TestKeys(["a", "b", "c", "5"], { get getOwnPropertyNames() { - return function() { return ["0", 4, "a", "b", "c", 5, "ety"] } + return function() { return ["0", 4, "a", "b", "c", 5] } }, get getOwnPropertyDescriptor() { - return function(k) { - return k == "ety" ? undefined : {enumerable: k >= "44"} - } + return function(k) { return {enumerable: k >= "44"} } } }) diff --git a/deps/v8/test/mjsunit/harmony/weakmaps.js b/deps/v8/test/mjsunit/harmony/weakmaps.js new file mode 100644 index 000000000..7b5dcaf0c --- /dev/null +++ b/deps/v8/test/mjsunit/harmony/weakmaps.js @@ -0,0 +1,167 @@ +// Copyright 2011 the V8 project authors. All rights reserved. 
+// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --harmony-weakmaps --expose-gc + + +// Test valid getter and setter calls +var m = new WeakMap; +assertDoesNotThrow(function () { m.get(new Object) }); +assertDoesNotThrow(function () { m.set(new Object) }); +assertDoesNotThrow(function () { m.has(new Object) }); +assertDoesNotThrow(function () { m.delete(new Object) }); + + +// Test invalid getter and setter calls +var m = new WeakMap; +assertThrows(function () { m.get(undefined) }, TypeError); +assertThrows(function () { m.set(undefined, 0) }, TypeError); +assertThrows(function () { m.get(0) }, TypeError); +assertThrows(function () { m.set(0, 0) }, TypeError); +assertThrows(function () { m.get('a-key') }, TypeError); +assertThrows(function () { m.set('a-key', 0) }, TypeError); + + +// Test expected mapping behavior +var m = new WeakMap; +function TestMapping(map, key, value) { + map.set(key, value); + assertSame(value, map.get(key)); +} +TestMapping(m, new Object, 23); +TestMapping(m, new Object, 'the-value'); +TestMapping(m, new Object, new Object); + + +// Test expected querying behavior +var m = new WeakMap; +var key = new Object; +TestMapping(m, key, 'to-be-present'); +assertTrue(m.has(key)); +assertFalse(m.has(new Object)); +TestMapping(m, key, undefined); +assertFalse(m.has(key)); +assertFalse(m.has(new Object)); + + +// Test expected deletion behavior +var m = new WeakMap; +var key = new Object; +TestMapping(m, key, 'to-be-deleted'); +assertTrue(m.delete(key)); +assertFalse(m.delete(key)); +assertFalse(m.delete(new Object)); +assertSame(m.get(key), undefined); + + +// Test GC of map with entry +var m = new WeakMap; +var key = new Object; +m.set(key, 'not-collected'); +gc(); +assertSame('not-collected', m.get(key)); + + +// Test GC of map with chained entries +var m = new WeakMap; +var head = new Object; +for (key = head, i = 0; i < 10; i++, key = m.get(key)) { + m.set(key, new Object); +} +gc(); +var count = 0; +for (key = head; key != undefined; key = m.get(key)) { + count++; +} +assertEquals(11, count); + + +// Test property attribute 
[[Enumerable]] +var m = new WeakMap; +function props(x) { + var array = []; + for (var p in x) array.push(p); + return array.sort(); +} +assertArrayEquals([], props(WeakMap)); +assertArrayEquals([], props(WeakMap.prototype)); +assertArrayEquals([], props(m)); + + +// Test arbitrary properties on weak maps +var m = new WeakMap; +function TestProperty(map, property, value) { + map[property] = value; + assertEquals(value, map[property]); +} +for (i = 0; i < 20; i++) { + TestProperty(m, i, 'val' + i); + TestProperty(m, 'foo' + i, 'bar' + i); +} +TestMapping(m, new Object, 'foobar'); + + +// Test direct constructor call +var m = WeakMap(); +assertTrue(m instanceof WeakMap); + + +// Test some common JavaScript idioms +var m = new WeakMap; +assertTrue(m instanceof WeakMap); +assertTrue(WeakMap.prototype.set instanceof Function) +assertTrue(WeakMap.prototype.get instanceof Function) +assertTrue(WeakMap.prototype.has instanceof Function) +assertTrue(WeakMap.prototype.delete instanceof Function) + + +// Regression test for WeakMap prototype. +assertTrue(WeakMap.prototype.constructor === WeakMap) +assertTrue(Object.getPrototypeOf(WeakMap.prototype) === Object.prototype) + + +// Regression test for issue 1617: The prototype of the WeakMap constructor +// needs to be unique (i.e. different from the one of the Object constructor). +assertFalse(WeakMap.prototype === Object.prototype); +var o = Object.create({}); +assertFalse("get" in o); +assertFalse("set" in o); +assertEquals(undefined, o.get); +assertEquals(undefined, o.set); +var o = Object.create({}, { myValue: { + value: 10, + enumerable: false, + configurable: true, + writable: true +}}); +assertEquals(10, o.myValue); + + +// Stress Test +// There is a proposed stress-test available at the es-discuss mailing list +// which cannot be reasonably automated. Check it out by hand if you like: +// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html diff --git a/deps/v8/test/mjsunit/mjsunit.status b/deps/v8/test/mjsunit/mjsunit.status index 8a1b68bfb..941e0e8cc 100644 --- a/deps/v8/test/mjsunit/mjsunit.status +++ b/deps/v8/test/mjsunit/mjsunit.status @@ -65,20 +65,6 @@ regress/regress-524: (PASS || TIMEOUT), SKIP if $mode == debug debug-liveedit-check-stack: SKIP debug-liveedit-patch-positions-replace: SKIP -# Liveedit messes with the frame hights - see bug 1791 -debug-liveedit-1: SKIP -debug-liveedit-2: SKIP -debug-liveedit-3: SKIP -debug-liveedit-breakpoints: SKIP -debug-liveedit-check-stack: SKIP -debug-liveedit-diff: SKIP -debug-liveedit-newsource: SKIP -debug-liveedit-patch-positions: SKIP -debug-liveedit-patch-positions-replace: SKIP -debug-liveedit-utils: SKIP - - - ############################################################################## [ $arch == arm ] diff --git a/deps/v8/test/mjsunit/object-define-properties.js b/deps/v8/test/mjsunit/object-define-properties.js index 6d5032e04..128df694d 100644 --- a/deps/v8/test/mjsunit/object-define-properties.js +++ b/deps/v8/test/mjsunit/object-define-properties.js @@ -54,19 +54,3 @@ var x = Object.defineProperties(obj, desc); assertEquals(x.foo, 10); assertEquals(x.bar, 42); - - -// Make sure that all property descriptors are calculated before any -// modifications are done. 
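Note: the check removed below this comment relied on the ES5 rule that Object.defineProperties converts and validates every descriptor before defining any property, so one malformed descriptor leaves the object untouched; its removal suggests the build this revert restores does not guarantee that ordering. A minimal sketch of the rule itself, with illustrative names, runnable on any strict ES5 engine:

var target = {};
try {
  Object.defineProperties(target, {
    good: {value: 1},
    bad:  {value: 2, get: function() { return 3; }}  // value + get is invalid
  });
} catch (e) {
  print(e instanceof TypeError);  // true: the malformed descriptor is rejected
}
print("good" in target);          // false under ES5: nothing was defined yet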
- -var object = {}; - -assertThrows(function() { - Object.defineProperties(object, { - foo: { value: 1 }, - bar: { value: 2, get: function() { return 3; } } - }); - }, TypeError); - -assertEquals(undefined, object.foo); -assertEquals(undefined, object.bar); diff --git a/deps/v8/test/mjsunit/optimized-typeof.js b/deps/v8/test/mjsunit/optimized-typeof.js deleted file mode 100644 index b0c0725c5..000000000 --- a/deps/v8/test/mjsunit/optimized-typeof.js +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --allow-natives-syntax - -function typeofDirectly() { - return typeof({}) === "undefined"; -} - -typeofDirectly(); -typeofDirectly(); -%OptimizeFunctionOnNextCall(typeofDirectly); -typeofDirectly(); - -function typeofViaVariable() { - var foo = typeof({}) - return foo === "undefined"; -} - -typeofViaVariable(); -typeofViaVariable(); -%OptimizeFunctionOnNextCall(typeofViaVariable); -typeofViaVariable(); diff --git a/deps/v8/test/mjsunit/regexp-static.js b/deps/v8/test/mjsunit/regexp-static.js index 8f283f6ce..0f849687c 100644 --- a/deps/v8/test/mjsunit/regexp-static.js +++ b/deps/v8/test/mjsunit/regexp-static.js @@ -25,6 +25,18 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Test that we throw exceptions when calling test and exec with no +// input. This is not part of the spec, but we do it for +// compatibility with JSC. +assertThrows("/a/.test()"); +assertThrows("/a/.exec()"); + +// Test that we do not throw exceptions once the static RegExp.input +// field has been set. +RegExp.input = "a"; +assertDoesNotThrow("/a/.test()"); +assertDoesNotThrow("/a/.exec()"); + // Test the (deprecated as of JS 1.5) properties of the RegExp function. 
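For context: the hunk restored above reverts to the JSC-compatible behaviour where test() and exec() with no argument throw until the static RegExp.input field has been set, after which the missing argument implicitly falls back to the previous input (the status-file hunks further down adjust the matching waivers). A small sketch of that behaviour, assuming the d8 build this revert produces:

var re = /a/;
try {
  re.test();                      // no argument, no prior input
  print("did not throw");
} catch (e) {
  print("threw as expected");     // the restored behaviour
}
RegExp.input = "a";               // set the static "last input" field
print(re.test());                 // true: the call now uses RegExp.input
print(re.exec());                 // matches "a" for the same reason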
var re = /((\d+)\.(\d+))/; var s = 'abc123.456def'; @@ -154,8 +166,3 @@ assertTrue(typeof RegExp.input == typeof String(), "RegExp.input coerces values var foo = "lsdfj sldkfj sdklfj læsdfjl sdkfjlsdk fjsdl fjsdljskdj flsj flsdkj flskd regexp: /foobar/\nldkfj sdlkfj sdkl"; assertTrue(/^([a-z]+): (.*)/.test(foo.substring(foo.indexOf("regexp:"))), "regexp: setup"); assertEquals("regexp", RegExp.$1, "RegExp.$1"); - - -// Check that calling with no argument is the same as calling with undefined. -assertTrue(/^undefined$/.test()); -assertEquals(["undefined"], /^undefined$/.exec()); diff --git a/deps/v8/test/mjsunit/regress/regress-100409.js b/deps/v8/test/mjsunit/regress/regress-100409.js deleted file mode 100644 index c29250f28..000000000 --- a/deps/v8/test/mjsunit/regress/regress-100409.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --allow-natives-syntax - -function outer () { - var val = 0; - - function foo () { - val = 0; - val; - var z = false; - var y = true; - if (!z) { - while (z = !z) { - if (y) val++; - } - } - return val++; - } - - return foo; -} - - -var foo = outer(); - -assertEquals(1, foo()); -assertEquals(1, foo()); - %OptimizeFunctionOnNextCall(foo); -assertEquals(1, foo()); diff --git a/deps/v8/test/mjsunit/regress/regress-100702.js b/deps/v8/test/mjsunit/regress/regress-100702.js deleted file mode 100644 index 46494ab71..000000000 --- a/deps/v8/test/mjsunit/regress/regress-100702.js +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Regression test for correct handling of non-object receiver values -// passed to built-in array functions. - -String.prototype.isThatMe = function () { - assertFalse(this === str); -}; - -var str = "abc"; -str.isThatMe(); -str.isThatMe.call(str); - -var arr = [1]; -arr.forEach("".isThatMe, str); -arr.filter("".isThatMe, str); -arr.some("".isThatMe, str); -arr.every("".isThatMe, str); -arr.map("".isThatMe, str); diff --git a/deps/v8/test/mjsunit/regress/regress-1229.js b/deps/v8/test/mjsunit/regress/regress-1229.js index c0dcba912..e16d278b3 100644 --- a/deps/v8/test/mjsunit/regress/regress-1229.js +++ b/deps/v8/test/mjsunit/regress/regress-1229.js @@ -35,10 +35,10 @@ function foo(x, y, z) { assertEquals(3, z); } -var foob = foo.bind({}, 1); +var bound_arg = [1]; function f(y, z) { - return %NewObjectFromBound(foob); + return %NewObjectFromBound(foo, bound_arg); } // Check that %NewObjectFromBound looks at correct frame for inlined function. diff --git a/deps/v8/test/mjsunit/regress/regress-1521.js b/deps/v8/test/mjsunit/regress/regress-1521.js index 3149f05a5..415db6780 100644 --- a/deps/v8/test/mjsunit/regress/regress-1521.js +++ b/deps/v8/test/mjsunit/regress/regress-1521.js @@ -24,8 +24,6 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// -// Flags: --allow-natives-syntax // Optimized variable access inside through a catch context should work. function test(x) { @@ -46,3 +44,4 @@ function test(x) { } test(3); + diff --git a/deps/v8/test/mjsunit/stack-traces-2.js b/deps/v8/test/mjsunit/stack-traces-2.js deleted file mode 100644 index 165c4dfce..000000000 --- a/deps/v8/test/mjsunit/stack-traces-2.js +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2011 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Flags: --builtins-in-stack-traces - - -// Poisonous object that throws a reference error if attempted converted to -// a primitive values. -var thrower = { valueOf: function() { FAIL; }, - toString: function() { FAIL; } }; - -// Tests that a native constructor function is included in the -// stack trace. -function testTraceNativeConstructor(nativeFunc) { - var nativeFuncName = nativeFunc.name; - try { - new nativeFunc(thrower); - assertUnreachable(nativeFuncName); - } catch (e) { - assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName); - } -} - -// Tests that a native conversion function is included in the -// stack trace. -function testTraceNativeConversion(nativeFunc) { - var nativeFuncName = nativeFunc.name; - try { - nativeFunc(thrower); - assertUnreachable(nativeFuncName); - } catch (e) { - assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName); - } -} - - -function testNotOmittedBuiltin(throwing, included) { - try { - throwing(); - assertUnreachable(included); - } catch (e) { - assertTrue(e.stack.indexOf(included) >= 0, included); - } -} - - -testTraceNativeConversion(String); // Does ToString on argument. -testTraceNativeConversion(Number); // Does ToNumber on argument. -testTraceNativeConversion(RegExp); // Does ToString on argument. - -testTraceNativeConstructor(String); // Does ToString on argument. -testTraceNativeConstructor(Number); // Does ToNumber on argument. -testTraceNativeConstructor(RegExp); // Does ToString on argument. -testTraceNativeConstructor(Date); // Does ToNumber on argument. - -// QuickSort has builtins object as receiver, and is non-native -// builtin. Should not be omitted with the --builtins-in-stack-traces flag. -testNotOmittedBuiltin(function(){ [thrower, 2].sort(function (a,b) { - (b < a) - (a < b); }); - }, "QuickSort"); - -// Not omitted even though ADD from runtime.js is a non-native builtin. -testNotOmittedBuiltin(function(){ thrower + 2; }, "ADD");
\ No newline at end of file diff --git a/deps/v8/test/mjsunit/stack-traces.js b/deps/v8/test/mjsunit/stack-traces.js index 536e71bbb..47a5cc594 100644 --- a/deps/v8/test/mjsunit/stack-traces.js +++ b/deps/v8/test/mjsunit/stack-traces.js @@ -194,46 +194,6 @@ function testErrorsDuringFormatting() { } -// Poisonous object that throws a reference error if attempted converted to -// a primitive values. -var thrower = { valueOf: function() { FAIL; }, - toString: function() { FAIL; } }; - -// Tests that a native constructor function is included in the -// stack trace. -function testTraceNativeConstructor(nativeFunc) { - var nativeFuncName = nativeFunc.name; - try { - new nativeFunc(thrower); - assertUnreachable(nativeFuncName); - } catch (e) { - assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName); - } -} - -// Tests that a native conversion function is included in the -// stack trace. -function testTraceNativeConversion(nativeFunc) { - var nativeFuncName = nativeFunc.name; - try { - nativeFunc(thrower); - assertUnreachable(nativeFuncName); - } catch (e) { - assertTrue(e.stack.indexOf(nativeFuncName) >= 0, nativeFuncName); - } -} - - -function testOmittedBuiltin(throwing, omitted) { - try { - throwing(); - assertUnreachable(omitted); - } catch (e) { - assertTrue(e.stack.indexOf(omitted) < 0, omitted); - } -} - - testTrace("testArrayNative", testArrayNative, ["Array.map (native)"]); testTrace("testNested", testNested, ["at one", "at two", "at three"]); testTrace("testMethodNameInference", testMethodNameInference, ["at Foo.bar"]); @@ -257,21 +217,3 @@ testTrace("testStrippedCustomError", testStrippedCustomError, ["hep-hey"], testCallerCensorship(); testUnintendedCallerCensorship(); testErrorsDuringFormatting(); - -testTraceNativeConversion(String); // Does ToString on argument. -testTraceNativeConversion(Number); // Does ToNumber on argument. -testTraceNativeConversion(RegExp); // Does ToString on argument. - -testTraceNativeConstructor(String); // Does ToString on argument. -testTraceNativeConstructor(Number); // Does ToNumber on argument. -testTraceNativeConstructor(RegExp); // Does ToString on argument. -testTraceNativeConstructor(Date); // Does ToNumber on argument. - -// Omitted because QuickSort has builtins object as receiver, and is non-native -// builtin. -testOmittedBuiltin(function(){ [thrower, 2].sort(function (a,b) { - (b < a) - (a < b); }); - }, "QuickSort"); - -// Omitted because ADD from runtime.js is non-native builtin. -testOmittedBuiltin(function(){ thrower + 2; }, "ADD");
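Note: both stack-trace test files above build on the same trick, an object whose valueOf and toString throw, so the failure originates inside a native builtin and the resulting stack trace can be inspected for (or checked to omit) the builtin's name. A small sketch of the technique, illustrative only, using d8's print():

var thrower = {valueOf:  function() { FAIL; },   // FAIL is deliberately undeclared,
               toString: function() { FAIL; }};  // so any ToPrimitive call throws
try {
  new Number(thrower);            // the Number builtin performs ToNumber(thrower)
} catch (e) {
  // Whether "Number" appears in the trace depends on the V8 version and flags,
  // which is what the deleted tests were asserting.
  print(e.stack.indexOf("Number") >= 0);
}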
\ No newline at end of file diff --git a/deps/v8/test/mjsunit/strict-mode.js b/deps/v8/test/mjsunit/strict-mode.js index 9c9bdfd52..30234ba6f 100644 --- a/deps/v8/test/mjsunit/strict-mode.js +++ b/deps/v8/test/mjsunit/strict-mode.js @@ -1051,20 +1051,14 @@ function CheckPillDescriptor(func, name) { } assertThrows(function() { strict.caller; }, TypeError); assertThrows(function() { strict.arguments; }, TypeError); - assertThrows(function() { strict.caller = 42; }, TypeError); - assertThrows(function() { strict.arguments = 42; }, TypeError); var another = new Function("'use strict'"); assertThrows(function() { another.caller; }, TypeError); assertThrows(function() { another.arguments; }, TypeError); - assertThrows(function() { another.caller = 42; }, TypeError); - assertThrows(function() { another.arguments = 42; }, TypeError); var third = (function() { "use strict"; return function() {}; })(); assertThrows(function() { third.caller; }, TypeError); assertThrows(function() { third.arguments; }, TypeError); - assertThrows(function() { third.caller = 42; }, TypeError); - assertThrows(function() { third.arguments = 42; }, TypeError); CheckPillDescriptor(strict, "caller"); CheckPillDescriptor(strict, "arguments"); diff --git a/deps/v8/test/mjsunit/to_number_order.js b/deps/v8/test/mjsunit/to_number_order.js index 50e4bc762..d17e60005 100644 --- a/deps/v8/test/mjsunit/to_number_order.js +++ b/deps/v8/test/mjsunit/to_number_order.js @@ -161,7 +161,7 @@ assertEquals("fiskfisk", x, "Compare objects b >= b valueOf order"); x = ""; assertFalse(a > b, "Compare objects a > b"); -assertEquals("hestfisk", x, "Compare objects a > b valueOf order"); +assertEquals("fiskhest", x, "Compare objects a > b valueOf order"); x = ""; assertFalse(a > void(0), "Compare objects a > undefined"); @@ -195,7 +195,7 @@ function identical_object_comparison() { x = ""; assertFalse(a > b, "Compare objects a > b"); - assertEquals("hestfisk", x, "Compare objects a > b valueOf order"); + assertEquals("fiskhest", x, "Compare objects a > b valueOf order"); x = ""; assertFalse(a > void(0), "Compare objects a > undefined"); diff --git a/deps/v8/test/mozilla/mozilla.status b/deps/v8/test/mozilla/mozilla.status index e31a630b8..6a5c08640 100644 --- a/deps/v8/test/mozilla/mozilla.status +++ b/deps/v8/test/mozilla/mozilla.status @@ -300,11 +300,6 @@ js1_2/regexp/RegExp_multiline_as_array: FAIL_OK js1_2/regexp/beginLine: FAIL_OK js1_2/regexp/endLine: FAIL_OK -# We no longer let calls to test and exec with no argument implicitly -# use the previous input. -js1_2/regexp/RegExp_input: FAIL_OK -js1_2/regexp/RegExp_input_as_array: FAIL_OK - # To be compatible with safari typeof a regexp yields 'function'; # in firefox it yields 'object'. @@ -415,6 +410,12 @@ js1_5/extensions/regress-435345-01: FAIL_OK js1_5/extensions/regress-455413: FAIL_OK +# The spec specifies reverse evaluation order for < and >=. +# See section 11.8.2 and 11.8.5. +# We implement the spec here but the test tests the more straigtforward order. +ecma_3/Operators/order-01: FAIL_OK + + # Uses Mozilla-specific QName, XML, XMLList and Iterator. 
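For context: the to_number_order.js and status-file hunks above all revolve around which operand of a relational comparison is converted first; the test observes the order through valueOf side effects. A minimal sketch of that observation technique, with names loosely mirroring the test; which string results depends on whether the engine uses the ES5 ordering (the expectation being removed) or the older ordering this revert appears to restore (the expectation being added):

var x = "";
var a = {valueOf: function() { x += "hest"; return 1; }};
var b = {valueOf: function() { x += "fisk"; return 2; }};

x = "";
a > b;        // the comparison result is irrelevant; only the conversions matter
print(x);     // "hestfisk" under the ES5 ordering, "fiskhest" under the old one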
js1_5/Regress/regress-407323: FAIL_OK js1_5/Regress/regress-407957: FAIL_OK diff --git a/deps/v8/test/sputnik/sputnik.status b/deps/v8/test/sputnik/sputnik.status index 135540e7c..99db598af 100644 --- a/deps/v8/test/sputnik/sputnik.status +++ b/deps/v8/test/sputnik/sputnik.status @@ -30,6 +30,10 @@ def FAIL_OK = FAIL, OKAY ############################### BUGS ################################### +# A bound function should fail on access to 'caller' and 'arguments'. +S15.3.4.5_A1: FAIL +S15.3.4.5_A2: FAIL + # '__proto__' should be treated as a normal property in JSON. S15.12.2_A1: FAIL @@ -42,6 +46,12 @@ S15.8.2.16_A7: PASS || FAIL_OK S15.8.2.18_A7: PASS || FAIL_OK S15.8.2.13_A23: PASS || FAIL_OK +# We allow calls to regexp exec() with no arguments to fail for +# compatibility reasons. +S15.10.6.2_A1_T16: FAIL_OK +S15.10.6.2_A12: FAIL_OK +S15.10.6.3_A1_T16: FAIL_OK + # Sputnik tests (r97) assume RegExp.prototype is an Object, not a RegExp. S15.10.6_A2: FAIL_OK @@ -152,10 +162,6 @@ S11.1.5_A4.2: FAIL_OK S9.9_A1: FAIL_OK S9.9_A2: FAIL_OK -# The expected evaluation order of comparison operations changed. -S11.8.2_A2.3_T1: FAIL_OK -S11.8.3_A2.3_T1: FAIL_OK - # Calls builtins without an explicit receiver which means that # undefined is passed to the builtin. The tests expect the global # object to be passed which was true in ES3 but not in ES5. @@ -181,10 +187,6 @@ S15.1.1.2_A2_T1: FAIL_OK # Infinity S15.1.1.3_A2_T1: FAIL_OK # undefined S15.1.1.3_A2_T2: FAIL_OK # undefined -# Function.prototype.apply can handle arbitrary object as argument list. -S15.3.4.3_A6_T1: FAIL_OK -S15.3.4.3_A6_T4: FAIL_OK - # Array.prototype.to[Locale]String is generic in ES5. S15.4.4.2_A2_T1: FAIL_OK S15.4.4.3_A2_T1: FAIL_OK diff --git a/deps/v8/test/test262/test262.status b/deps/v8/test/test262/test262.status index 3eefbd746..1a619547d 100644 --- a/deps/v8/test/test262/test262.status +++ b/deps/v8/test/test262/test262.status @@ -30,6 +30,10 @@ def FAIL_OK = FAIL, OKAY ############################### BUGS ################################### +# A bound function should fail on access to 'caller' and 'arguments'. +S15.3.4.5_A1: FAIL +S15.3.4.5_A2: FAIL + # '__proto__' should be treated as a normal property in JSON. S15.12.2_A1: FAIL @@ -39,6 +43,22 @@ S8.7_A5_T2: FAIL # V8 Bug: http://code.google.com/p/v8/issues/detail?id=1624 S10.4.2.1_A1: FAIL +# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1752 +S11.8.2_A2.3_T1: FAIL +S11.8.3_A2.3_T1: FAIL +11.8.2-1: FAIL +11.8.2-2: FAIL +11.8.2-3: FAIL +11.8.2-4: FAIL +11.8.3-1: FAIL +11.8.3-2: FAIL +11.8.3-3: FAIL +11.8.3-4: FAIL +11.8.3-5: FAIL + +# V8 Bug. +S13.2.3_A1: FAIL + # V8 Bug: http://code.google.com/p/v8/issues/detail?id=1530 S15.3.3.1_A4: FAIL @@ -145,6 +165,12 @@ S15.8.2.16_A7: PASS || FAIL_OK S15.8.2.18_A7: PASS || FAIL_OK S15.8.2.13_A23: PASS || FAIL_OK +# We allow calls to regexp exec() with no arguments to fail for +# compatibility reasons. +S15.10.6.2_A1_T16: FAIL_OK +S15.10.6.2_A12: FAIL_OK +S15.10.6.3_A1_T16: FAIL_OK + # Sputnik tests (r97) assume RegExp.prototype is an Object, not a RegExp. S15.10.6_A2: FAIL_OK @@ -316,6 +342,80 @@ S15.4.4.3_A2_T1: FAIL_OK ######################### UNANALYZED FAILURES ########################## +# Bug? 
Object.defineProperty - Update [[Enumerable]] attribute of 'name' +# property to true successfully when [[Enumerable]] attribute of 'name' +# is false and [[Configurable]] attribute of 'name' is true, the 'desc' +# is a generic descriptor which only contains [[Enumerable]] attribute +# as true, 'name' property is an index data property (8.12.9 step 8) +15.2.3.6-4-82-18: FAIL +# Bug? Object.defineProperty - Update [[Enumerable]] attribute of 'name' +# property to false successfully when [[Enumerable]] and [[Configurable]] +# attributes of 'name' property are true, the 'desc' is a generic +# descriptor which only contains [Enumerable]] attribute as false and +# 'name' property is an index accessor property (8.12.9 step 8) +15.2.3.6-4-82-19: FAIL +# Bug? Object.defineProperty - Update [[Enumerable]] attribute of 'name' +# property to false successfully when [[Enumerable]] and [[Configurable]] +# attributes of 'name' property are true, the 'desc' is a generic +# descriptor which contains [Enumerable]] attribute as false and +# [[Configurable]] property is true, 'name' property is an index accessor +# property (8.12.9 step 8) +15.2.3.6-4-82-20: FAIL +# Bug? Object.defineProperty - Update [[Configurable]] attribute of 'name' +# property to false successfully when [[Enumerable]] and [[Configurable]] +# attributes of 'name' property are true, the 'desc' is a generic +# descriptor which only contains [[Configurable]] attribute as false, +# 'name' property is an index accessor property (8.12.9 step 8) +15.2.3.6-4-82-21: FAIL +# Bug? Object.defineProperty - Update [[Configurable]] attribute of 'name' +# property to false successfully when [[Enumerable]] and [[Configurable]] +# attributes of 'name' property are true, the 'desc' is a generic +# descriptor which contains [[Enumerable]] attribute as true and +# [[Configurable]] attribute is false, 'name' property is an index accessor +# property (8.12.9 step 8) +15.2.3.6-4-82-22: FAIL +# Bug? Object.defineProperty - Update [[Enumerable]] and [[Configurable]] +# attributes of 'name' property to false successfully when [[Enumerable]] +# and [[Configurable]] attributes of 'name' property are true, the 'desc' +# is a generic descriptor which contains [[Enumerable]] and +# [[Configurable]] attributes as false, 'name' property is an index +# accessor property (8.12.9 step 8) +15.2.3.6-4-82-23: FAIL +# Bug? Object.defineProperty - Update [[Enumerable]] attributes of 'name' +# property to true successfully when [[Enumerable]] attribute of 'name' is +# false and [[Configurable]] attribute of 'name' is true, the 'desc' is a +# generic descriptor which only contains [[Enumerable]] attribute as true, +# 'name' property is an index accessor property (8.12.9 step 8) +15.2.3.6-4-82-24: FAIL +# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named +# property, 'desc' is accessor descriptor, test updating all attribute +# values of 'name' (15.4.5.1 step 4.c) +15.2.3.6-4-209: FAIL +# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named +# property, name is accessor property and 'desc' is accessor descriptor, +# test updating the [[Enumerable]] attribute value of 'name' (15.4.5.1 step +# 4.c) +15.2.3.6-4-271: FAIL +# Bug? Object.defineProperty - 'O' is an Array, 'name' is an array index named +# property, name is accessor property and 'desc' is accessor descriptor, +# test updating the [[Configurable]] attribute value of 'name' (15.4.5.1 +# step 4.c) +15.2.3.6-4-272: FAIL +# Bug? 
Object.defineProperty - 'O' is an Array, 'name' is an array index named +# property, name is accessor property and 'desc' is accessor descriptor, +# test updating multiple attribute values of 'name' (15.4.5.1 step 4.c) +15.2.3.6-4-273: FAIL +# Bug? Object.defineProperty - 'O' is an Arguments object of a function that has +# formal parameters, 'name' is own accessor property of 'O' which is also +# defined in [[ParameterMap]] of 'O', and 'desc' is accessor descriptor, +# test updating multiple attribute values of 'name' (10.6 +# [[DefineOwnProperty]] step 3 and 5.a.i) +15.2.3.6-4-291-1: FAIL +# Bug? Object.defineProperty - 'O' is an Arguments object, 'name' is own +# accessor property of 'O', and 'desc' is accessor descriptor, test +# updating multiple attribute values of 'name' (10.6 [[DefineOwnProperty]] +# step 3) +15.2.3.6-4-291: FAIL # Bug? Object.defineProperty - 'O' is an Arguments object of a function that has # formal parameters, 'name' is own property of 'O' which is also defined in # [[ParameterMap]] of 'O', and 'desc' is data descriptor, test updating @@ -354,6 +454,11 @@ S15.4.4.3_A2_T1: FAIL_OK # updating the [[Configurable]] attribute value of 'name' which is defined # as non-configurable (10.6 [[DefineOwnProperty]] step 4 and step 5b) 15.2.3.6-4-296-1: FAIL +# Bug? Object.defineProperty - 'O' is an Arguments object, 'name' is an index +# named accessor property of 'O' but not defined in [[ParameterMap]] of +# 'O', and 'desc' is accessor descriptor, test updating multiple attribute +# values of 'name' (10.6 [[DefineOwnProperty]] step 3) +15.2.3.6-4-303: FAIL # Bug? ES5 Attributes - indexed property 'P' with attributes [[Writable]]: true, # [[Enumerable]]: true, [[Configurable]]: false is writable using simple # assignment, 'O' is an Arguments object @@ -414,6 +519,30 @@ S15.4.4.3_A2_T1: FAIL_OK 15.2.3.6-4-623: FAIL # Bug? ES5 Attributes - all attributes in Date.prototype.toJSON are correct 15.2.3.6-4-624: FAIL +# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named +# property, 'desc' is accessor descriptor, test updating all attribute +# values of 'P' (15.4.5.1 step 4.c) +15.2.3.7-6-a-205: FAIL +# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named +# property that already exists on 'O' is accessor property and 'desc' is +# accessor descriptor, test updating the [[Enumerable]] attribute value of +# 'P' (15.4.5.1 step 4.c) +15.2.3.7-6-a-260: FAIL +# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named +# property that already exists on 'O' is accessor property and 'desc' is +# accessor descriptor, test updating the [[Configurable]] attribute value +# of 'P' (15.4.5.1 step 4.c) +15.2.3.7-6-a-261: FAIL +# Bug? Object.defineProperties - 'O' is an Array, 'P' is an array index named +# property that already exists on 'O' is accessor property and 'desc' is +# accessor descriptor, test updating multiple attribute values of 'P' +# (15.4.5.1 step 4.c) +15.2.3.7-6-a-262: FAIL +# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is own accessor +# property of 'O' which is also defined in [[ParameterMap]] of 'O', and +# 'desc' is accessor descriptor, test updating multiple attribute values of +# 'P' (10.6 [[DefineOwnProperty]] step 3) +15.2.3.7-6-a-280: FAIL # Bug? 
Object.defineProperties - 'O' is an Arguments object, 'P' is own data # property of 'O' which is also defined in [[ParameterMap]] of 'O', and # 'desc' is data descriptor, test updating multiple attribute values of 'P' @@ -442,6 +571,32 @@ S15.4.4.3_A2_T1: FAIL_OK # 'P' which is defined as non-configurable (10.6 [[DefineOwnProperty]] step # 4) 15.2.3.7-6-a-285: FAIL +# Bug? Object.defineProperties - 'O' is an Arguments object, 'P' is an array +# index named accessor property of 'O' but not defined in [[ParameterMap]] +# of 'O', and 'desc' is accessor descriptor, test updating multiple +# attribute values of 'P' (10.6 [[DefineOwnProperty]] step 3) +15.2.3.7-6-a-292: FAIL +# Bug? Strict Mode - 'this' value is a string which cannot be converted to +# wrapper objects when the function is called with an array of arguments +15.3.4.3-1-s: FAIL +# Bug? Strict Mode - 'this' value is a number which cannot be converted to +# wrapper objects when the function is called with an array of arguments +15.3.4.3-2-s: FAIL +# Bug? Strict Mode - 'this' value is a boolean which cannot be converted to +# wrapper objects when the function is called with an array of arguments +15.3.4.3-3-s: FAIL +# Bug? Function.prototype.bind - [[Get]] attribute of 'caller' property in 'F' +# is thrower +15.3.4.5-20-2: FAIL +# Bug? Function.prototype.bind - [[Set]] attribute of 'caller' property in 'F' +# is thrower +15.3.4.5-20-3: FAIL +# Bug? Function.prototype.bind - [[Get]] attribute of 'arguments' property in +# 'F' is thrower +15.3.4.5-21-2: FAIL +# Bug? Function.prototype.bind - [[Set]] attribute of 'arguments' property in +# 'F' is thrower +15.3.4.5-21-3: FAIL # Bug? Array.prototype.indexOf - decreasing length of array does not delete # non-configurable properties 15.4.4.14-9-a-19: FAIL @@ -460,9 +615,24 @@ S15.4.4.3_A2_T1: FAIL_OK # Bug? Array.prototype.map - decreasing length of array does not delete # non-configurable properties 15.4.4.19-8-b-16: FAIL +# Bug? Array.prototype.filter - properties can be added to prototype after +# current position are visited on an Array-like object +15.4.4.20-9-b-6: FAIL # Bug? Array.prototype.filter - decreasing length of array does not delete # non-configurable properties 15.4.4.20-9-b-16: FAIL +# Bug? Array.prototype.filter - element to be retrieved is own data property +# that overrides an inherited accessor property on an Array +15.4.4.20-9-c-i-6: FAIL +# Bug? Array.prototype.filter - element to be retrieved is own accessor property +# that overrides an inherited accessor property on an Array +15.4.4.20-9-c-i-14: FAIL +# Bug? Array.prototype.filter - element to be retrieved is inherited accessor +# property on an Array +15.4.4.20-9-c-i-16: FAIL +# Bug? Array.prototype.filter - element to be retrieved is inherited accessor +# property without a get function on an Array +15.4.4.20-9-c-i-22: FAIL # Bug? Array.prototype.reduce - decreasing length of array in step 8 does not # delete non-configurable properties 15.4.4.21-9-b-16: FAIL @@ -496,6 +666,9 @@ S15.4.4.3_A2_T1: FAIL_OK # Bug? Date.prototype.toISOString - value of year is Infinity # Date.prototype.toISOString throw the RangeError 15.9.5.43-0-15: FAIL +# Bug? 
Error.prototype.toString return the value of 'msg' when 'name' is empty +# string and 'msg' isn't undefined +15.11.4.4-8-1: FAIL ############################ SKIPPED TESTS ############################# diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp index 34ad4c43b..481293007 100644 --- a/deps/v8/tools/gyp/v8.gyp +++ b/deps/v8/tools/gyp/v8.gyp @@ -709,7 +709,7 @@ 'experimental_library_files': [ '../../src/macros.py', '../../src/proxy.js', - '../../src/collection.js', + '../../src/weakmap.js', ], }, 'actions': [ |