vendor/v8/src/x64/full-codegen-x64.cc in mustang-0.0.1 vs vendor/v8/src/x64/full-codegen-x64.cc in mustang-0.1.0
- old
+ new
@@ -196,56 +196,63 @@
__ Push(Smi::FromInt(scope()->num_parameters()));
// Arguments to ArgumentsAccessStub:
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
// stack frame was an arguments adapter frame.
- ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+ ArgumentsAccessStub stub(
+ is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
+ : ArgumentsAccessStub::NEW_NON_STRICT);
__ CallStub(&stub);
- // Store new arguments object in both "arguments" and ".arguments" slots.
- __ movq(rcx, rax);
- Move(arguments->AsSlot(), rax, rbx, rdx);
- Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
- Move(dot_arguments_slot, rcx, rbx, rdx);
- }
- { Comment cmnt(masm_, "[ Declarations");
- // For named function expressions, declare the function name as a
- // constant.
- if (scope()->is_function_scope() && scope()->function() != NULL) {
- EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ Variable* arguments_shadow = scope()->arguments_shadow();
+ if (arguments_shadow != NULL) {
+ // Store new arguments object in both "arguments" and ".arguments" slots.
+ __ movq(rcx, rax);
+ Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
}
- // Visit all the explicit declarations unless there is an illegal
- // redeclaration.
- if (scope()->HasIllegalRedeclaration()) {
- scope()->VisitIllegalRedeclaration(this);
- } else {
- VisitDeclarations(scope()->declarations());
- }
+ Move(arguments->AsSlot(), rax, rbx, rdx);
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
- { Comment cmnt(masm_, "[ Stack check");
- PrepareForBailout(info->function(), NO_REGISTERS);
- NearLabel ok;
- __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
- __ j(above_equal, &ok);
- StackCheckStub stub;
- __ CallStub(&stub);
- __ bind(&ok);
- }
+ // Visit the declarations and body unless there is an illegal
+ // redeclaration.
+ if (scope()->HasIllegalRedeclaration()) {
+ Comment cmnt(masm_, "[ Declarations");
+ scope()->VisitIllegalRedeclaration(this);
+ } else {
+ { Comment cmnt(masm_, "[ Declarations");
+ // For named function expressions, declare the function name as a
+ // constant.
+ if (scope()->is_function_scope() && scope()->function() != NULL) {
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ }
+ VisitDeclarations(scope()->declarations());
+ }
- { Comment cmnt(masm_, "[ Body");
- ASSERT(loop_depth() == 0);
- VisitStatements(function()->body());
- ASSERT(loop_depth() == 0);
+ { Comment cmnt(masm_, "[ Stack check");
+ PrepareForBailout(info->function(), NO_REGISTERS);
+ NearLabel ok;
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+ __ j(above_equal, &ok);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ __ bind(&ok);
+ }
+
+ { Comment cmnt(masm_, "[ Body");
+ ASSERT(loop_depth() == 0);
+ VisitStatements(function()->body());
+ ASSERT(loop_depth() == 0);
+ }
}
+ // Always emit a 'return undefined' in case control fell off the end of
+ // the body.
{ Comment cmnt(masm_, "[ return <undefined>;");
- // Emit a 'return undefined' in case control fell off the end of the body.
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
EmitReturnSequence();
}
}
@@ -265,10 +272,17 @@
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
RecordStackCheck(stmt->OsrEntryId());
+ // Loop stack checks can be patched to perform on-stack replacement. In
+ // order to decide whether or not to perform OSR we embed the loop depth
+ // in a test instruction after the call so we can extract it from the OSR
+ // builtin.
+ ASSERT(loop_depth() > 0);
+ __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
// Record a mapping of the OSR id to this PC. This is used if the OSR
// entry becomes the target of a bailout. We don't expect it to be, but
// we want it to work if it is.
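The marker written by the testl above can later be read straight back out of the instruction stream. A minimal standalone sketch of that idea, not V8's actual extraction code, assuming testl(rax, Immediate(..)) assembles to opcode 0xA9 followed by a 32-bit immediate:

    #include <cstdint>
    #include <cstring>

    // Read the loop-nesting marker back out of a 'test eax, imm32'
    // instruction (opcode 0xA9) sitting just after the stack-check call.
    uint32_t ReadLoopNestingMarker(const uint8_t* pc_after_call) {
      if (pc_after_call[0] != 0xA9) return 0;  // no marker was emitted here
      uint32_t marker;
      std::memcpy(&marker, pc_after_call + 1, sizeof(marker));
      return marker;
    }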
@@ -316,17 +330,10 @@
#endif
}
}
-FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
- Token::Value op, Expression* left, Expression* right) {
- ASSERT(ShouldInlineSmiCase(op));
- return kNoConstants;
-}
-
-
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
@@ -472,27 +479,27 @@
void FullCodeGenerator::AccumulatorValueContext::Plug(
Label* materialize_true,
Label* materialize_false) const {
NearLabel done;
__ bind(materialize_true);
- __ Move(result_register(), Factory::true_value());
+ __ Move(result_register(), isolate()->factory()->true_value());
__ jmp(&done);
__ bind(materialize_false);
- __ Move(result_register(), Factory::false_value());
+ __ Move(result_register(), isolate()->factory()->false_value());
__ bind(&done);
}
void FullCodeGenerator::StackValueContext::Plug(
Label* materialize_true,
Label* materialize_false) const {
NearLabel done;
__ bind(materialize_true);
- __ Push(Factory::true_value());
+ __ Push(isolate()->factory()->true_value());
__ jmp(&done);
__ bind(materialize_false);
- __ Push(Factory::false_value());
+ __ Push(isolate()->factory()->false_value());
__ bind(&done);
}
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
@@ -541,12 +548,12 @@
__ j(equal, if_false);
__ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
__ j(equal, if_true);
__ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
__ j(equal, if_false);
- ASSERT_EQ(0, kSmiTag);
- __ SmiCompare(result_register(), Smi::FromInt(0));
+ STATIC_ASSERT(kSmiTag == 0);
+ __ Cmp(result_register(), Smi::FromInt(0));
__ j(equal, if_false);
Condition is_smi = masm_->CheckSmi(result_register());
__ j(is_smi, if_true);
// Call the ToBoolean stub for all other cases.
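The smi checks above lean on the x64 tagging scheme: a smi keeps its 32-bit payload in the upper half of the word, so the tag in the low bit is always zero. A standalone sketch of that representation (an assumption about the layout, not code from this file):

    #include <cstdint>

    // x64 smi encoding: payload in the high 32 bits, tag bit 0 in the low bit.
    inline uint64_t SmiFromInt(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
    }

    // What CheckSmi boils down to: heap-object words have the low bit set,
    // smi words do not (kSmiTag == 0, as the STATIC_ASSERT spells out).
    inline bool IsSmi(uint64_t word) {
      return (word & 1) == 0;
    }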
@@ -731,11 +738,13 @@
}
ASSERT(prop->key()->AsLiteral() != NULL &&
prop->key()->AsLiteral()->handle()->IsSmi());
__ Move(rcx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
}
}
@@ -748,11 +757,12 @@
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(rsi); // The context is the first argument.
__ Push(pairs);
__ Push(Smi::FromInt(is_eval() ? 1 : 0));
- __ CallRuntime(Runtime::kDeclareGlobals, 3);
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kDeclareGlobals, 4);
// Return value is ignored.
}
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
@@ -827,10 +837,11 @@
// Compile all the case bodies.
for (int i = 0; i < clauses->length(); i++) {
Comment cmnt(masm_, "[ Case body");
CaseClause* clause = clauses->at(i);
__ bind(clause->body_target()->entry_label());
+ PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
VisitStatements(clause->statements());
}
__ bind(nested_statement.break_target());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
@@ -849,11 +860,13 @@
// ignore null and undefined in contrast to the specification; see
// ECMA-262 section 12.6.4.
VisitForAccumulatorValue(stmt->enumerable());
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, &exit);
- __ CompareRoot(rax, Heap::kNullValueRootIndex);
+ Register null_value = rdi;
+ __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+ __ cmpq(rax, null_value);
__ j(equal, &exit);
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(rax, &convert);
@@ -863,16 +876,65 @@
__ push(rax);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
__ push(rax);
- // BUG(867): Check cache validity in generated code. This is a fast
- // case for the JSObject::IsSimpleEnum cache validity checks. If we
- // cannot guarantee cache validity, call the runtime system to check
- // cache validity or get the property names in a fixed array.
+ // Check cache validity in generated code. This is a fast case for
+ // the JSObject::IsSimpleEnum cache validity checks. If we cannot
+ // guarantee cache validity, call the runtime system to check cache
+ // validity or get the property names in a fixed array.
+ Label next, call_runtime;
+ Register empty_fixed_array_value = r8;
+ __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+ Register empty_descriptor_array_value = r9;
+ __ LoadRoot(empty_descriptor_array_value,
+ Heap::kEmptyDescriptorArrayRootIndex);
+ __ movq(rcx, rax);
+ __ bind(&next);
+ // Check that there are no elements. Register rcx contains the
+ // current JS object we've reached through the prototype chain.
+ __ cmpq(empty_fixed_array_value,
+ FieldOperand(rcx, JSObject::kElementsOffset));
+ __ j(not_equal, &call_runtime);
+
+ // Check that instance descriptors are not empty so that we can
+ // check for an enum cache. Leave the map in rbx for the subsequent
+ // prototype load.
+ __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
+ __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
+ __ cmpq(rdx, empty_descriptor_array_value);
+ __ j(equal, &call_runtime);
+
+ // Check that there is an enum cache in the non-empty instance
+ // descriptors (rdx). This is the case if the next enumeration
+ // index field does not contain a smi.
+ __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
+ __ JumpIfSmi(rdx, &call_runtime);
+
+ // For all objects but the receiver, check that the cache is empty.
+ NearLabel check_prototype;
+ __ cmpq(rcx, rax);
+ __ j(equal, &check_prototype);
+ __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ __ cmpq(rdx, empty_fixed_array_value);
+ __ j(not_equal, &call_runtime);
+
+ // Load the prototype from the map and loop if non-null.
+ __ bind(&check_prototype);
+ __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
+ __ cmpq(rcx, null_value);
+ __ j(not_equal, &next);
+
+ // The enum cache is valid. Load the map of the object being
+ // iterated over and use the cache for the iteration.
+ NearLabel use_cache;
+ __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
+ __ jmp(&use_cache);
+
// Get the set of properties to enumerate.
+ __ bind(&call_runtime);
__ push(rax); // Duplicate the enumerable object on the stack.
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
@@ -881,10 +943,11 @@
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
Heap::kMetaMapRootIndex);
__ j(not_equal, &fixed_array);
// We got a map in register rax. Get the enumeration cache from it.
+ __ bind(&use_cache);
__ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset));
__ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
__ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Setup the four remaining stack slots.
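The cache validity walk added above is easier to follow in C. Below is a hypothetical C-level mirror of it; the struct fields stand in for the offsets the assembly reads (kElementsOffset, kInstanceDescriptorsOffset, and so on) and are not V8's real object layout:

    #include <cstdint>

    struct FixedArray;
    struct JSObject;
    struct DescriptorArray {
      void* enumeration_index;  // holds a smi when no enum cache exists
      FixedArray* enum_cache;   // stand-in for the enum cache bridge cache
    };
    struct Map {
      DescriptorArray* instance_descriptors;
      JSObject* prototype;
    };
    struct JSObject {
      Map* map;
      FixedArray* elements;
    };

    extern FixedArray* kEmptyFixedArray;            // empty-fixed-array root
    extern DescriptorArray* kEmptyDescriptorArray;  // empty-descriptors root
    extern JSObject* kNullValue;                    // null root

    static bool IsSmiWord(void* p) {
      return (reinterpret_cast<std::uintptr_t>(p) & 1) == 0;  // kSmiTag == 0
    }

    // True when every object on the prototype chain is element-free and has
    // a populated enum cache, so the cached keys can drive the for-in loop.
    bool CanUseEnumCache(JSObject* receiver) {
      for (JSObject* o = receiver; o != kNullValue; o = o->map->prototype) {
        if (o->elements != kEmptyFixedArray) return false;
        DescriptorArray* d = o->map->instance_descriptors;
        if (d == kEmptyDescriptorArray) return false;
        if (IsSmiWord(d->enumeration_index)) return false;  // no cache yet
        if (o != receiver && d->enum_cache != kEmptyFixedArray) return false;
      }
      return true;  // iterate using the receiver map's enum cache
    }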
@@ -932,11 +995,11 @@
// anymore. If the property has been removed while iterating, we
// just skip it.
__ push(rcx); // Enumerable.
__ push(rbx); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
- __ SmiCompare(rax, Smi::FromInt(0));
+ __ Cmp(rax, Smi::FromInt(0));
__ j(equal, loop_statement.continue_target());
__ movq(rbx, rax);
// Update the 'each' property or variable from the possibly filtered
// entry in register rbx.
@@ -969,21 +1032,29 @@
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
- // space for nested functions that don't need literals cloning.
- if (scope()->is_function_scope() &&
- info->num_literals() == 0 &&
- !pretenure) {
- FastNewClosureStub stub;
+ // space for nested functions that don't need literals cloning. If
+ // we're running with the --always-opt or the --prepare-always-opt
+ // flag, we need to use the runtime function so that the new function
+ // we are creating here gets a chance to have its code optimized and
+ // doesn't just get a copy of the existing unoptimized code.
+ if (!FLAG_always_opt &&
+ !FLAG_prepare_always_opt &&
+ !pretenure &&
+ scope()->is_function_scope() &&
+ info->num_literals() == 0) {
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
__ Push(info);
__ CallStub(&stub);
} else {
__ push(rsi);
__ Push(info);
- __ Push(pretenure ? Factory::true_value() : Factory::false_value());
+ __ Push(pretenure
+ ? isolate()->factory()->true_value()
+ : isolate()->factory()->false_value());
__ CallRuntime(Runtime::kNewClosure, 3);
}
context()->Plug(rax);
}
@@ -1048,11 +1119,11 @@
// All extension objects were empty and it is safe to use a global
// load IC call.
__ movq(rax, GlobalObjectOperand());
__ Move(rcx, slot->var()->name());
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
EmitCallIC(ic, mode);
}
@@ -1080,12 +1151,15 @@
}
}
// Check that last extension is NULL.
__ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
__ j(not_equal, slow);
- __ movq(temp, ContextOperand(context, Context::FCONTEXT_INDEX));
- return ContextOperand(temp, slot->index());
+
+ // This function is used only for loads, not stores, so it's safe to
+ // return an rsi-based operand (the write barrier cannot be allowed to
+ // destroy the rsi register).
+ return ContextOperand(context, slot->index());
}
void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
Slot* slot,
@@ -1128,11 +1202,12 @@
// object using keyed load.
__ movq(rdx,
ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
slow));
__ Move(rax, key_literal->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic =
+ isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
__ jmp(done);
}
}
}
@@ -1151,11 +1226,11 @@
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in rcx and the global
// object on the stack.
__ Move(rcx, var->name());
__ movq(rax, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(rax);
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
Label done, slow;
@@ -1214,11 +1289,11 @@
// Load the key.
__ Move(rax, key_literal->handle());
// Do a keyed property load.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
context()->Plug(rax);
}
}
@@ -1281,11 +1356,17 @@
Comment cmnt(masm_, "[ ObjectLiteral");
__ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->constant_properties());
- __ Push(Smi::FromInt(expr->fast_elements() ? 1 : 0));
+ int flags = expr->fast_elements()
+ ? ObjectLiteral::kFastElements
+ : ObjectLiteral::kNoFlags;
+ flags |= expr->has_function()
+ ? ObjectLiteral::kHasFunction
+ : ObjectLiteral::kNoFlags;
+ __ Push(Smi::FromInt(flags));
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
} else {
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
}
@@ -1319,11 +1400,11 @@
if (key->handle()->IsSymbol()) {
VisitForAccumulatorValue(value);
__ Move(rcx, key->handle());
__ movq(rdx, Operand(rsp, 0));
if (property->emit_store()) {
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(key->id(), NO_REGISTERS);
}
break;
}
@@ -1331,11 +1412,12 @@
case ObjectLiteral::Property::PROTOTYPE:
__ push(Operand(rsp, 0)); // Duplicate receiver.
VisitForStackValue(key);
VisitForStackValue(value);
if (property->emit_store()) {
- __ CallRuntime(Runtime::kSetProperty, 3);
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
+ __ CallRuntime(Runtime::kSetProperty, 4);
} else {
__ Drop(3);
}
break;
case ObjectLiteral::Property::SETTER:
@@ -1349,10 +1431,16 @@
__ CallRuntime(Runtime::kDefineAccessor, 4);
break;
}
}
+ if (expr->has_function()) {
+ ASSERT(result_saved);
+ __ push(Operand(rsp, 0));
+ __ CallRuntime(Runtime::kToFastProperties, 1);
+ }
+
if (result_saved) {
context()->PlugTOS();
} else {
context()->Plug(rax);
}
@@ -1367,15 +1455,16 @@
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->constant_elements());
- if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+ if (expr->constant_elements()->map() ==
+ isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
__ CallStub(&stub);
- __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
+ __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
} else if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
@@ -1485,40 +1574,33 @@
}
break;
}
}
+ // For compound assignments we need another deoptimization point after the
+ // variable/property load.
if (expr->is_compound()) {
{ AccumulatorValueContext context(this);
switch (assign_type) {
case VARIABLE:
EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ PrepareForBailout(expr->target(), TOS_REG);
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
break;
}
}
- // For property compound assignments we need another deoptimization
- // point after the property load.
- if (property != NULL) {
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
- }
-
Token::Value op = expr->binary_op();
- ConstantOperand constant = ShouldInlineSmiCase(op)
- ? GetConstantOperand(op, expr->target(), expr->value())
- : kNoConstants;
- ASSERT(constant == kRightConstant || constant == kNoConstants);
- if (constant == kNoConstants) {
- __ push(rax); // Left operand goes on the stack.
- VisitForAccumulatorValue(expr->value());
- }
+ __ push(rax); // Left operand goes on the stack.
+ VisitForAccumulatorValue(expr->value());
OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
? OVERWRITE_RIGHT
: NO_OVERWRITE;
SetSourcePosition(expr->position() + 1);
@@ -1526,12 +1608,11 @@
if (ShouldInlineSmiCase(op)) {
EmitInlineSmiBinaryOp(expr,
op,
mode,
expr->target(),
- expr->value(),
- constant);
+ expr->value());
} else {
EmitBinaryOp(op, mode);
}
// Deoptimization point in case the binary operation may have side effects.
PrepareForBailout(expr->binary_operation(), TOS_REG);
@@ -1562,30 +1643,27 @@
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
__ Move(rcx, key->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Expression* left,
- Expression* right,
- ConstantOperand constant) {
- ASSERT(constant == kNoConstants); // Only handled case.
-
+ Expression* right) {
// Do combined smi check of the operands. Left operand is on the
// stack (popped into rdx). Right operand is in rax but moved into
// rcx to make the shifts easier.
NearLabel done, stub_call, smi_case;
__ pop(rdx);
@@ -1678,11 +1756,13 @@
__ push(rax); // Preserve value.
VisitForAccumulatorValue(prop->obj());
__ movq(rdx, rax);
__ pop(rax); // Restore value.
__ Move(rcx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
case KEYED_PROPERTY: {
__ push(rax); // Preserve value.
@@ -1699,11 +1779,13 @@
VisitForAccumulatorValue(prop->key());
__ movq(rcx, rax);
__ pop(rdx);
}
__ pop(rax); // Restore value.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
}
PrepareForBailoutForId(bailout_ast_id, TOS_REG);
@@ -1723,66 +1805,85 @@
// Assignment to a global variable. Use inline caching for the
// assignment. Right-hand-side value is passed in rax, variable name in
// rcx, and the global object on the stack.
__ Move(rcx, var->name());
__ movq(rdx, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(is_strict()
- ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
- } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
- // Perform the assignment for non-const variables and for initialization
- // of const variables. Const assignments are simply skipped.
- Label done;
+ } else if (op == Token::INIT_CONST) {
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are able
+ // to drill a hole to that function context, even from inside a 'with'
+ // context. We thus bypass the normal static scope lookup.
Slot* slot = var->AsSlot();
+ Label skip;
switch (slot->type()) {
case Slot::PARAMETER:
+ // No const parameters.
+ UNREACHABLE();
+ break;
case Slot::LOCAL:
- if (op == Token::INIT_CONST) {
- // Detect const reinitialization by checking for the hole value.
- __ movq(rdx, Operand(rbp, SlotOffset(slot)));
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &done);
- }
+ __ movq(rdx, Operand(rbp, SlotOffset(slot)));
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &skip);
+ __ movq(Operand(rbp, SlotOffset(slot)), rax);
+ break;
+ case Slot::CONTEXT: {
+ __ movq(rcx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
+ __ movq(rdx, ContextOperand(rcx, slot->index()));
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &skip);
+ __ movq(ContextOperand(rcx, slot->index()), rax);
+ int offset = Context::SlotOffset(slot->index());
+ __ movq(rdx, rax); // Preserve the stored value in rax.
+ __ RecordWrite(rcx, offset, rdx, rbx);
+ break;
+ }
+ case Slot::LOOKUP:
+ __ push(rax);
+ __ push(rsi);
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
+ break;
+ }
+ __ bind(&skip);
+
+ } else if (var->mode() != Variable::CONST) {
+ // Perform the assignment for non-const variables. Const assignments
+ // are simply skipped.
+ Slot* slot = var->AsSlot();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
// Perform the assignment.
__ movq(Operand(rbp, SlotOffset(slot)), rax);
break;
case Slot::CONTEXT: {
MemOperand target = EmitSlotSearch(slot, rcx);
- if (op == Token::INIT_CONST) {
- // Detect const reinitialization by checking for the hole value.
- __ movq(rdx, target);
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &done);
- }
// Perform the assignment and issue the write barrier.
__ movq(target, rax);
// The value of the assignment is in rax. RecordWrite clobbers its
// register arguments.
__ movq(rdx, rax);
- int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
+ int offset = Context::SlotOffset(slot->index());
__ RecordWrite(rcx, offset, rdx, rbx);
break;
}
case Slot::LOOKUP:
- // Call the runtime for the assignment. The runtime will ignore
- // const reinitialization.
+ // Call the runtime for the assignment.
__ push(rax); // Value.
__ push(rsi); // Context.
__ Push(var->name());
- if (op == Token::INIT_CONST) {
- // The runtime will ignore const redeclaration.
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- } else {
- __ CallRuntime(Runtime::kStoreContextSlot, 3);
- }
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
}
- __ bind(&done);
}
}
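The new INIT_CONST arm above repeats one pattern across the local and context cases: store only if the slot still holds the hole sentinel. A tiny sketch of that pattern, with TheHole standing in for the real root value:

    extern void* TheHole();  // placeholder for the kTheHoleValueRootIndex root

    // The first INIT_CONST wins; later reinitializations are silently
    // skipped, which is what the hole comparison and the skip label encode.
    void InitConstSlot(void** slot, void* value) {
      if (*slot == TheHole()) {
        *slot = value;
      }
    }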
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
@@ -1807,11 +1908,13 @@
if (expr->ends_initialization_block()) {
__ movq(rdx, Operand(rsp, 0));
} else {
__ pop(rdx);
}
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
__ push(rax); // Result of assignment, saved even if not needed.
@@ -1845,11 +1948,13 @@
} else {
__ pop(rdx);
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
__ pop(rdx);
@@ -1896,11 +2001,12 @@
}
// Record source position for debugger.
SetSourcePosition(expr->position());
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+ Handle<Code> ic =
+ ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->Plug(rax);
@@ -1929,11 +2035,12 @@
}
// Record source position for debugger.
SetSourcePosition(expr->position());
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+ Handle<Code> ic =
+ ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -1961,10 +2068,31 @@
// Discard the function left on TOS.
context()->DropAndPlug(1, rax);
}
+void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
+ int arg_count) {
+ // Push copy of the first argument or undefined if it doesn't exist.
+ if (arg_count > 0) {
+ __ push(Operand(rsp, arg_count * kPointerSize));
+ } else {
+ __ PushRoot(Heap::kUndefinedValueRootIndex);
+ }
+
+ // Push the receiver of the enclosing function and do runtime call.
+ __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
+
+ // Push the strict mode flag.
+ __ Push(Smi::FromInt(strict_mode_flag()));
+
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
+ : Runtime::kResolvePossiblyDirectEval, 4);
+}
+
+
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
// We want to verify that RecordJSReturnSite gets called on all paths
// through this function. Avoid early returns.
expr->return_is_recorded_ = false;
@@ -1988,26 +2116,35 @@
// Push the arguments.
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
- // Push copy of the function - found below the arguments.
- __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
+ // If we know that eval can only be shadowed by eval-introduced
+ // variables we attempt to load the global eval function directly
+ // in generated code. If we succeed, there is no need to perform a
+ // context lookup in the runtime system.
+ Label done;
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Label slow;
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
+ NOT_INSIDE_TYPEOF,
+ &slow);
+ // Push the function and resolve eval.
+ __ push(rax);
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
+ __ jmp(&done);
+ __ bind(&slow);
+ }
- // Push copy of the first argument or undefined if it doesn't exist.
- if (arg_count > 0) {
- __ push(Operand(rsp, arg_count * kPointerSize));
- } else {
- __ PushRoot(Heap::kUndefinedValueRootIndex);
+ // Push copy of the function (found below the arguments) and
+ // resolve eval.
+ __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
+ if (done.is_linked()) {
+ __ bind(&done);
}
- // Push the receiver of the enclosing function and do runtime call.
- __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
- // Push the strict mode flag.
- __ Push(Smi::FromInt(strict_mode_flag()));
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
-
// The runtime call returns a pair of values in rax (function) and
// rdx (receiver). Touch up the stack with the right values.
__ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
__ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
}
@@ -2092,11 +2229,11 @@
__ Move(rax, prop->key()->AsLiteral()->handle());
// Record source code position for IC call.
SetSourcePosition(prop->position());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Push result (function).
__ push(rax);
// Push Global receiver.
__ movq(rcx, GlobalObjectOperand());
@@ -2108,19 +2245,10 @@
}
EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
}
}
} else {
- // Call to some other expression. If the expression is an anonymous
- // function literal not called in a loop, mark it as one that should
- // also use the full code generator.
- FunctionLiteral* lit = fun->AsFunctionLiteral();
- if (lit != NULL &&
- lit->name()->Equals(Heap::empty_string()) &&
- loop_depth() == 0) {
- lit->set_try_full_codegen(true);
- }
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(fun);
}
// Load global receiver object.
__ movq(rbx, GlobalObjectOperand());
@@ -2160,11 +2288,12 @@
// Load function and argument count into rdi and rax.
__ Set(rax, arg_count);
__ movq(rdi, Operand(rsp, arg_count * kPointerSize));
- Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
+ Handle<Code> construct_builtin =
+ isolate()->builtins()->JSConstructCall();
__ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
context()->Plug(rax);
}
@@ -2294,15 +2423,75 @@
Label* if_false = NULL;
Label* fall_through = NULL;
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
- // used in a few functions in runtime.js which should not normally be hit by
- // this compiler.
+ if (FLAG_debug_code) __ AbortIfSmi(rax);
+
+ // Check whether this map has already been checked to be safe for default
+ // valueOf.
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+ __ testb(FieldOperand(rbx, Map::kBitField2Offset),
+ Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ j(not_zero, if_true);
+
+ // Check for fast case object. Generate false result for slow case object.
+ __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
+ __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
+ __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
+ __ j(equal, if_false);
+
+ // Look for valueOf symbol in the descriptor array, and indicate false if
+ // found. The type is not checked, so if it is a transition it is a false
+ // negative.
+ __ movq(rbx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
+ __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
+ // rbx: descriptor array
+ // rcx: length of descriptor array
+ // Calculate the end of the descriptor array.
+ SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
+ __ lea(rcx,
+ Operand(
+ rbx, index.reg, index.scale, FixedArray::kHeaderSize));
+ // Calculate location of the first key name.
+ __ addq(rbx,
+ Immediate(FixedArray::kHeaderSize +
+ DescriptorArray::kFirstIndex * kPointerSize));
+ // Loop through all the keys in the descriptor array. If one of these is the
+ // symbol valueOf, the result is false.
+ Label entry, loop;
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ movq(rdx, FieldOperand(rbx, 0));
+ __ Cmp(rdx, FACTORY->value_of_symbol());
+ __ j(equal, if_false);
+ __ addq(rbx, Immediate(kPointerSize));
+ __ bind(&entry);
+ __ cmpq(rbx, rcx);
+ __ j(not_equal, &loop);
+
+ // Reload map as register rbx was used as temporary above.
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+
+ // If a valueOf property is not found on the object, check that its
+ // prototype is the unmodified String prototype. If not, the result is false.
+ __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
+ __ testq(rcx, Immediate(kSmiTagMask));
+ __ j(zero, if_false);
+ __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
+ __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
+ __ cmpq(rcx,
+ ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+ __ j(not_equal, if_false);
+ // Set the bit in the map to indicate that it has been checked safe for
+ // default valueOf and set true result.
+ __ or_(FieldOperand(rbx, Map::kBitField2Offset),
+ Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ jmp(if_true);
+
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- __ jmp(if_false);
context()->Plug(if_true, if_false);
}
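In outline, the new fast path answers the intrinsic in three steps: consult the cached bit in the map, scan the descriptor keys for valueOf, then verify the prototype. A hedged sketch of the scan, with stand-in types:

    #include <string>
    #include <vector>

    // Conservative check: any valueOf key among the descriptors yields false
    // (a transition does too, hence the false negatives noted above).
    bool SafeForDefaultValueOf(const std::vector<std::string>& descriptor_keys,
                               const void* prototype_map,
                               const void* string_prototype_map) {
      for (const std::string& key : descriptor_keys) {
        if (key == "valueOf") return false;
      }
      // The prototype must still be the unmodified String prototype.
      return prototype_map == string_prototype_map;
    }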
void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
@@ -2382,19 +2571,19 @@
// Get the frame pointer for the calling frame.
__ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
// Skip the arguments adaptor frame if it exists.
Label check_frame_marker;
- __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
- __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
- Smi::FromInt(StackFrame::CONSTRUCT));
+ __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
+ Smi::FromInt(StackFrame::CONSTRUCT));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
@@ -2444,12 +2633,12 @@
// Get the number of formal parameters.
__ Move(rax, Smi::FromInt(scope()->num_parameters()));
// Check if the calling frame is an arguments adaptor frame.
__ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &exit);
// Arguments adaptor case: Read the arguments length from the
// adaptor frame.
__ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
@@ -2493,16 +2682,16 @@
__ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
__ jmp(&done);
// Functions have class 'Function'.
__ bind(&function);
- __ Move(rax, Factory::function_class_symbol());
+ __ Move(rax, isolate()->factory()->function_class_symbol());
__ jmp(&done);
// Objects with a non-function constructor have class 'Object'.
__ bind(&non_function_constructor);
- __ Move(rax, Factory::Object_symbol());
+ __ Move(rax, isolate()->factory()->Object_symbol());
__ jmp(&done);
// Non-JS objects have class null.
__ bind(&null);
__ LoadRoot(rax, Heap::kNullValueRootIndex);
@@ -2552,12 +2741,17 @@
__ bind(&heapnumber_allocated);
// Return a random uint32 number in rax.
// The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
- __ PrepareCallCFunction(0);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ PrepareCallCFunction(1);
+#ifdef _WIN64
+ __ LoadAddress(rcx, ExternalReference::isolate_address());
+#else
+ __ LoadAddress(rdi, ExternalReference::isolate_address());
+#endif
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
// Convert 32 random bits in rax to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
__ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
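The arithmetic in that comment checks out in isolation. Here is the same bit trick as scalar C++, a sketch rather than the stub's SSE sequence; 0x4130000000000000 is 1.0 x 2^20 as a double, the counterpart of the single-precision constant above:

    #include <cstdint>
    #include <cstring>

    // Put 32 random bits in the low mantissa of 1.0 * 2^20, giving
    // (1 + bits/2^52) * 2^20; subtracting 2^20 leaves bits * 2^-32 in [0, 1).
    double RandomBitsToDouble(uint32_t bits) {
      uint64_t word = 0x4130000000000000ULL | bits;
      double d;
      std::memcpy(&d, &word, sizeof d);
      return d - 1048576.0;  // 2^20
    }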
@@ -2619,11 +2813,12 @@
void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
// Load the arguments on the stack and call the runtime function.
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- __ CallRuntime(Runtime::kMath_pow, 2);
+ MathPowStub stub;
+ __ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
@@ -2803,31 +2998,34 @@
}
void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::SIN);
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
+ TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::COS);
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
+ TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::LOG);
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
+ TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
context()->Plug(rax);
}
@@ -2875,11 +3073,77 @@
void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
+ Label done;
+ Label slow_case;
+ Register object = rax;
+ Register index_1 = rbx;
+ Register index_2 = rcx;
+ Register elements = rdi;
+ Register temp = rdx;
+ __ movq(object, Operand(rsp, 2 * kPointerSize));
+ // Fetch the map and check if array is in fast case.
+ // Check that object doesn't require security checks and
+ // has no indexed interceptor.
+ __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
+ __ j(not_equal, &slow_case);
+ __ testb(FieldOperand(temp, Map::kBitFieldOffset),
+ Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
+ __ j(not_zero, &slow_case);
+
+ // Check the object's elements are in fast case and writable.
+ __ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
+ __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
+ Heap::kFixedArrayMapRootIndex);
+ __ j(not_equal, &slow_case);
+
+ // Check that both indices are smis.
+ __ movq(index_1, Operand(rsp, 1 * kPointerSize));
+ __ movq(index_2, Operand(rsp, 0 * kPointerSize));
+ __ JumpIfNotBothSmi(index_1, index_2, &slow_case);
+
+ // Check that both indices are valid.
+ // The JSArray length field is a smi since the array is in fast case mode.
+ __ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
+ __ SmiCompare(temp, index_1);
+ __ j(below_equal, &slow_case);
+ __ SmiCompare(temp, index_2);
+ __ j(below_equal, &slow_case);
+
+ __ SmiToInteger32(index_1, index_1);
+ __ SmiToInteger32(index_2, index_2);
+ // Bring addresses into index1 and index2.
+ __ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
+ FixedArray::kHeaderSize));
+
+ // Swap elements. Use object and temp as scratch registers.
+ __ movq(object, Operand(index_1, 0));
+ __ movq(temp, Operand(index_2, 0));
+ __ movq(Operand(index_2, 0), object);
+ __ movq(Operand(index_1, 0), temp);
+
+ Label new_space;
+ __ InNewSpace(elements, temp, equal, &new_space);
+
+ __ movq(object, elements);
+ __ RecordWriteHelper(object, index_1, temp);
+ __ RecordWriteHelper(elements, index_2, temp);
+
+ __ bind(&new_space);
+ // We are done. Drop elements from the stack, and return undefined.
+ __ addq(rsp, Immediate(3 * kPointerSize));
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+ __ jmp(&done);
+
+ __ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
+
+ __ bind(&done);
context()->Plug(rax);
}
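The tail of the new fast path is the part that matters to the GC: pointer swaps in an old-space backing store must be recorded, while new-space stores need no barrier. A sketch of just that logic, with InNewSpace and RecordWrite as placeholders for the real heap interface:

    extern bool InNewSpace(void* object);
    extern void RecordWrite(void* object, void** slot);

    // Swap two element slots, emitting write barriers only when the backing
    // store lives in old space, mirroring the InNewSpace branch above.
    void SwapWithBarrier(void* elements, void** slot_a, void** slot_b) {
      void* tmp = *slot_a;
      *slot_a = *slot_b;
      *slot_b = tmp;
      if (!InNewSpace(elements)) {
        RecordWrite(elements, slot_a);
        RecordWrite(elements, slot_b);
      }
    }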
void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
@@ -2887,11 +3151,11 @@
ASSERT_NE(NULL, args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- Top::global_context()->jsfunction_result_caches());
+ isolate()->global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
context()->Plug(rax);
return;
@@ -2964,14 +3228,14 @@
__ j(not_equal, &fail);
__ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
__ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
__ j(equal, &ok);
__ bind(&fail);
- __ Move(rax, Factory::false_value());
+ __ Move(rax, isolate()->factory()->false_value());
__ jmp(&done);
__ bind(&ok);
- __ Move(rax, Factory::true_value());
+ __ Move(rax, isolate()->factory()->true_value());
__ bind(&done);
context()->Plug(rax);
}
@@ -2998,23 +3262,307 @@
}
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
-
VisitForAccumulatorValue(args->at(0));
+ if (FLAG_debug_code) {
+ __ AbortIfNotString(rax);
+ }
+
__ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
ASSERT(String::kHashShift >= kSmiTagSize);
__ IndexFromHash(rax, rax);
context()->Plug(rax);
}
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
- context()->Plug(Heap::kUndefinedValueRootIndex);
+ Label bailout, return_result, done, one_char_separator, long_separator,
+ non_trivial_array, not_size_one_array, loop,
+ loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
+ ASSERT(args->length() == 2);
+ // We will leave the separator on the stack until the end of the function.
+ VisitForStackValue(args->at(1));
+ // Load the array into rax (aliased as 'array' below).
+ VisitForAccumulatorValue(args->at(0));
+ // All aliases of the same register have disjoint lifetimes.
+ Register array = rax;
+ Register elements = no_reg; // Will be rax.
+
+ Register index = rdx;
+
+ Register string_length = rcx;
+
+ Register string = rsi;
+
+ Register scratch = rbx;
+
+ Register array_length = rdi;
+ Register result_pos = no_reg; // Will be rdi.
+
+ Operand separator_operand = Operand(rsp, 2 * kPointerSize);
+ Operand result_operand = Operand(rsp, 1 * kPointerSize);
+ Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
+ // Separator operand is already pushed. Make room for the two
+ // other stack fields, and clear the direction flag in anticipation
+ // of calling CopyBytes.
+ __ subq(rsp, Immediate(2 * kPointerSize));
+ __ cld();
+ // Check that the array is a JSArray
+ __ JumpIfSmi(array, &bailout);
+ __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
+ __ j(not_equal, &bailout);
+
+ // Check that the array has fast elements.
+ __ testb(FieldOperand(scratch, Map::kBitField2Offset),
+ Immediate(1 << Map::kHasFastElements));
+ __ j(zero, &bailout);
+
+ // Array has fast elements, so its length must be a smi.
+ // If the array has length zero, return the empty string.
+ __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
+ __ SmiCompare(array_length, Smi::FromInt(0));
+ __ j(not_zero, &non_trivial_array);
+ __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
+ __ jmp(&return_result);
+
+ // Save the array length on the stack.
+ __ bind(&non_trivial_array);
+ __ SmiToInteger32(array_length, array_length);
+ __ movl(array_length_operand, array_length);
+
+ // Save the FixedArray containing array's elements.
+ // End of array's live range.
+ elements = array;
+ __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
+ array = no_reg;
+
+
+ // Check that all array elements are sequential ASCII strings, and
+ // accumulate the sum of their lengths, as a smi-encoded value.
+ __ Set(index, 0);
+ __ Set(string_length, 0);
+ // Loop condition: while (index < array_length).
+ // Live loop registers: index(int32), array_length(int32), string(String*),
+ // scratch, string_length(int32), elements(FixedArray*).
+ if (FLAG_debug_code) {
+ __ cmpq(index, array_length);
+ __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
+ }
+ __ bind(&loop);
+ __ movq(string, FieldOperand(elements,
+ index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ JumpIfSmi(string, &bailout);
+ __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
+ __ andb(scratch, Immediate(
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
+ __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
+ __ j(not_equal, &bailout);
+ __ AddSmiField(string_length,
+ FieldOperand(string, SeqAsciiString::kLengthOffset));
+ __ j(overflow, &bailout);
+ __ incl(index);
+ __ cmpl(index, array_length);
+ __ j(less, &loop);
+
+ // Live registers:
+ // string_length: Sum of string lengths.
+ // elements: FixedArray of strings.
+ // index: Array length.
+ // array_length: Array length.
+
+ // If array_length is 1, return elements[0], a string.
+ __ cmpl(array_length, Immediate(1));
+ __ j(not_equal, &not_size_one_array);
+ __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
+ __ jmp(&return_result);
+
+ __ bind(&not_size_one_array);
+
+ // End of array_length live range.
+ result_pos = array_length;
+ array_length = no_reg;
+
+ // Live registers:
+ // string_length: Sum of string lengths.
+ // elements: FixedArray of strings.
+ // index: Array length.
+
+ // Check that the separator is a sequential ASCII string.
+ __ movq(string, separator_operand);
+ __ JumpIfSmi(string, &bailout);
+ __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
+ __ andb(scratch, Immediate(
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
+ __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
+ __ j(not_equal, &bailout);
+
+ // Live registers:
+ // string_length: Sum of string lengths.
+ // elements: FixedArray of strings.
+ // index: Array length.
+ // string: Separator string.
+
+ // Add (separator length times (array_length - 1)) to string_length.
+ __ SmiToInteger32(scratch,
+ FieldOperand(string, SeqAsciiString::kLengthOffset));
+ __ decl(index);
+ __ imull(scratch, index);
+ __ j(overflow, &bailout);
+ __ addl(string_length, scratch);
+ __ j(overflow, &bailout);
+
+ // Live registers and stack values:
+ // string_length: Total length of result string.
+ // elements: FixedArray of strings.
+ __ AllocateAsciiString(result_pos, string_length, scratch,
+ index, string, &bailout);
+ __ movq(result_operand, result_pos);
+ __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
+
+ __ movq(string, separator_operand);
+ __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
+ Smi::FromInt(1));
+ __ j(equal, &one_char_separator);
+ __ j(greater, &long_separator);
+
+
+ // Empty separator case:
+ __ Set(index, 0);
+ __ movl(scratch, array_length_operand);
+ __ jmp(&loop_1_condition);
+ // Loop condition: while (index < array_length).
+ __ bind(&loop_1);
+ // Each iteration of the loop concatenates one string to the result.
+ // Live values in registers:
+ // index: which element of the elements array we are adding to the result.
+ // result_pos: the position to which we are currently copying characters.
+ // elements: the FixedArray of strings we are joining.
+ // scratch: array length.
+
+ // Get string = array[index].
+ __ movq(string, FieldOperand(elements, index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ SmiToInteger32(string_length,
+ FieldOperand(string, String::kLengthOffset));
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ CopyBytes(result_pos, string, string_length);
+ __ incl(index);
+ __ bind(&loop_1_condition);
+ __ cmpl(index, scratch);
+ __ j(less, &loop_1); // Loop while (index < array_length).
+ __ jmp(&done);
+
+ // Generic bailout code used from several places.
+ __ bind(&bailout);
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+ __ jmp(&return_result);
+
+
+ // One-character separator case
+ __ bind(&one_char_separator);
+ // Get the separator ascii character value.
+ // Register "string" holds the separator.
+ __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ Set(index, 0);
+ // Jump into the loop after the code that copies the separator, so the first
+ // element is not preceded by a separator.
+ __ jmp(&loop_2_entry);
+ // Loop condition: while (index < length).
+ __ bind(&loop_2);
+ // Each iteration of the loop concatenates one string to the result.
+ // Live values in registers:
+ // elements: The FixedArray of strings we are joining.
+ // index: which element of the elements array we are adding to the result.
+ // result_pos: the position to which we are currently copying characters.
+ // scratch: Separator character.
+
+ // Copy the separator character to the result.
+ __ movb(Operand(result_pos, 0), scratch);
+ __ incq(result_pos);
+
+ __ bind(&loop_2_entry);
+ // Get string = array[index].
+ __ movq(string, FieldOperand(elements, index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ SmiToInteger32(string_length,
+ FieldOperand(string, String::kLengthOffset));
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ CopyBytes(result_pos, string, string_length);
+ __ incl(index);
+ __ cmpl(index, array_length_operand);
+ __ j(less, &loop_2); // End while (index < length).
+ __ jmp(&done);
+
+
+ // Long separator case (separator is more than one character).
+ __ bind(&long_separator);
+
+ // Make elements point to end of elements array, and index
+ // count from -array_length to zero, so we don't need to maintain
+ // a loop limit.
+ __ movl(index, array_length_operand);
+ __ lea(elements, FieldOperand(elements, index, times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ neg(index);
+
+ // Replace separator string with pointer to its first character, and
+ // make scratch be its length.
+ __ movq(string, separator_operand);
+ __ SmiToInteger32(scratch,
+ FieldOperand(string, String::kLengthOffset));
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ movq(separator_operand, string);
+
+ // Jump into the loop after the code that copies the separator, so the first
+ // element is not preceded by a separator.
+ __ jmp(&loop_3_entry);
+ // Loop condition: while (index < length).
+ __ bind(&loop_3);
+ // Each iteration of the loop concatenates one string to the result.
+ // Live values in registers:
+ // index: which element of the elements array we are adding to the result.
+ // result_pos: the position to which we are currently copying characters.
+ // scratch: Separator length.
+ // separator_operand (rsp[0x10]): Address of first char of separator.
+
+ // Copy the separator to the result.
+ __ movq(string, separator_operand);
+ __ movl(string_length, scratch);
+ __ CopyBytes(result_pos, string, string_length, 2);
+
+ __ bind(&loop_3_entry);
+ // Get string = array[index].
+ __ movq(string, Operand(elements, index, times_pointer_size, 0));
+ __ SmiToInteger32(string_length,
+ FieldOperand(string, String::kLengthOffset));
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ CopyBytes(result_pos, string, string_length);
+ __ incq(index);
+ __ j(not_equal, &loop_3); // Loop while (index < 0).
+
+ __ bind(&done);
+ __ movq(rax, result_operand);
+
+ __ bind(&return_result);
+ // Drop temp values from the stack, and restore context register.
+ __ addq(rsp, Immediate(3 * kPointerSize));
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ context()->Plug(rax);
}
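Stripped of register allocation, the fast path above is an ordinary join with an overflow-checked length pre-pass and three separator loops (empty, one character, longer). A compact sketch of that shape, using std::string in place of raw SeqAsciiString bytes:

    #include <cstddef>
    #include <cstdint>
    #include <string>
    #include <vector>

    // Returns false where the generated code bails out and hands undefined
    // back to the JavaScript caller for the generic slow path.
    bool FastJoin(const std::vector<std::string>& elements,
                  const std::string& separator,
                  std::string* out) {
      out->clear();
      if (elements.empty()) return true;  // empty array => empty string
      uint64_t total =
          static_cast<uint64_t>(separator.size()) * (elements.size() - 1);
      for (const std::string& s : elements) total += s.size();
      if (total > UINT32_MAX) return false;  // mirrors the overflow bailouts
      out->reserve(total);
      for (std::size_t i = 0; i < elements.size(); ++i) {
        if (i > 0) out->append(separator);  // one-char and long-separator loops
        out->append(elements[i]);
      }
      return true;
    }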
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
@@ -3041,11 +3589,12 @@
if (expr->is_jsruntime()) {
// Call the JS runtime function using a call IC.
__ Move(rcx, expr->name());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
+ Handle<Code> ic =
+ ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
} else {
__ CallRuntime(expr->function(), arg_count);
@@ -3257,11 +3806,15 @@
}
}
// We need a second deoptimization point after loading the value
// in case evaluating the property load may have a side effect.
- PrepareForBailout(expr->increment(), TOS_REG);
+ if (assign_type == VARIABLE) {
+ PrepareForBailout(expr->expression(), TOS_REG);
+ } else {
+ PrepareForBailout(expr->increment(), TOS_REG);
+ }
// Call ToNumber only if operand is not a smi.
NearLabel no_conversion;
Condition is_smi;
is_smi = masm_->CheckSmi(rax);
@@ -3353,11 +3906,13 @@
}
break;
case NAMED_PROPERTY: {
__ Move(rcx, prop->key()->AsLiteral()->handle());
__ pop(rdx);
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
context()->PlugTOS();
@@ -3368,11 +3923,13 @@
break;
}
case KEYED_PROPERTY: {
__ pop(rcx);
__ pop(rdx);
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
context()->PlugTOS();
@@ -3393,11 +3950,11 @@
if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
Comment cmnt(masm_, "Global variable");
__ Move(rcx, proxy->name());
__ movq(rax, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailout(expr, TOS_REG);
context()->Plug(rax);
@@ -3446,66 +4003,53 @@
{ AccumulatorValueContext context(this);
VisitForTypeofValue(left_unary->expression());
}
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- if (check->Equals(Heap::number_symbol())) {
- Condition is_smi = masm_->CheckSmi(rax);
- __ j(is_smi, if_true);
+ if (check->Equals(isolate()->heap()->number_symbol())) {
+ __ JumpIfSmi(rax, if_true);
__ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
__ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
Split(equal, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::string_symbol())) {
- Condition is_smi = masm_->CheckSmi(rax);
- __ j(is_smi, if_false);
+ } else if (check->Equals(isolate()->heap()->string_symbol())) {
+ __ JumpIfSmi(rax, if_false);
// Check for undetectable objects => false.
- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
+ __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
+ __ j(above_equal, if_false);
__ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
- __ j(not_zero, if_false);
- __ CmpInstanceType(rdx, FIRST_NONSTRING_TYPE);
- Split(below, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::boolean_symbol())) {
+ Split(zero, if_true, if_false, fall_through);
+ } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
__ j(equal, if_true);
__ CompareRoot(rax, Heap::kFalseValueRootIndex);
Split(equal, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::undefined_symbol())) {
+ } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, if_true);
- Condition is_smi = masm_->CheckSmi(rax);
- __ j(is_smi, if_false);
+ __ JumpIfSmi(rax, if_false);
// Check for undetectable objects => true.
__ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
__ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
Split(not_zero, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::function_symbol())) {
- Condition is_smi = masm_->CheckSmi(rax);
- __ j(is_smi, if_false);
- __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
- __ j(equal, if_true);
- // Regular expressions => 'function' (they are callable).
- __ CmpInstanceType(rdx, JS_REGEXP_TYPE);
- Split(equal, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::object_symbol())) {
- Condition is_smi = masm_->CheckSmi(rax);
- __ j(is_smi, if_false);
+ } else if (check->Equals(isolate()->heap()->function_symbol())) {
+ __ JumpIfSmi(rax, if_false);
+ __ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
+ Split(above_equal, if_true, if_false, fall_through);
+ } else if (check->Equals(isolate()->heap()->object_symbol())) {
+ __ JumpIfSmi(rax, if_false);
__ CompareRoot(rax, Heap::kNullValueRootIndex);
__ j(equal, if_true);
- // Regular expressions => 'function', not 'object'.
- __ CmpObjectType(rax, JS_REGEXP_TYPE, rdx);
- __ j(equal, if_false);
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdx);
+ __ j(below, if_false);
+ __ CmpInstanceType(rdx, FIRST_FUNCTION_CLASS_TYPE);
+ __ j(above_equal, if_false);
// Check for undetectable objects => false.
__ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
- __ j(not_zero, if_false);
- // Check for JS objects => true.
- __ CmpInstanceType(rdx, FIRST_JS_OBJECT_TYPE);
- __ j(below, if_false);
- __ CmpInstanceType(rdx, LAST_JS_OBJECT_TYPE);
- Split(below_equal, if_true, if_false, fall_through);
+ Split(zero, if_true, if_false, fall_through);
} else {
if (if_false != fall_through) __ jmp(if_false);
}
return true;
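Condensed, the rewritten checks form a decision table over an assumed ordering of instance types. The constants below are placeholders (only their relative order matters); the new function case shows why regexps now share the 'function' answer:

    // Assumed ordering: strings < FIRST_NONSTRING_TYPE, plain objects in
    // [FIRST_JS_OBJECT_TYPE, FIRST_FUNCTION_CLASS_TYPE), and everything from
    // FIRST_FUNCTION_CLASS_TYPE up (functions, regexps) is callable.
    const int FIRST_NONSTRING_TYPE = 64;        // placeholder value
    const int FIRST_JS_OBJECT_TYPE = 128;       // placeholder value
    const int FIRST_FUNCTION_CLASS_TYPE = 192;  // placeholder value

    struct Value {
      bool is_smi, is_heap_number, is_true, is_false;
      bool is_undefined, is_null, is_undetectable;
      int instance_type;  // meaningful only for non-smi heap objects
    };

    const char* TypeofAnswer(const Value& v) {
      if (v.is_smi || v.is_heap_number) return "number";
      if (v.is_true || v.is_false) return "boolean";
      if (v.is_undefined || v.is_undetectable) return "undefined";
      if (v.instance_type < FIRST_NONSTRING_TYPE) return "string";
      if (v.instance_type >= FIRST_FUNCTION_CLASS_TYPE) return "function";
      if (v.is_null || v.instance_type >= FIRST_JS_OBJECT_TYPE) return "object";
      return "object";  // remaining oddballs resolve via the checks above
    }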
@@ -3670,22 +4214,23 @@
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
+ Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(&Counters::named_load_full, 1);
+ __ IncrementCounter(counters->named_load_full(), 1);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(&Counters::keyed_load_full, 1);
+ __ IncrementCounter(counters->keyed_load_full(), 1);
break;
case Code::STORE_IC:
- __ IncrementCounter(&Counters::named_store_full, 1);
+ __ IncrementCounter(counters->named_store_full(), 1);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(&Counters::keyed_store_full, 1);
+ __ IncrementCounter(counters->keyed_store_full(), 1);
default:
break;
}
__ call(ic, mode);
@@ -3713,9 +4258,26 @@
}
}
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ Counters* counters = isolate()->counters();
+ switch (ic->kind()) {
+ case Code::LOAD_IC:
+ __ IncrementCounter(counters->named_load_full(), 1);
+ break;
+ case Code::KEYED_LOAD_IC:
+ __ IncrementCounter(counters->keyed_load_full(), 1);
+ break;
+ case Code::STORE_IC:
+ __ IncrementCounter(counters->named_store_full(), 1);
+ break;
+ case Code::KEYED_STORE_IC:
+ __ IncrementCounter(counters->keyed_store_full(), 1);
+ default:
+ break;
+ }
+
__ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
__ nop(); // Signals no inlined code.