vendor/v8/src/x64/builtins-x64.cc in mustang-0.0.1 vs vendor/v8/src/x64/builtins-x64.cc in mustang-0.1.0
- old
+ new
@@ -1,6 +1,6 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
@@ -67,11 +67,11 @@
}
// JumpToExternalReference expects rax to contain the number of arguments
// including the receiver and the extra arguments.
__ addq(rax, Immediate(num_extra_args + 1));
- __ JumpToExternalReference(ExternalReference(id), 1);
+ __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
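
Note: the recurring change throughout this diff is V8's multi-isolate refactor. Lookups that used to hit process-global tables (ExternalReference, Builtins, Counters, Factory) now take an explicit Isolate*, so several VM instances can coexist in one process. A minimal standalone model of the ExternalReference change (hypothetical names, not V8's real headers):

    #include <cstdint>
    #include <map>

    struct Isolate {
      // Each isolate owns its own table of runtime-function addresses.
      std::map<int, uintptr_t> runtime_entries;
    };

    class ExternalReference {
     public:
      // New-style constructor: the address is resolved against the given
      // isolate instead of a process-global table.
      ExternalReference(int id, Isolate* isolate)
          : address_(isolate->runtime_entries[id]) {}
      uintptr_t address() const { return address_; }
     private:
      uintptr_t address_;
    };
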
@@ -96,11 +96,11 @@
// rax: number of arguments
__ bind(&non_function_call);
// Set expected number of arguments to zero (not changing rax).
__ movq(rbx, Immediate(0));
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+ __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
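
Note: Handle<Code>(builtin(...)) call sites become accessor calls on the isolate's Builtins table, which hands back a Handle<Code> directly. A rough sketch of the accessor shape (illustrative, not the real declarations):

    struct Code {};
    template <typename T>
    struct Handle {  // indirect pointer, as V8 handles are
      T** location;
    };

    struct Builtins {
      Code* arguments_adaptor_trampoline_ = nullptr;
      // One accessor per builtin; callers no longer wrap a raw Code*
      // from a static Builtins::builtin(id) lookup themselves.
      Handle<Code> ArgumentsAdaptorTrampoline() {
        return Handle<Code>{&arguments_adaptor_trampoline_};
      }
    };
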
@@ -125,11 +125,11 @@
if (FLAG_inline_new) {
Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address();
+ ExternalReference::debug_step_in_fp_address(masm->isolate());
__ movq(kScratchRegister, debug_step_in_fp);
__ cmpq(Operand(kScratchRegister, 0), Immediate(0));
__ j(not_equal, &rt_call);
#endif
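
Note on the debug_step_in_fp check: while the debugger is stepping, it publishes a frame pointer; if that per-isolate cell is nonzero, the stub bails out of the inlined allocation to the runtime so the debugger can intercept the call. A hedged model (names illustrative):

    #include <cstdint>

    struct Isolate {
      uintptr_t debug_step_in_fp = 0;  // nonzero while step-in is active
    };

    // Mirrors cmpq(Operand(kScratchRegister, 0), Immediate(0)) above.
    bool ShouldTakeRuntimePath(Isolate* isolate) {
      return isolate->debug_step_in_fp != 0;
    }
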
@@ -337,12 +337,12 @@
__ j(greater_equal, &loop);
// Call the function.
if (is_api_function) {
__ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- Handle<Code> code = Handle<Code>(
- Builtins::builtin(Builtins::HandleApiCallConstruct));
+ Handle<Code> code =
+ masm->isolate()->builtins()->HandleApiCallConstruct();
ParameterCount expected(0);
__ InvokeCode(code, expected, expected,
RelocInfo::CODE_TARGET, CALL_FUNCTION);
} else {
ParameterCount actual(rax);
@@ -377,11 +377,12 @@
// Remove caller arguments from the stack and return.
__ pop(rcx);
SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
__ push(rcx);
- __ IncrementCounter(&Counters::constructed_objects, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->constructed_objects(), 1);
__ ret(0);
}
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
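
Note: &Counters::constructed_objects took the address of a static member; the counter cell now lives in per-isolate Counters storage, so isolates stop sharing stats. Sketch of the shape (illustrative names):

    struct StatsCounter {
      int value = 0;
      void Increment(int by) { value += by; }
    };

    struct Counters {
      StatsCounter constructed_objects_;
      StatsCounter* constructed_objects() { return &constructed_objects_; }
    };

    struct Isolate {
      Counters counters_;
      Counters* counters() { return &counters_; }
    };
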
@@ -490,11 +491,11 @@
__ j(not_equal, &loop);
// Invoke the code.
if (is_construct) {
// Expects rdi to hold function pointer.
- __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
+ __ Call(masm->isolate()->builtins()->JSConstructCall(),
RelocInfo::CODE_TARGET);
} else {
ParameterCount actual(rax);
// Function must be in rdi.
__ InvokeFunction(rdi, actual, CALL_FUNCTION);
@@ -628,11 +629,11 @@
// 1. Make sure we have at least one argument.
{ Label done;
__ testq(rax, rax);
__ j(not_zero, &done);
__ pop(rbx);
- __ Push(Factory::undefined_value());
+ __ Push(FACTORY->undefined_value());
__ push(rbx);
__ incq(rax);
__ bind(&done);
}
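
Note: the static Factory calls become the FACTORY macro. From memory of the 3.x sources this expands to the current isolate's factory roughly as below (hedged; check isolate.h for the exact definition):

    struct Factory { /* handle constructors for heap roots */ };

    struct Isolate {
      Factory factory_;
      Factory* factory() { return &factory_; }
      // Thread-local lookup in the real VM.
      static Isolate* Current() {
        static thread_local Isolate isolate;
        return &isolate;
      }
    };

    #define FACTORY (Isolate::Current()->factory())
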
@@ -731,11 +732,11 @@
{ Label function;
__ testq(rdi, rdi);
__ j(not_zero, &function);
__ Set(rbx, 0);
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
- __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+ __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ bind(&function);
}
// 5b. Get the code to call from the function and check that the number of
@@ -746,11 +747,11 @@
FieldOperand(rdx,
SharedFunctionInfo::kFormalParameterCountOffset));
__ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
__ cmpq(rax, rbx);
__ j(not_equal,
- Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+ masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
ParameterCount expected(0);
__ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}
@@ -861,11 +862,12 @@
__ jmp(&entry);
__ bind(&loop);
__ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
// Use inline caching to speed up access to arguments.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Initialize();
__ Call(ic, RelocInfo::CODE_TARGET);
// It is important that we do not have a test instruction after the
// call. A test instruction after the call is used to indicate that
// we have generated an inline version of the keyed load. In this
// case, we know that we are not generating a test instruction next.
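
Note on the "no test instruction after the call" comment: IC patching inspects the first opcode byte at a call's return address and treats a TEST there as a marker meaning "an inlined fast-path version was generated for this site". A hedged sketch of such a probe (the exact opcode convention is illustrative; the real decoding lives in the IC code):

    #include <cstdint>

    // 0xA9 encodes "test eax, imm32" on x86/x64, one plausible marker byte.
    bool CallSiteHasInlinedKeyedLoad(const uint8_t* return_address) {
      return return_address[0] == 0xA9;
    }
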
@@ -933,19 +935,19 @@
// result: JSObject
// scratch1: initial map
// scratch2: start of next object
__ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
__ Move(FieldOperand(result, JSArray::kPropertiesOffset),
- Factory::empty_fixed_array());
+ FACTORY->empty_fixed_array());
// Field JSArray::kElementsOffset is initialized later.
__ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
// If no storage is requested for the elements array just set the empty
// fixed array.
if (initial_capacity == 0) {
__ Move(FieldOperand(result, JSArray::kElementsOffset),
- Factory::empty_fixed_array());
+ FACTORY->empty_fixed_array());
return;
}
// Calculate the location of the elements array and set elements array member
// of the JSArray.
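
Note: the stores above lay out the JSArray header field by field. A rough model of that layout, in the order the offsets imply (illustrative, not V8's real declarations):

    #include <cstdint>

    struct JSArrayHeader {
      void* map;         // JSObject::kMapOffset       -- initial map
      void* properties;  // JSArray::kPropertiesOffset -- empty_fixed_array
      void* elements;    // JSArray::kElementsOffset   -- set later (or empty)
      intptr_t length;   // JSArray::kLengthOffset     -- Smi-encoded length
    };
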
@@ -958,19 +960,19 @@
// stored as a smi.
// result: JSObject
// scratch1: elements array
// scratch2: start of next object
__ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
- Factory::fixed_array_map());
+ FACTORY->fixed_array_map());
__ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
Smi::FromInt(initial_capacity));
// Fill the FixedArray with the hole value. Inline the code if short.
// Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
static const int kLoopUnfoldLimit = 4;
ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
- __ Move(scratch3, Factory::the_hole_value());
+ __ Move(scratch3, FACTORY->the_hole_value());
if (initial_capacity <= kLoopUnfoldLimit) {
// Use a scratch register here to have only one reloc info when unfolding
// the loop.
for (int i = 0; i < initial_capacity; i++) {
__ movq(FieldOperand(scratch1,
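
Note on the loop-unfold limit: for a small compile-time-known capacity the generator emits straight-line stores of the preloaded hole value instead of a counted loop. A standalone stand-in for that decision (names illustrative):

    #include <cstdio>

    // Minimal stand-in for an assembler buffer.
    struct CodeBuffer {
      void EmitStoreSlot(int i) { std::printf("store hole -> slot %d\n", i); }
      void EmitCountedStoreLoop(int n) { std::printf("loop: store hole x%d\n", n); }
    };

    void EmitFillWithHole(CodeBuffer* buf, int capacity) {
      constexpr int kLoopUnfoldLimit = 4;  // mirrors the generator's limit
      if (capacity <= kLoopUnfoldLimit) {
        for (int i = 0; i < capacity; ++i)
          buf->EmitStoreSlot(i);  // unrolled: no counter, no branch
      } else {
        buf->EmitCountedStoreLoop(capacity);  // smaller code, loop overhead
      }
    }
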
@@ -1050,11 +1052,11 @@
// elements_array: initial map
// elements_array_end: start of next object
// array_size: size of array (smi)
__ bind(&allocated);
__ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
- __ Move(elements_array, Factory::empty_fixed_array());
+ __ Move(elements_array, FACTORY->empty_fixed_array());
__ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
// Field JSArray::kElementsOffset is initialized later.
__ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
// Calculate the location of the elements array and set elements array member
@@ -1069,11 +1071,11 @@
// result: JSObject
// elements_array: elements array
// elements_array_end: start of next object
// array_size: size of array (smi)
__ Move(FieldOperand(elements_array, JSObject::kMapOffset),
- Factory::fixed_array_map());
+ FACTORY->fixed_array_map());
Label not_empty_2, fill_array;
__ SmiTest(array_size);
__ j(not_zero, &not_empty_2);
// Length of the FixedArray is the number of pre-allocated elements even
// though the actual JSArray has length 0.
@@ -1090,11 +1092,11 @@
// elements_array: elements array
// elements_array_end: start of next object
__ bind(&fill_array);
if (fill_with_hole) {
Label loop, entry;
- __ Move(scratch, Factory::the_hole_value());
+ __ Move(scratch, FACTORY->the_hole_value());
__ lea(elements_array, Operand(elements_array,
FixedArray::kHeaderSize - kHeapObjectTag));
__ jmp(&entry);
__ bind(&loop);
__ movq(Operand(elements_array, 0), scratch);
@@ -1135,11 +1137,12 @@
rcx,
rdx,
r8,
kPreallocatedArrayElements,
call_generic_code);
- __ IncrementCounter(&Counters::array_function_native, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->array_function_native(), 1);
__ movq(rax, rbx);
__ ret(kPointerSize);
// Check for one argument. Bail out if argument is not smi or if it is
// negative.
@@ -1166,11 +1169,11 @@
rcx,
r8,
r9,
true,
call_generic_code);
- __ IncrementCounter(&Counters::array_function_native, 1);
+ __ IncrementCounter(counters->array_function_native(), 1);
__ movq(rax, rbx);
__ ret(2 * kPointerSize);
// Handle construction of an array from a list of arguments.
__ bind(&argc_two_or_more);
@@ -1188,11 +1191,11 @@
rcx,
r8,
r9,
false,
call_generic_code);
- __ IncrementCounter(&Counters::array_function_native, 1);
+ __ IncrementCounter(counters->array_function_native(), 1);
// rax: argc
// rbx: JSArray
// rcx: elements_array
// r8: elements_array_end (untagged)
@@ -1246,11 +1249,11 @@
// Get the Array function.
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
if (FLAG_debug_code) {
- // Initial map for the builtin Array function shoud be a map.
+ // Initial map for the builtin Array functions should be maps.
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
__ Check(not_smi, "Unexpected initial map for Array function");
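
Note on "Will both indicate a NULL and a Smi": with kSmiTag == 0, a Smi is any word whose low tag bit is clear, and a NULL pointer (all zero bits) passes the same test, so a single smi check rejects both a missing initial map and a Smi where a map was expected:

    #include <cstdint>

    bool LooksLikeSmi(uintptr_t word) {
      constexpr uintptr_t kSmiTagMask = 1;  // low bit, given kSmiTag == 0
      return (word & kSmiTagMask) == 0;     // true for any Smi -- and for NULL
    }
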
@@ -1262,12 +1265,12 @@
ArrayNativeCode(masm, &generic_array_code);
// Jump to the generic array code in case the specialized code cannot handle
// the construction.
__ bind(&generic_array_code);
- Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
- Handle<Code> array_code(code);
+ Handle<Code> array_code =
+ masm->isolate()->builtins()->ArrayCodeGeneric();
__ Jump(array_code, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
@@ -1278,15 +1281,12 @@
// -- rsp[8] : last argument
// -----------------------------------
Label generic_constructor;
if (FLAG_debug_code) {
- // The array construct code is only set for the builtin Array function which
- // does always have a map.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rbx);
- __ cmpq(rdi, rbx);
- __ Check(equal, "Unexpected Array function");
+ // The array construct code is only set for the builtin and internal
+ // Array functions which always have a map.
// Initial map for the builtin Array function should be a map.
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
@@ -1299,12 +1299,12 @@
ArrayNativeCode(masm, &generic_constructor);
// Jump to the generic construct code in case the specialized code cannot
// handle the construction.
__ bind(&generic_constructor);
- Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
- Handle<Code> generic_construct_stub(code);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
__ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
@@ -1354,10 +1354,11 @@
// -- rbx : expected number of arguments
// -- rdx : code entry to call
// -----------------------------------
Label invoke, dont_adapt_arguments;
- __ IncrementCounter(&Counters::arguments_adaptors, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->arguments_adaptors(), 1);
Label enough, too_few;
__ cmpq(rax, rbx);
__ j(less, &too_few);
__ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
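
Note: the trampoline's dispatch, cut off by the diff here, is a three-way choice on actual (rax) vs expected (rbx) argument counts, with the sentinel skipping adaptation entirely. A hedged model of the decision (the sentinel's real value and the padding details live in SharedFunctionInfo and the rest of this function):

    constexpr int kDontAdaptArgumentsSentinel = -1;  // stand-in value

    enum class AdaptorPath { kEnough, kTooFew, kDontAdapt };

    AdaptorPath ClassifyCall(int actual, int expected) {
      if (expected == kDontAdaptArgumentsSentinel)
        return AdaptorPath::kDontAdapt;       // callee copes with any count
      if (actual < expected)
        return AdaptorPath::kTooFew;          // pad missing args (undefined)
      return AdaptorPath::kEnough;            // extras stay on the stack
    }
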