vendor/v8/src/arm/ic-arm.cc in libv8-3.10.8.0 vs vendor/v8/src/arm/ic-arm.cc in libv8-3.11.8.0

- old
+ new

@@ -772,11 +772,11 @@
   // Check that the object is some kind of JSObject.
   __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
   __ b(lt, slow_case);

   // Check that the key is a positive smi.
-  __ tst(key, Operand(0x8000001));
+  __ tst(key, Operand(0x80000001));
   __ b(ne, slow_case);

   // Load the elements into scratch1 and check its map.
   Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
   __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));

@@ -1247,11 +1247,11 @@
   //  -- lr     : return address
   // -----------------------------------
   // Must return the modified receiver in r0.
   if (!FLAG_trace_elements_transitions) {
     Label fail;
-    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+    ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
     __ mov(r0, r2);
     __ Ret();
     __ bind(&fail);
   }

@@ -1460,31 +1460,31 @@
   // Transition the array appropriately depending on the value type.
   __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
   __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
   __ b(ne, &non_double_value);

-  // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+  // Value is a double. Transition FAST_SMI_ELEMENTS ->
   // FAST_DOUBLE_ELEMENTS and complete the store.
-  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                          FAST_DOUBLE_ELEMENTS,
                                          receiver_map,
                                          r4,
                                          &slow);
   ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
-  ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+  ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
   __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ jmp(&fast_double_without_map_check);

   __ bind(&non_double_value);
-  // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
-  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                          FAST_ELEMENTS,
                                          receiver_map,
                                          r4,
                                          &slow);
   ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
-  ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
   __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ jmp(&finish_object_store);

   __ bind(&transition_double_elements);
   // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a

@@ -1688,16 +1688,16 @@
   }
 #endif

   // Activate inlined smi code.
   if (previous_state == UNINITIALIZED) {
-    PatchInlinedSmiCode(address());
+    PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
   }
 }


-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
   Address cmp_instruction_address =
       address + Assembler::kCallTargetAddressOffset;

   // If the instruction following the call is not a cmp rx, #yyy, nothing
   // was inlined.

@@ -1727,37 +1727,34 @@
   Address patch_address =
       cmp_instruction_address - delta * Instruction::kInstrSize;
   Instr instr_at_patch = Assembler::instr_at(patch_address);
   Instr branch_instr =
       Assembler::instr_at(patch_address + Instruction::kInstrSize);
-  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
-  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
-            Assembler::GetRm(instr_at_patch).code());
+  // This is patching a conditional "jump if not smi/jump if smi" site.
+  // Enabling by changing from
+  //   cmp rx, rx
+  //   b eq/ne, <target>
+  // to
+  //   tst rx, #kSmiTagMask
+  //   b ne/eq, <target>
+  // and vice-versa to be disabled again.
+  CodePatcher patcher(patch_address, 2);
+  Register reg = Assembler::GetRn(instr_at_patch);
+  if (check == ENABLE_INLINED_SMI_CHECK) {
+    ASSERT(Assembler::IsCmpRegister(instr_at_patch));
+    ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
+              Assembler::GetRm(instr_at_patch).code());
+    patcher.masm()->tst(reg, Operand(kSmiTagMask));
+  } else {
+    ASSERT(check == DISABLE_INLINED_SMI_CHECK);
+    ASSERT(Assembler::IsTstImmediate(instr_at_patch));
+    patcher.masm()->cmp(reg, reg);
+  }
   ASSERT(Assembler::IsBranch(branch_instr));
   if (Assembler::GetCondition(branch_instr) == eq) {
-    // This is patching a "jump if not smi" site to be active.
-    // Changing
-    //   cmp rx, rx
-    //   b eq, <target>
-    // to
-    //   tst rx, #kSmiTagMask
-    //   b ne, <target>
-    CodePatcher patcher(patch_address, 2);
-    Register reg = Assembler::GetRn(instr_at_patch);
-    patcher.masm()->tst(reg, Operand(kSmiTagMask));
     patcher.EmitCondition(ne);
   } else {
     ASSERT(Assembler::GetCondition(branch_instr) == ne);
-    // This is patching a "jump if smi" site to be active.
-    // Changing
-    //   cmp rx, rx
-    //   b ne, <target>
-    // to
-    //   tst rx, #kSmiTagMask
-    //   b eq, <target>
-    CodePatcher patcher(patch_address, 2);
-    Register reg = Assembler::GetRn(instr_at_patch);
-    patcher.masm()->tst(reg, Operand(kSmiTagMask));
     patcher.EmitCondition(eq);
   }
 }
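
Note on the @@ -772 hunk: the old constant 0x8000001 is missing a zero and so tested bit 27 instead of the sign bit. On 32-bit V8 a smi keeps its tag in bit 0 (kSmiTagMask), so a key is a positive smi exactly when bit 0 and bit 31 are both clear, which is what the corrected mask 0x80000001 checks. A minimal standalone sketch (plain C++, not V8 code; the function name is made up):

    #include <cstdint>

    // A 32-bit tagged word is a non-negative smi iff the smi tag bit
    // (bit 0) and the sign bit (bit 31) are both clear.
    bool IsPositiveSmi(uint32_t tagged) {
      return (tagged & 0x80000001u) == 0;
    }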
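
Note on the @@ -1688 and @@ -1727 hunks: PatchInlinedSmiCode gains an InlinedSmiCheck argument (ENABLE_INLINED_SMI_CHECK or DISABLE_INLINED_SMI_CHECK), so the same routine can now turn an inlined smi check off again, not just on. The mechanics follow from the ARM condition flags: cmp rx, rx always sets the eq condition, so at a disabled site a b eq branch is always taken and a b ne branch never is; enabling rewrites the pair to tst rx, #kSmiTagMask with the opposite branch condition, making the branch depend on whether rx really is a smi. A toy model of the symmetric toggle (plain C++, not V8 code; PatchSiteState and TogglePatchSite are hypothetical stand-ins for the two-instruction patch site):

    #include <cassert>

    enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };
    // Hypothetical model of the first instruction at the patch site.
    enum PatchSiteState { CMP_RX_RX, TST_RX_SMI_TAG_MASK };

    void TogglePatchSite(PatchSiteState* site, InlinedSmiCheck check) {
      if (check == ENABLE_INLINED_SMI_CHECK) {
        // Mirrors ASSERT(Assembler::IsCmpRegister(instr_at_patch)).
        assert(*site == CMP_RX_RX);
        *site = TST_RX_SMI_TAG_MASK;  // cmp rx, rx -> tst rx, #kSmiTagMask
      } else {
        // Mirrors ASSERT(Assembler::IsTstImmediate(instr_at_patch)).
        assert(*site == TST_RX_SMI_TAG_MASK);
        *site = CMP_RX_RX;            // tst rx, #kSmiTagMask -> cmp rx, rx
      }
      // The trailing eq/ne branch is flipped via patcher.EmitCondition(...).
    }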