Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 1144063002: Cleanup interface descriptors to reflect that vectors are part of loads. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fixes for test failures. Created 5 years, 7 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_ARM

 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
(...skipping 1471 matching lines...)
     __ Ret(HasArgsInRegisters() ? 0 : 2);
   }
 }


 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
   Register receiver = LoadDescriptor::ReceiverRegister();
   // Ensure that the vector and slot registers won't be clobbered before
   // calling the miss handler.
-  DCHECK(!AreAliased(r4, r5, VectorLoadICDescriptor::VectorRegister(),
-                     VectorLoadICDescriptor::SlotRegister()));
+  DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(),
+                     LoadWithVectorDescriptor::SlotRegister()));

   NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r4,
                                                           r5, &miss);
   __ bind(&miss);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
 }
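
Note on the rename above: the only change in this hunk is that the registers carrying the type feedback vector and slot are now named by LoadWithVectorDescriptor instead of VectorLoadICDescriptor, reflecting that the vector is an ordinary part of a load's calling convention. Conceptually, such a descriptor is just a fixed assignment of logical parameters to machine registers, which is what lets a DCHECK like the one above prove that the scratch registers r4/r5 cannot clobber the vector or slot. Below is a simplified, hypothetical model of that idea (Reg, LoadWithVectorDescriptorModel and this AreAliased are illustrative stand-ins, not V8's actual types); the register choices r1/r2/r3/r0 match the comments further down in this file.

    #include <cassert>
    #include <initializer_list>

    // Hypothetical register model (not V8's real types).
    enum class Reg { r0, r1, r2, r3, r4, r5 };

    // A descriptor pins each logical parameter of a vector-based load to a register.
    struct LoadWithVectorDescriptorModel {
      static constexpr Reg ReceiverRegister() { return Reg::r1; }
      static constexpr Reg NameRegister() { return Reg::r2; }
      static constexpr Reg VectorRegister() { return Reg::r3; }
      static constexpr Reg SlotRegister() { return Reg::r0; }
    };

    // Simplified AreAliased: true if any two registers in the list coincide.
    static bool AreAliased(std::initializer_list<Reg> regs) {
      for (auto i = regs.begin(); i != regs.end(); ++i)
        for (auto j = i + 1; j != regs.end(); ++j)
          if (*i == *j) return true;
      return false;
    }

    int main() {
      // Mirrors the DCHECK above: the scratch registers r4/r5 must not alias
      // the vector or slot registers before the miss handler is reached.
      assert(!AreAliased({Reg::r4, Reg::r5,
                          LoadWithVectorDescriptorModel::VectorRegister(),
                          LoadWithVectorDescriptorModel::SlotRegister()}));
      return 0;
    }
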


 void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
   // Return address is in lr.
   Label miss;

   Register receiver = LoadDescriptor::ReceiverRegister();
   Register index = LoadDescriptor::NameRegister();
   Register scratch = r5;
   Register result = r0;
   DCHECK(!scratch.is(receiver) && !scratch.is(index));
-  DCHECK(!scratch.is(VectorLoadICDescriptor::VectorRegister()) &&
-         result.is(VectorLoadICDescriptor::SlotRegister()));
+  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
+         result.is(LoadWithVectorDescriptor::SlotRegister()));

   // StringCharAtGenerator doesn't use the result register until it's passed
   // the different miss possibilities. If it did, we would have a conflict
   // when FLAG_vector_ics is true.
   StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                           &miss,  // When not a string.
                                           &miss,  // When not a number.
                                           &miss,  // When index out of range.
                                           STRING_INDEX_IS_ARRAY_INDEX,
                                           RECEIVER_IS_STRING);
(...skipping 1421 matching lines...)
   // Index is not a smi.
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
   __ CheckMap(index_,
               result_,
               Heap::kHeapNumberMapRootIndex,
               index_not_number_,
               DONT_DO_SMI_CHECK);
   call_helper.BeforeCall(masm);
   if (embed_mode == PART_OF_IC_HANDLER) {
-    __ Push(VectorLoadICDescriptor::VectorRegister(),
-            VectorLoadICDescriptor::SlotRegister(), object_, index_);
+    __ Push(LoadWithVectorDescriptor::VectorRegister(),
+            LoadWithVectorDescriptor::SlotRegister(), object_, index_);
   } else {
     // index_ is consumed by runtime conversion function.
     __ Push(object_, index_);
   }
   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
   } else {
     DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
     // NumberToSmi discards numbers that are not exact integers.
     __ CallRuntime(Runtime::kNumberToSmi, 1);
   }
   // Save the conversion result before the pop instructions below
   // have a chance to overwrite it.
   __ Move(index_, r0);
   if (embed_mode == PART_OF_IC_HANDLER) {
-    __ Pop(VectorLoadICDescriptor::VectorRegister(),
-           VectorLoadICDescriptor::SlotRegister(), object_);
+    __ Pop(LoadWithVectorDescriptor::VectorRegister(),
+           LoadWithVectorDescriptor::SlotRegister(), object_);
   } else {
     __ pop(object_);
   }
   // Reload the instance type.
   __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
   __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
   call_helper.AfterCall(masm);
   // If index is still not a smi, it must be out of range.
   __ JumpIfNotSmi(index_, index_out_of_range_);
   // Otherwise, return to the fast path.
(...skipping 1371 matching lines...)
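
A note on the PART_OF_IC_HANDLER branch in the chunk above: the vector and slot registers are live across the runtime call that converts the index, so they are pushed beforehand and popped afterwards, while index_ is deliberately not restored (the runtime consumes it and the converted value is moved back in from r0). A minimal sketch of that save/call/restore pattern, assuming a toy register file and stack rather than the MacroAssembler API:

    #include <cassert>
    #include <vector>

    // Toy register file and stack; only the save/restore shape matters here.
    struct State {
      int vector_reg = 7;  // stands in for LoadWithVectorDescriptor::VectorRegister()
      int slot_reg = 3;    // stands in for LoadWithVectorDescriptor::SlotRegister()
      int object_reg = 42;
      int index_reg = -5;  // a non-smi index about to be converted
      std::vector<int> stack;
    };

    // A runtime call may clobber every register; it consumes its argument from
    // the stack and returns the converted index (a pretend HeapNumber -> Smi).
    static int CallRuntimeNumberToSmi(State* s) {
      int arg = s->stack.back();
      s->stack.pop_back();
      s->vector_reg = s->slot_reg = s->object_reg = s->index_reg = 0;  // clobbered
      return arg < 0 ? -arg : arg;
    }

    int main() {
      State s;
      // __ Push(vector, slot, object_, index_)  -- the PART_OF_IC_HANDLER branch.
      s.stack = {s.vector_reg, s.slot_reg, s.object_reg, s.index_reg};
      int r0 = CallRuntimeNumberToSmi(&s);  // index_ is consumed by the runtime
      s.index_reg = r0;                     // __ Move(index_, r0)
      // __ Pop(vector, slot, object_)      -- restore in reverse order of the push.
      s.object_reg = s.stack.back(); s.stack.pop_back();
      s.slot_reg = s.stack.back();   s.stack.pop_back();
      s.vector_reg = s.stack.back(); s.stack.pop_back();
      assert(s.vector_reg == 7 && s.slot_reg == 3 && s.object_reg == 42);
      assert(s.index_reg == 5);
      return 0;
    }
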
     __ add(r1, r1, Operand(1));
   }
   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));
   __ add(sp, sp, r1);
   __ Ret();
 }


 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
-  VectorRawLoadStub stub(isolate(), state());
+  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }


 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
-  VectorRawKeyedLoadStub stub(isolate());
+  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  KeyedLoadICStub stub(isolate());
   stub.GenerateForTrampoline(masm);
 }


 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
   EmitLoadTypeFeedbackVector(masm, r2);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }


 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
   EmitLoadTypeFeedbackVector(masm, r2);
   CallIC_ArrayStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }

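
The four trampolines above share one shape: load the closure's type feedback vector into the register the descriptor assigns to it, then emit (or jump to) the full IC stub, which assumes that register is already populated. A rough, hypothetical C++ rendering of that control flow; MasmModel, EmitLoadTypeFeedbackVectorModel and LoadICStubModel are stand-ins for the real MacroAssembler, helper and stub classes, and r3 is the vector register named in the comments below:

    #include <cstdio>

    // Stand-in types; the real MacroAssembler/stub classes are far richer.
    struct MasmModel { /* would hold the code buffer */ };

    // Materializes the closure's type feedback vector into the given "register".
    static void EmitLoadTypeFeedbackVectorModel(MasmModel* masm, const char* reg) {
      std::printf("  load feedback vector -> %s\n", reg);
    }

    struct LoadICStubModel {
      // The trampoline variant assumes the vector register is already populated.
      void GenerateForTrampoline(MasmModel* masm) {
        (void)masm;
        std::printf("  ...body of the load IC, using vector + slot registers...\n");
      }
    };

    // Mirrors the control flow of LoadICTrampolineStub::Generate above.
    static void GenerateLoadICTrampoline(MasmModel* masm) {
      EmitLoadTypeFeedbackVectorModel(masm, "r3");  // vector register on ARM
      LoadICStubModel stub;
      stub.GenerateForTrampoline(masm);
    }

    int main() {
      MasmModel masm;
      GenerateLoadICTrampoline(&masm);
      return 0;
    }
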
-void VectorRawLoadStub::Generate(MacroAssembler* masm) {
-  GenerateImpl(masm, false);
-}
+void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


-void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) {
+void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   GenerateImpl(masm, true);
 }


 static void HandleArrayCases(MacroAssembler* masm, Register receiver,
                              Register key, Register vector, Register slot,
                              Register feedback, Register receiver_map,
                              Register scratch1, Register scratch2,
                              bool is_polymorphic, Label* miss) {
   // feedback initially contains the feedback array
(...skipping 73 matching lines...)
   __ cmp(cached_map, receiver_map);
   __ b(ne, try_array);
   Register handler = feedback;
   __ add(handler, vector, Operand::PointerOffsetFromSmiKey(slot));
   __ ldr(handler,
          FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
   __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
 }


-void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
-  Register receiver = VectorLoadICDescriptor::ReceiverRegister();  // r1
-  Register name = VectorLoadICDescriptor::NameRegister();          // r2
-  Register vector = VectorLoadICDescriptor::VectorRegister();      // r3
-  Register slot = VectorLoadICDescriptor::SlotRegister();          // r0
+void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // r1
+  Register name = LoadWithVectorDescriptor::NameRegister();          // r2
+  Register vector = LoadWithVectorDescriptor::VectorRegister();      // r3
+  Register slot = LoadWithVectorDescriptor::SlotRegister();          // r0
   Register feedback = r4;
   Register receiver_map = r5;
   Register scratch1 = r8;

   __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
   __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

   // Try to quickly handle the monomorphic case without knowing for sure
   // if we have a weak cell in feedback. We do know it's safe to look
   // at WeakCell::kValueOffset.
(...skipping 22 matching lines...)
   __ bind(&miss);
   LoadIC::GenerateMiss(masm);


   __ bind(&load_smi_map);
   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   __ jmp(&compare_map);
 }
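
The fast path generated above (together with the monomorphic-case helper whose tail is visible before GenerateImpl) implements a simple lookup: feedback[slot] may hold a WeakCell whose value is the expected receiver map, feedback[slot + 1] holds the handler code to jump to when the maps match, and anything else falls through to the array (polymorphic) and miss cases. A hedged, data-structure-level sketch of that lookup, with the feedback slot modelled as plain structs rather than V8 heap objects:

    #include <cassert>
    #include <cstdint>

    // Hypothetical models of the heap objects involved (not V8's layouts).
    struct Map { uint32_t instance_type; };
    struct WeakCell { const Map* value; };  // a cleared cell is modelled as nullptr
    using Handler = int (*)();

    struct FeedbackSlot {
      const WeakCell* cell;  // feedback[slot]
      Handler handler;       // feedback[slot + 1]
    };

    static int MonomorphicHandler() { return 1; }
    static int Miss() { return -1; }

    // Mirrors the generated fast path: compare the cached map with the receiver's
    // map and tail-call the handler on a match; otherwise fall back to the miss
    // (the polymorphic/megamorphic cases are elided here, as in the diff).
    static int LoadICLookup(const FeedbackSlot& slot, const Map* receiver_map) {
      if (slot.cell != nullptr && slot.cell->value == receiver_map) {
        return slot.handler();  // __ add(pc, handler, ...): jump into the handler
      }
      return Miss();
    }

    int main() {
      Map receiver_map{42};
      WeakCell cell{&receiver_map};
      FeedbackSlot slot{&cell, &MonomorphicHandler};
      assert(LoadICLookup(slot, &receiver_map) == 1);   // map match -> handler
      Map other_map{7};
      assert(LoadICLookup(slot, &other_map) == -1);     // mismatch -> miss path
      return 0;
    }
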


-void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) {
+void KeyedLoadICStub::Generate(MacroAssembler* masm) {
   GenerateImpl(masm, false);
 }


-void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) {
+void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   GenerateImpl(masm, true);
 }


-void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
-  Register receiver = VectorLoadICDescriptor::ReceiverRegister();  // r1
-  Register key = VectorLoadICDescriptor::NameRegister();           // r2
-  Register vector = VectorLoadICDescriptor::VectorRegister();      // r3
-  Register slot = VectorLoadICDescriptor::SlotRegister();          // r0
+void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // r1
+  Register key = LoadWithVectorDescriptor::NameRegister();           // r2
+  Register vector = LoadWithVectorDescriptor::VectorRegister();      // r3
+  Register slot = LoadWithVectorDescriptor::SlotRegister();          // r0
   Register feedback = r4;
   Register receiver_map = r5;
   Register scratch1 = r8;

   __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
   __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

   // Try to quickly handle the monomorphic case without knowing for sure
   // if we have a weak cell in feedback. We do know it's safe to look
   // at WeakCell::kValueOffset.
(...skipping 744 matching lines...)
                               kStackUnwindSpace, NULL,
                               MemOperand(fp, 6 * kPointerSize), NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM