Chromium Code Reviews

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 149413010: A64: Synchronize with r16024. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 73 matching lines...)
84 code->set_stack_slots(GetStackSlotCount()); 84 code->set_stack_slots(GetStackSlotCount());
85 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); 85 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
86 if (FLAG_weak_embedded_maps_in_optimized_code) { 86 if (FLAG_weak_embedded_maps_in_optimized_code) {
87 RegisterDependentCodeForEmbeddedMaps(code); 87 RegisterDependentCodeForEmbeddedMaps(code);
88 } 88 }
89 PopulateDeoptimizationData(code); 89 PopulateDeoptimizationData(code);
90 info()->CommitDependencies(code); 90 info()->CommitDependencies(code);
91 } 91 }
92 92
93 93
94 void LCodeGen::Abort(const char* reason) { 94 void LCodeGen::Abort(BailoutReason reason) {
95 info()->set_bailout_reason(reason); 95 info()->set_bailout_reason(reason);
96 status_ = ABORTED; 96 status_ = ABORTED;
97 } 97 }
98 98
99 99
100 void LCodeGen::Comment(const char* format, ...) { 100 void LCodeGen::Comment(const char* format, ...) {
101 if (!FLAG_code_comments) return; 101 if (!FLAG_code_comments) return;
102 char buffer[4 * KB]; 102 char buffer[4 * KB];
103 StringBuilder builder(buffer, ARRAY_SIZE(buffer)); 103 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
104 va_list arguments; 104 va_list arguments;
(...skipping 222 matching lines...)
327 bool LCodeGen::GenerateDeoptJumpTable() { 327 bool LCodeGen::GenerateDeoptJumpTable() {
328 // Check that the jump table is accessible from everywhere in the function 328 // Check that the jump table is accessible from everywhere in the function
329 // code, i.e. that offsets to the table can be encoded in the 24bit signed 329 // code, i.e. that offsets to the table can be encoded in the 24bit signed
330 // immediate of a branch instruction. 330 // immediate of a branch instruction.
331 // To simplify we consider the code size from the first instruction to the 331 // To simplify we consider the code size from the first instruction to the
332 // end of the jump table. We also don't consider the pc load delta. 332 // end of the jump table. We also don't consider the pc load delta.
333 // Each entry in the jump table generates one instruction and inlines one 333 // Each entry in the jump table generates one instruction and inlines one
334 // 32bit data after it. 334 // 32bit data after it.
335 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) + 335 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
336 deopt_jump_table_.length() * 7)) { 336 deopt_jump_table_.length() * 7)) {
337 Abort("Generated code is too large"); 337 Abort(kGeneratedCodeIsTooLarge);
338 } 338 }
339 339
340 if (deopt_jump_table_.length() > 0) { 340 if (deopt_jump_table_.length() > 0) {
341 Comment(";;; -------------------- Jump table --------------------"); 341 Comment(";;; -------------------- Jump table --------------------");
342 } 342 }
343 Label table_start; 343 Label table_start;
344 __ bind(&table_start); 344 __ bind(&table_start);
345 Label needs_frame; 345 Label needs_frame;
346 for (int i = 0; i < deopt_jump_table_.length(); i++) { 346 for (int i = 0; i < deopt_jump_table_.length(); i++) {
347 __ bind(&deopt_jump_table_[i].label); 347 __ bind(&deopt_jump_table_[i].label);
(...skipping 68 matching lines...)
416 return ToRegister(op->index()); 416 return ToRegister(op->index());
417 } else if (op->IsConstantOperand()) { 417 } else if (op->IsConstantOperand()) {
418 LConstantOperand* const_op = LConstantOperand::cast(op); 418 LConstantOperand* const_op = LConstantOperand::cast(op);
419 HConstant* constant = chunk_->LookupConstant(const_op); 419 HConstant* constant = chunk_->LookupConstant(const_op);
420 Handle<Object> literal = constant->handle(); 420 Handle<Object> literal = constant->handle();
421 Representation r = chunk_->LookupLiteralRepresentation(const_op); 421 Representation r = chunk_->LookupLiteralRepresentation(const_op);
422 if (r.IsInteger32()) { 422 if (r.IsInteger32()) {
423 ASSERT(literal->IsNumber()); 423 ASSERT(literal->IsNumber());
424 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); 424 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number())));
425 } else if (r.IsDouble()) { 425 } else if (r.IsDouble()) {
426 Abort("EmitLoadRegister: Unsupported double immediate."); 426 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
427 } else { 427 } else {
428 ASSERT(r.IsTagged()); 428 ASSERT(r.IsTagged());
429 __ LoadObject(scratch, literal); 429 __ LoadObject(scratch, literal);
430 } 430 }
431 return scratch; 431 return scratch;
432 } else if (op->IsStackSlot() || op->IsArgument()) { 432 } else if (op->IsStackSlot() || op->IsArgument()) {
433 __ ldr(scratch, ToMemOperand(op)); 433 __ ldr(scratch, ToMemOperand(op));
434 return scratch; 434 return scratch;
435 } 435 }
436 UNREACHABLE(); 436 UNREACHABLE();
(...skipping 17 matching lines...)
454 HConstant* constant = chunk_->LookupConstant(const_op); 454 HConstant* constant = chunk_->LookupConstant(const_op);
455 Handle<Object> literal = constant->handle(); 455 Handle<Object> literal = constant->handle();
456 Representation r = chunk_->LookupLiteralRepresentation(const_op); 456 Representation r = chunk_->LookupLiteralRepresentation(const_op);
457 if (r.IsInteger32()) { 457 if (r.IsInteger32()) {
458 ASSERT(literal->IsNumber()); 458 ASSERT(literal->IsNumber());
459 __ mov(ip, Operand(static_cast<int32_t>(literal->Number()))); 459 __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
460 __ vmov(flt_scratch, ip); 460 __ vmov(flt_scratch, ip);
461 __ vcvt_f64_s32(dbl_scratch, flt_scratch); 461 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
462 return dbl_scratch; 462 return dbl_scratch;
463 } else if (r.IsDouble()) { 463 } else if (r.IsDouble()) {
464 Abort("unsupported double immediate"); 464 Abort(kUnsupportedDoubleImmediate);
465 } else if (r.IsTagged()) { 465 } else if (r.IsTagged()) {
466 Abort("unsupported tagged immediate"); 466 Abort(kUnsupportedTaggedImmediate);
467 } 467 }
468 } else if (op->IsStackSlot() || op->IsArgument()) { 468 } else if (op->IsStackSlot() || op->IsArgument()) {
469 // TODO(regis): Why is vldr not taking a MemOperand? 469 // TODO(regis): Why is vldr not taking a MemOperand?
470 // __ vldr(dbl_scratch, ToMemOperand(op)); 470 // __ vldr(dbl_scratch, ToMemOperand(op));
471 MemOperand mem_op = ToMemOperand(op); 471 MemOperand mem_op = ToMemOperand(op);
472 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset()); 472 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
473 return dbl_scratch; 473 return dbl_scratch;
474 } 474 }
475 UNREACHABLE(); 475 UNREACHABLE();
476 return dbl_scratch; 476 return dbl_scratch;
(...skipping 50 matching lines...)
527 LConstantOperand* const_op = LConstantOperand::cast(op); 527 LConstantOperand* const_op = LConstantOperand::cast(op);
528 HConstant* constant = chunk()->LookupConstant(const_op); 528 HConstant* constant = chunk()->LookupConstant(const_op);
529 Representation r = chunk_->LookupLiteralRepresentation(const_op); 529 Representation r = chunk_->LookupLiteralRepresentation(const_op);
530 if (r.IsSmi()) { 530 if (r.IsSmi()) {
531 ASSERT(constant->HasSmiValue()); 531 ASSERT(constant->HasSmiValue());
532 return Operand(Smi::FromInt(constant->Integer32Value())); 532 return Operand(Smi::FromInt(constant->Integer32Value()));
533 } else if (r.IsInteger32()) { 533 } else if (r.IsInteger32()) {
534 ASSERT(constant->HasInteger32Value()); 534 ASSERT(constant->HasInteger32Value());
535 return Operand(constant->Integer32Value()); 535 return Operand(constant->Integer32Value());
536 } else if (r.IsDouble()) { 536 } else if (r.IsDouble()) {
537 Abort("ToOperand Unsupported double immediate."); 537 Abort(kToOperandUnsupportedDoubleImmediate);
538 } 538 }
539 ASSERT(r.IsTagged()); 539 ASSERT(r.IsTagged());
540 return Operand(constant->handle()); 540 return Operand(constant->handle());
541 } else if (op->IsRegister()) { 541 } else if (op->IsRegister()) {
542 return Operand(ToRegister(op)); 542 return Operand(ToRegister(op));
543 } else if (op->IsDoubleRegister()) { 543 } else if (op->IsDoubleRegister()) {
544 Abort("ToOperand IsDoubleRegister unimplemented"); 544 Abort(kToOperandIsDoubleRegisterUnimplemented);
545 return Operand::Zero(); 545 return Operand::Zero();
546 } 546 }
547 // Stack slots not implemented, use ToMemOperand instead. 547 // Stack slots not implemented, use ToMemOperand instead.
548 UNREACHABLE(); 548 UNREACHABLE();
549 return Operand::Zero(); 549 return Operand::Zero();
550 } 550 }
551 551
552 552
553 MemOperand LCodeGen::ToMemOperand(LOperand* op) const { 553 MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
554 ASSERT(!op->IsRegister()); 554 ASSERT(!op->IsRegister());
(...skipping 210 matching lines...)
765 void LCodeGen::DeoptimizeIf(Condition cc, 765 void LCodeGen::DeoptimizeIf(Condition cc,
766 LEnvironment* environment, 766 LEnvironment* environment,
767 Deoptimizer::BailoutType bailout_type) { 767 Deoptimizer::BailoutType bailout_type) {
768 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 768 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
769 ASSERT(environment->HasBeenRegistered()); 769 ASSERT(environment->HasBeenRegistered());
770 int id = environment->deoptimization_index(); 770 int id = environment->deoptimization_index();
771 ASSERT(info()->IsOptimizing() || info()->IsStub()); 771 ASSERT(info()->IsOptimizing() || info()->IsStub());
772 Address entry = 772 Address entry =
773 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); 773 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
774 if (entry == NULL) { 774 if (entry == NULL) {
775 Abort("bailout was not prepared"); 775 Abort(kBailoutWasNotPrepared);
776 return; 776 return;
777 } 777 }
778 778
779 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. 779 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM.
780 if (FLAG_deopt_every_n_times == 1 && 780 if (FLAG_deopt_every_n_times == 1 &&
781 !info()->IsStub() && 781 !info()->IsStub() &&
782 info()->opt_count() == id) { 782 info()->opt_count() == id) {
783 ASSERT(frame_is_built_); 783 ASSERT(frame_is_built_);
784 __ Call(entry, RelocInfo::RUNTIME_ENTRY); 784 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
785 return; 785 return;
(...skipping 951 matching lines...)
1737 __ tst(left, Operand(0x80000000)); 1737 __ tst(left, Operand(0x80000000));
1738 DeoptimizeIf(ne, instr->environment()); 1738 DeoptimizeIf(ne, instr->environment());
1739 } 1739 }
1740 __ Move(result, left); 1740 __ Move(result, left);
1741 } 1741 }
1742 break; 1742 break;
1743 case Token::SHL: 1743 case Token::SHL:
1744 if (shift_count != 0) { 1744 if (shift_count != 0) {
1745 if (instr->hydrogen_value()->representation().IsSmi() && 1745 if (instr->hydrogen_value()->representation().IsSmi() &&
1746 instr->can_deopt()) { 1746 instr->can_deopt()) {
1747 __ mov(result, Operand(left, LSL, shift_count - 1)); 1747 if (shift_count != 1) {
1748 __ SmiTag(result, result, SetCC); 1748 __ mov(result, Operand(left, LSL, shift_count - 1));
1749 __ SmiTag(result, result, SetCC);
1750 } else {
1751 __ SmiTag(result, left, SetCC);
1752 }
1749 DeoptimizeIf(vs, instr->environment()); 1753 DeoptimizeIf(vs, instr->environment());
1750 } else { 1754 } else {
1751 __ mov(result, Operand(left, LSL, shift_count)); 1755 __ mov(result, Operand(left, LSL, shift_count));
1752 } 1756 }
1753 } else { 1757 } else {
1754 __ Move(result, left); 1758 __ Move(result, left);
1755 } 1759 }
1756 break; 1760 break;
1757 default: 1761 default:
1758 UNREACHABLE(); 1762 UNREACHABLE();
(...skipping 56 matching lines...)
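The new shift_count == 1 path in the Token::SHL case above drops the redundant LSL #0 move and tags directly from left. On ARM the macro assembler typically smi-tags with SetCC by adding the value to itself, so signed overflow sets the V flag and DeoptimizeIf(vs, ...) catches values that do not fit in a smi. A hedged sketch of that overflow check, assuming 32-bit words and a one-bit smi tag:

// Sketch only: why the overflow (vs) condition detects untaggable values.
#include <cstdint>
#include <cstdio>

bool SmiTagOverflows(int32_t value, int32_t* tagged) {
  // Tagging is a left shift by one; computing it as value + value lets a
  // signed-overflow flag (ARM's V flag) report values outside the smi range.
  return __builtin_add_overflow(value, value, tagged);  // GCC/Clang builtin.
}

int main() {
  int32_t t;
  std::printf("%d\n", SmiTagOverflows(0x40000000, &t));  // 1 -> would deoptimize.
  std::printf("%d\n", SmiTagOverflows(123, &t));         // 0 -> t == 246.
  return 0;
}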
1815 1819
1816 1820
1817 void LCodeGen::DoConstantD(LConstantD* instr) { 1821 void LCodeGen::DoConstantD(LConstantD* instr) {
1818 ASSERT(instr->result()->IsDoubleRegister()); 1822 ASSERT(instr->result()->IsDoubleRegister());
1819 DwVfpRegister result = ToDoubleRegister(instr->result()); 1823 DwVfpRegister result = ToDoubleRegister(instr->result());
1820 double v = instr->value(); 1824 double v = instr->value();
1821 __ Vmov(result, v, scratch0()); 1825 __ Vmov(result, v, scratch0());
1822 } 1826 }
1823 1827
1824 1828
1829 void LCodeGen::DoConstantE(LConstantE* instr) {
1830 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1831 }
1832
1833
1825 void LCodeGen::DoConstantT(LConstantT* instr) { 1834 void LCodeGen::DoConstantT(LConstantT* instr) {
1826 Handle<Object> value = instr->value(); 1835 Handle<Object> value = instr->value();
1827 AllowDeferredHandleDereference smi_check; 1836 AllowDeferredHandleDereference smi_check;
1828 __ LoadObject(ToRegister(instr->result()), value); 1837 __ LoadObject(ToRegister(instr->result()), value);
1829 } 1838 }
1830 1839
1831 1840
1832 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { 1841 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
1833 Register result = ToRegister(instr->result()); 1842 Register result = ToRegister(instr->result());
1834 Register map = ToRegister(instr->value()); 1843 Register map = ToRegister(instr->value());
(...skipping 85 matching lines...)
1920 1929
1921 if (FLAG_debug_code) { 1930 if (FLAG_debug_code) {
1922 __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset)); 1931 __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
1923 __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset)); 1932 __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
1924 1933
1925 __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask)); 1934 __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
1926 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 1935 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
1927 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 1936 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
1928 __ cmp(ip, Operand(encoding == String::ONE_BYTE_ENCODING 1937 __ cmp(ip, Operand(encoding == String::ONE_BYTE_ENCODING
1929 ? one_byte_seq_type : two_byte_seq_type)); 1938 ? one_byte_seq_type : two_byte_seq_type));
1930 __ Check(eq, "Unexpected string type"); 1939 __ Check(eq, kUnexpectedStringType);
1931 } 1940 }
1932 1941
1933 __ add(ip, 1942 __ add(ip,
1934 string, 1943 string,
1935 Operand(SeqString::kHeaderSize - kHeapObjectTag)); 1944 Operand(SeqString::kHeaderSize - kHeapObjectTag));
1936 if (encoding == String::ONE_BYTE_ENCODING) { 1945 if (encoding == String::ONE_BYTE_ENCODING) {
1937 __ strb(value, MemOperand(ip, index)); 1946 __ strb(value, MemOperand(ip, index));
1938 } else { 1947 } else {
1939 // MemOperand with ip as the base register is not allowed for strh, so 1948 // MemOperand with ip as the base register is not allowed for strh, so
1940 // we do the address calculation explicitly. 1949 // we do the address calculation explicitly.
(...skipping 983 matching lines...)
2924 ASSERT(ToRegister(instr->value()).is(r0)); 2933 ASSERT(ToRegister(instr->value()).is(r0));
2925 2934
2926 __ mov(r2, Operand(instr->name())); 2935 __ mov(r2, Operand(instr->name()));
2927 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 2936 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
2928 ? isolate()->builtins()->StoreIC_Initialize_Strict() 2937 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2929 : isolate()->builtins()->StoreIC_Initialize(); 2938 : isolate()->builtins()->StoreIC_Initialize();
2930 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 2939 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2931 } 2940 }
2932 2941
2933 2942
2934 void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
2935 Register object = ToRegister(instr->object());
2936 ExternalReference sites_list_address = instr->GetReference(isolate());
2937
2938 __ mov(ip, Operand(sites_list_address));
2939 __ ldr(ip, MemOperand(ip));
2940 __ str(ip, FieldMemOperand(object,
2941 instr->hydrogen()->store_field().offset()));
2942 __ mov(ip, Operand(sites_list_address));
2943 __ str(object, MemOperand(ip));
2944 }
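The new DoLinkObjectInList above prepends the object to an externally referenced singly linked list (the sites list): it copies the current list head into one of the object's fields, then stores the object as the new head. A minimal sketch of the same operation, with illustrative (non-V8) names:

// Sketch only: the prepend performed by the ldr/str sequence above.
struct ListNode { ListNode* next_field; };

void LinkObjectInList(ListNode** list_head, ListNode* object) {
  object->next_field = *list_head;  // ldr ip, [head]; str ip, [object + field offset]
  *list_head = object;              // str object, [head]
}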
2945
2946
2947 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 2943 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2948 Register context = ToRegister(instr->context()); 2944 Register context = ToRegister(instr->context());
2949 Register result = ToRegister(instr->result()); 2945 Register result = ToRegister(instr->result());
2950 __ ldr(result, ContextOperand(context, instr->slot_index())); 2946 __ ldr(result, ContextOperand(context, instr->slot_index()));
2951 if (instr->hydrogen()->RequiresHoleCheck()) { 2947 if (instr->hydrogen()->RequiresHoleCheck()) {
2952 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 2948 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2953 __ cmp(result, ip); 2949 __ cmp(result, ip);
2954 if (instr->hydrogen()->DeoptimizesOnHole()) { 2950 if (instr->hydrogen()->DeoptimizesOnHole()) {
2955 DeoptimizeIf(eq, instr->environment()); 2951 DeoptimizeIf(eq, instr->environment());
2956 } else { 2952 } else {
(...skipping 38 matching lines...)
2995 } 2991 }
2996 2992
2997 __ bind(&skip_assignment); 2993 __ bind(&skip_assignment);
2998 } 2994 }
2999 2995
3000 2996
3001 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { 2997 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
3002 HObjectAccess access = instr->hydrogen()->access(); 2998 HObjectAccess access = instr->hydrogen()->access();
3003 int offset = access.offset(); 2999 int offset = access.offset();
3004 Register object = ToRegister(instr->object()); 3000 Register object = ToRegister(instr->object());
3001
3002 if (access.IsExternalMemory()) {
3003 Register result = ToRegister(instr->result());
3004 __ ldr(result, MemOperand(object, offset));
3005 return;
3006 }
3007
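The new IsExternalMemory path above uses a plain MemOperand where the other paths use FieldMemOperand. FieldMemOperand folds the heap-object tag out of the offset, which only makes sense when the base register holds a tagged heap pointer; an external address is untagged, so the raw offset is used. A brief sketch of the distinction, assuming a one-byte heap-object tag:

// Sketch only: the two addressing forms, assuming kHeapObjectTag == 1.
#include <cstdint>

constexpr int kHeapObjectTag = 1;

// External (untagged) base: address = base + offset, i.e. MemOperand(object, offset).
inline intptr_t ExternalFieldAddress(intptr_t base, int offset) {
  return base + offset;
}

// Tagged heap-object base: FieldMemOperand(object, offset) encodes
// address = base + offset - kHeapObjectTag.
inline intptr_t HeapFieldAddress(intptr_t tagged_base, int offset) {
  return tagged_base + offset - kHeapObjectTag;
}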
3005 if (instr->hydrogen()->representation().IsDouble()) { 3008 if (instr->hydrogen()->representation().IsDouble()) {
3006 DwVfpRegister result = ToDoubleRegister(instr->result()); 3009 DwVfpRegister result = ToDoubleRegister(instr->result());
3007 __ vldr(result, FieldMemOperand(object, offset)); 3010 __ vldr(result, FieldMemOperand(object, offset));
3008 return; 3011 return;
3009 } 3012 }
3010 3013
3011 Register result = ToRegister(instr->result()); 3014 Register result = ToRegister(instr->result());
3012 if (access.IsInobject()) { 3015 if (access.IsInobject()) {
3013 __ ldr(result, FieldMemOperand(object, offset)); 3016 __ ldr(result, FieldMemOperand(object, offset));
3014 } else { 3017 } else {
(...skipping 175 matching lines...)
3190 3193
3191 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { 3194 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
3192 Register external_pointer = ToRegister(instr->elements()); 3195 Register external_pointer = ToRegister(instr->elements());
3193 Register key = no_reg; 3196 Register key = no_reg;
3194 ElementsKind elements_kind = instr->elements_kind(); 3197 ElementsKind elements_kind = instr->elements_kind();
3195 bool key_is_constant = instr->key()->IsConstantOperand(); 3198 bool key_is_constant = instr->key()->IsConstantOperand();
3196 int constant_key = 0; 3199 int constant_key = 0;
3197 if (key_is_constant) { 3200 if (key_is_constant) {
3198 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); 3201 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3199 if (constant_key & 0xF0000000) { 3202 if (constant_key & 0xF0000000) {
3200 Abort("array index constant value too big."); 3203 Abort(kArrayIndexConstantValueTooBig);
3201 } 3204 }
3202 } else { 3205 } else {
3203 key = ToRegister(instr->key()); 3206 key = ToRegister(instr->key());
3204 } 3207 }
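The constant_key & 0xF0000000 guard above (repeated in the other keyed load and store paths below) rejects constant indices that are negative or need more than 28 bits; the apparent rationale is that with an element size shift of at most three, a 28-bit index shifted left still fits in the 32-bit offset computed later. A short sketch of that bound, with illustrative names:

// Sketch only: the assumption behind rejecting keys with any of the top four bits set.
#include <cstdint>

bool ConstantKeyFits(int32_t constant_key) {
  return (constant_key & 0xF0000000) == 0;  // Non-negative and below 2^28.
}

int32_t ElementOffsetBytes(int32_t constant_key, int element_size_shift) {
  // element_size_shift is at most 3 (8-byte doubles), so a 28-bit key
  // cannot overflow the 32-bit offset computed here.
  return constant_key << element_size_shift;
}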
3205 int element_size_shift = ElementsKindToShiftSize(elements_kind); 3208 int element_size_shift = ElementsKindToShiftSize(elements_kind);
3206 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) 3209 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
3207 ? (element_size_shift - kSmiTagSize) : element_size_shift; 3210 ? (element_size_shift - kSmiTagSize) : element_size_shift;
3208 int additional_offset = instr->additional_index() << element_size_shift; 3211 int additional_offset = instr->additional_index() << element_size_shift;
3209 3212
3210 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || 3213 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
(...skipping 63 matching lines...)
3274 DwVfpRegister result = ToDoubleRegister(instr->result()); 3277 DwVfpRegister result = ToDoubleRegister(instr->result());
3275 Register scratch = scratch0(); 3278 Register scratch = scratch0();
3276 3279
3277 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); 3280 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
3278 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) 3281 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
3279 ? (element_size_shift - kSmiTagSize) : element_size_shift; 3282 ? (element_size_shift - kSmiTagSize) : element_size_shift;
3280 int constant_key = 0; 3283 int constant_key = 0;
3281 if (key_is_constant) { 3284 if (key_is_constant) {
3282 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); 3285 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3283 if (constant_key & 0xF0000000) { 3286 if (constant_key & 0xF0000000) {
3284 Abort("array index constant value too big."); 3287 Abort(kArrayIndexConstantValueTooBig);
3285 } 3288 }
3286 } else { 3289 } else {
3287 key = ToRegister(instr->key()); 3290 key = ToRegister(instr->key());
3288 } 3291 }
3289 3292
3290 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + 3293 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
3291 ((constant_key + instr->additional_index()) << element_size_shift); 3294 ((constant_key + instr->additional_index()) << element_size_shift);
3292 if (!key_is_constant) { 3295 if (!key_is_constant) {
3293 __ add(elements, elements, Operand(key, LSL, shift_size)); 3296 __ add(elements, elements, Operand(key, LSL, shift_size));
3294 } 3297 }
(...skipping 240 matching lines...)
3535 ParameterCount actual(receiver); 3538 ParameterCount actual(receiver);
3536 __ InvokeFunction(function, actual, CALL_FUNCTION, 3539 __ InvokeFunction(function, actual, CALL_FUNCTION,
3537 safepoint_generator, CALL_AS_METHOD); 3540 safepoint_generator, CALL_AS_METHOD);
3538 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3541 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3539 } 3542 }
3540 3543
3541 3544
3542 void LCodeGen::DoPushArgument(LPushArgument* instr) { 3545 void LCodeGen::DoPushArgument(LPushArgument* instr) {
3543 LOperand* argument = instr->value(); 3546 LOperand* argument = instr->value();
3544 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { 3547 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
3545 Abort("DoPushArgument not implemented for double type."); 3548 Abort(kDoPushArgumentNotImplementedForDoubleType);
3546 } else { 3549 } else {
3547 Register argument_reg = EmitLoadRegister(argument, ip); 3550 Register argument_reg = EmitLoadRegister(argument, ip);
3548 __ push(argument_reg); 3551 __ push(argument_reg);
3549 } 3552 }
3550 } 3553 }
3551 3554
3552 3555
3553 void LCodeGen::DoDrop(LDrop* instr) { 3556 void LCodeGen::DoDrop(LDrop* instr) {
3554 __ Drop(instr->count()); 3557 __ Drop(instr->count());
3555 } 3558 }
(...skipping 199 matching lines...)
3755 virtual LInstruction* instr() { return instr_; } 3758 virtual LInstruction* instr() { return instr_; }
3756 private: 3759 private:
3757 LMathAbs* instr_; 3760 LMathAbs* instr_;
3758 }; 3761 };
3759 3762
3760 Representation r = instr->hydrogen()->value()->representation(); 3763 Representation r = instr->hydrogen()->value()->representation();
3761 if (r.IsDouble()) { 3764 if (r.IsDouble()) {
3762 DwVfpRegister input = ToDoubleRegister(instr->value()); 3765 DwVfpRegister input = ToDoubleRegister(instr->value());
3763 DwVfpRegister result = ToDoubleRegister(instr->result()); 3766 DwVfpRegister result = ToDoubleRegister(instr->result());
3764 __ vabs(result, input); 3767 __ vabs(result, input);
3765 } else if (r.IsInteger32()) { 3768 } else if (r.IsSmiOrInteger32()) {
3766 EmitIntegerMathAbs(instr); 3769 EmitIntegerMathAbs(instr);
3767 } else { 3770 } else {
3768 // Representation is tagged. 3771 // Representation is tagged.
3769 DeferredMathAbsTaggedHeapNumber* deferred = 3772 DeferredMathAbsTaggedHeapNumber* deferred =
3770 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); 3773 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
3771 Register input = ToRegister(instr->value()); 3774 Register input = ToRegister(instr->value());
3772 // Smi check. 3775 // Smi check.
3773 __ JumpIfNotSmi(input, deferred->entry()); 3776 __ JumpIfNotSmi(input, deferred->entry());
3774 // If smi, handle it directly. 3777 // If smi, handle it directly.
3775 EmitIntegerMathAbs(instr); 3778 EmitIntegerMathAbs(instr);
(...skipping 398 matching lines...)
4174 Register base = ToRegister(instr->base_object()); 4177 Register base = ToRegister(instr->base_object());
4175 __ add(result, base, Operand(instr->offset())); 4178 __ add(result, base, Operand(instr->offset()));
4176 } 4179 }
4177 4180
4178 4181
4179 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 4182 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
4180 Representation representation = instr->representation(); 4183 Representation representation = instr->representation();
4181 4184
4182 Register object = ToRegister(instr->object()); 4185 Register object = ToRegister(instr->object());
4183 Register scratch = scratch0(); 4186 Register scratch = scratch0();
4184
4185 HObjectAccess access = instr->hydrogen()->access(); 4187 HObjectAccess access = instr->hydrogen()->access();
4186 int offset = access.offset(); 4188 int offset = access.offset();
4187 4189
4190 if (access.IsExternalMemory()) {
4191 Register value = ToRegister(instr->value());
4192 __ str(value, MemOperand(object, offset));
4193 return;
4194 }
4195
4188 Handle<Map> transition = instr->transition(); 4196 Handle<Map> transition = instr->transition();
4189 4197
4190 if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { 4198 if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
4191 Register value = ToRegister(instr->value()); 4199 Register value = ToRegister(instr->value());
4192 if (!instr->hydrogen()->value()->type().IsHeapObject()) { 4200 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
4193 __ SmiTst(value); 4201 __ SmiTst(value);
4194 DeoptimizeIf(eq, instr->environment()); 4202 DeoptimizeIf(eq, instr->environment());
4195 } 4203 }
4196 } else if (FLAG_track_double_fields && representation.IsDouble()) { 4204 } else if (FLAG_track_double_fields && representation.IsDouble()) {
4197 ASSERT(transition.is_null()); 4205 ASSERT(transition.is_null());
(...skipping 106 matching lines...)
4304 4312
4305 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { 4313 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
4306 Register external_pointer = ToRegister(instr->elements()); 4314 Register external_pointer = ToRegister(instr->elements());
4307 Register key = no_reg; 4315 Register key = no_reg;
4308 ElementsKind elements_kind = instr->elements_kind(); 4316 ElementsKind elements_kind = instr->elements_kind();
4309 bool key_is_constant = instr->key()->IsConstantOperand(); 4317 bool key_is_constant = instr->key()->IsConstantOperand();
4310 int constant_key = 0; 4318 int constant_key = 0;
4311 if (key_is_constant) { 4319 if (key_is_constant) {
4312 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); 4320 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
4313 if (constant_key & 0xF0000000) { 4321 if (constant_key & 0xF0000000) {
4314 Abort("array index constant value too big."); 4322 Abort(kArrayIndexConstantValueTooBig);
4315 } 4323 }
4316 } else { 4324 } else {
4317 key = ToRegister(instr->key()); 4325 key = ToRegister(instr->key());
4318 } 4326 }
4319 int element_size_shift = ElementsKindToShiftSize(elements_kind); 4327 int element_size_shift = ElementsKindToShiftSize(elements_kind);
4320 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) 4328 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
4321 ? (element_size_shift - kSmiTagSize) : element_size_shift; 4329 ? (element_size_shift - kSmiTagSize) : element_size_shift;
4322 int additional_offset = instr->additional_index() << element_size_shift; 4330 int additional_offset = instr->additional_index() << element_size_shift;
4323 4331
4324 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || 4332 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
(...skipping 52 matching lines...)
4377 Register key = no_reg; 4385 Register key = no_reg;
4378 Register scratch = scratch0(); 4386 Register scratch = scratch0();
4379 bool key_is_constant = instr->key()->IsConstantOperand(); 4387 bool key_is_constant = instr->key()->IsConstantOperand();
4380 int constant_key = 0; 4388 int constant_key = 0;
4381 4389
4382 // Calculate the effective address of the slot in the array to store the 4390 // Calculate the effective address of the slot in the array to store the
4383 // double value. 4391 // double value.
4384 if (key_is_constant) { 4392 if (key_is_constant) {
4385 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); 4393 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
4386 if (constant_key & 0xF0000000) { 4394 if (constant_key & 0xF0000000) {
4387 Abort("array index constant value too big."); 4395 Abort(kArrayIndexConstantValueTooBig);
4388 } 4396 }
4389 } else { 4397 } else {
4390 key = ToRegister(instr->key()); 4398 key = ToRegister(instr->key());
4391 } 4399 }
4392 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); 4400 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
4393 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) 4401 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
4394 ? (element_size_shift - kSmiTagSize) : element_size_shift; 4402 ? (element_size_shift - kSmiTagSize) : element_size_shift;
4395 Operand operand = key_is_constant 4403 Operand operand = key_is_constant
4396 ? Operand((constant_key << element_size_shift) + 4404 ? Operand((constant_key << element_size_shift) +
4397 FixedDoubleArray::kHeaderSize - kHeapObjectTag) 4405 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
4398 : Operand(key, LSL, shift_size); 4406 : Operand(key, LSL, shift_size);
4399 __ add(scratch, elements, operand); 4407 __ add(scratch, elements, operand);
4400 if (!key_is_constant) { 4408 if (!key_is_constant) {
4401 __ add(scratch, scratch, 4409 __ add(scratch, scratch,
4402 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag)); 4410 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
4403 } 4411 }
4404 4412
4405 if (instr->NeedsCanonicalization()) { 4413 if (instr->NeedsCanonicalization()) {
4406 // Force a canonical NaN. 4414 // Force a canonical NaN.
4407 if (masm()->emit_debug_code()) { 4415 if (masm()->emit_debug_code()) {
4408 __ vmrs(ip); 4416 __ vmrs(ip);
4409 __ tst(ip, Operand(kVFPDefaultNaNModeControlBit)); 4417 __ tst(ip, Operand(kVFPDefaultNaNModeControlBit));
4410 __ Assert(ne, "Default NaN mode not set"); 4418 __ Assert(ne, kDefaultNaNModeNotSet);
4411 } 4419 }
4412 __ VFPCanonicalizeNaN(value); 4420 __ VFPCanonicalizeNaN(value);
4413 } 4421 }
4414 __ vstr(value, scratch, instr->additional_index() << element_size_shift); 4422 __ vstr(value, scratch, instr->additional_index() << element_size_shift);
4415 } 4423 }
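Before storing into a FixedDoubleArray, NeedsCanonicalization() forces any NaN to the single default (canonical) quiet-NaN pattern, and the debug check above verifies that the VFP default-NaN mode bit is set so this also holds for results produced by the FPU. This keeps arbitrary NaN payloads, which could otherwise collide with the special hole NaN pattern used in double arrays, out of the backing store. A minimal sketch of the canonicalization, assuming the standard quiet-NaN encoding:

// Sketch only: collapsing every NaN to one canonical bit pattern before storing.
#include <cmath>
#include <cstdint>
#include <cstring>

double CanonicalizeNaN(double value) {
  if (std::isnan(value)) {
    const uint64_t kCanonicalNaNBits = 0x7FF8000000000000ULL;  // Default quiet NaN.
    std::memcpy(&value, &kCanonicalNaNBits, sizeof(value));
  }
  return value;
}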
4416 4424
4417 4425
4418 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { 4426 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
4419 Register value = ToRegister(instr->value()); 4427 Register value = ToRegister(instr->value());
4420 Register elements = ToRegister(instr->elements()); 4428 Register elements = ToRegister(instr->elements());
(...skipping 209 matching lines...)
4630 __ mov(result, Operand::Zero()); 4638 __ mov(result, Operand::Zero());
4631 4639
4632 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4640 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4633 __ SmiTag(char_code); 4641 __ SmiTag(char_code);
4634 __ push(char_code); 4642 __ push(char_code);
4635 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); 4643 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
4636 __ StoreToSafepointRegisterSlot(r0, result); 4644 __ StoreToSafepointRegisterSlot(r0, result);
4637 } 4645 }
4638 4646
4639 4647
4640 void LCodeGen::DoStringLength(LStringLength* instr) {
4641 Register string = ToRegister(instr->string());
4642 Register result = ToRegister(instr->result());
4643 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
4644 }
4645
4646
4647 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 4648 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4648 LOperand* input = instr->value(); 4649 LOperand* input = instr->value();
4649 ASSERT(input->IsRegister() || input->IsStackSlot()); 4650 ASSERT(input->IsRegister() || input->IsStackSlot());
4650 LOperand* output = instr->result(); 4651 LOperand* output = instr->result();
4651 ASSERT(output->IsDoubleRegister()); 4652 ASSERT(output->IsDoubleRegister());
4652 SwVfpRegister single_scratch = double_scratch0().low(); 4653 SwVfpRegister single_scratch = double_scratch0().low();
4653 if (input->IsStackSlot()) { 4654 if (input->IsStackSlot()) {
4654 Register scratch = scratch0(); 4655 Register scratch = scratch0();
4655 __ ldr(scratch, ToMemOperand(input)); 4656 __ ldr(scratch, ToMemOperand(input));
4656 __ vmov(single_scratch, scratch); 4657 __ vmov(single_scratch, scratch);
(...skipping 672 matching lines...)
5329 5330
5330 Register result = ToRegister(instr->result()); 5331 Register result = ToRegister(instr->result());
5331 Register scratch = ToRegister(instr->temp1()); 5332 Register scratch = ToRegister(instr->temp1());
5332 Register scratch2 = ToRegister(instr->temp2()); 5333 Register scratch2 = ToRegister(instr->temp2());
5333 5334
5334 // Allocate memory for the object. 5335 // Allocate memory for the object.
5335 AllocationFlags flags = TAG_OBJECT; 5336 AllocationFlags flags = TAG_OBJECT;
5336 if (instr->hydrogen()->MustAllocateDoubleAligned()) { 5337 if (instr->hydrogen()->MustAllocateDoubleAligned()) {
5337 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); 5338 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
5338 } 5339 }
5339 if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { 5340 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5340 ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); 5341 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
5342 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5341 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); 5343 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
5342 } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { 5344 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5345 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5343 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); 5346 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
5344 } 5347 }
5345 5348
5346 if (instr->size()->IsConstantOperand()) { 5349 if (instr->size()->IsConstantOperand()) {
5347 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5350 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5348 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags); 5351 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
5349 } else { 5352 } else {
5350 Register size = ToRegister(instr->size()); 5353 Register size = ToRegister(instr->size());
5351 __ Allocate(size, 5354 __ Allocate(size,
5352 result, 5355 result,
(...skipping 38 matching lines...)
5391 if (instr->size()->IsRegister()) { 5394 if (instr->size()->IsRegister()) {
5392 Register size = ToRegister(instr->size()); 5395 Register size = ToRegister(instr->size());
5393 ASSERT(!size.is(result)); 5396 ASSERT(!size.is(result));
5394 __ SmiTag(size); 5397 __ SmiTag(size);
5395 __ push(size); 5398 __ push(size);
5396 } else { 5399 } else {
5397 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5400 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5398 __ Push(Smi::FromInt(size)); 5401 __ Push(Smi::FromInt(size));
5399 } 5402 }
5400 5403
5401 if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { 5404 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5402 ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); 5405 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
5406 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5403 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr); 5407 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
5404 } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { 5408 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5409 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5405 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr); 5410 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
5406 } else { 5411 } else {
5407 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); 5412 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
5408 } 5413 }
5409 __ StoreToSafepointRegisterSlot(r0, result); 5414 __ StoreToSafepointRegisterSlot(r0, result);
5410 } 5415 }
5411 5416
5412 5417
5413 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 5418 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5414 ASSERT(ToRegister(instr->value()).is(r0)); 5419 ASSERT(ToRegister(instr->value()).is(r0));
(...skipping 404 matching lines...)
5819 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); 5824 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5820 __ ldr(result, FieldMemOperand(scratch, 5825 __ ldr(result, FieldMemOperand(scratch,
5821 FixedArray::kHeaderSize - kPointerSize)); 5826 FixedArray::kHeaderSize - kPointerSize));
5822 __ bind(&done); 5827 __ bind(&done);
5823 } 5828 }
5824 5829
5825 5830
5826 #undef __ 5831 #undef __
5827 5832
5828 } } // namespace v8::internal 5833 } } // namespace v8::internal
