| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 73 matching lines...) |
| 84 code->set_stack_slots(GetStackSlotCount()); | 84 code->set_stack_slots(GetStackSlotCount()); |
| 85 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 85 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 86 if (FLAG_weak_embedded_maps_in_optimized_code) { | 86 if (FLAG_weak_embedded_maps_in_optimized_code) { |
| 87 RegisterDependentCodeForEmbeddedMaps(code); | 87 RegisterDependentCodeForEmbeddedMaps(code); |
| 88 } | 88 } |
| 89 PopulateDeoptimizationData(code); | 89 PopulateDeoptimizationData(code); |
| 90 info()->CommitDependencies(code); | 90 info()->CommitDependencies(code); |
| 91 } | 91 } |
| 92 | 92 |
| 93 | 93 |
| 94 void LChunkBuilder::Abort(const char* reason) { | 94 void LChunkBuilder::Abort(BailoutReason reason) { |
| 95 info()->set_bailout_reason(reason); | 95 info()->set_bailout_reason(reason); |
| 96 status_ = ABORTED; | 96 status_ = ABORTED; |
| 97 } | 97 } |
| 98 | 98 |
| 99 | 99 |
| 100 void LCodeGen::Comment(const char* format, ...) { | 100 void LCodeGen::Comment(const char* format, ...) { |
| 101 if (!FLAG_code_comments) return; | 101 if (!FLAG_code_comments) return; |
| 102 char buffer[4 * KB]; | 102 char buffer[4 * KB]; |
| 103 StringBuilder builder(buffer, ARRAY_SIZE(buffer)); | 103 StringBuilder builder(buffer, ARRAY_SIZE(buffer)); |
| 104 va_list arguments; | 104 va_list arguments; |
| (...skipping 212 matching lines...) |
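
The hunk above is the heart of this change: `Abort()` now takes a `BailoutReason` enum value instead of a raw `const char*`, so every literal string in the OLD column becomes a `k...` constant in the NEW column. As a rough illustration of how such an enum can stay printable, here is a minimal, self-contained sketch using an X-macro; the names `BAILOUT_MESSAGES_LIST`, `GetBailoutReason`, and the helper macros are assumptions for illustration, not V8's actual definitions.

```cpp
// Illustrative sketch only: an X-macro keeps the enum values and their
// human-readable messages in one place, so call sites can pass a compact
// enum value (as the new Abort(BailoutReason) does) while debug output
// still gets a string.
#include <cstdio>

#define BAILOUT_MESSAGES_LIST(V)                                  \
  V(kGeneratedCodeIsTooLarge, "Generated code is too large")      \
  V(kUnsupportedDoubleImmediate, "unsupported double immediate")  \
  V(kBailoutWasNotPrepared, "bailout was not prepared")

enum BailoutReason {
#define DECLARE_REASON(Name, message) Name,
  BAILOUT_MESSAGES_LIST(DECLARE_REASON)
#undef DECLARE_REASON
  kLastBailoutReason
};

const char* GetBailoutReason(BailoutReason reason) {
  static const char* const kMessages[] = {
#define DECLARE_MESSAGE(Name, message) message,
      BAILOUT_MESSAGES_LIST(DECLARE_MESSAGE)
#undef DECLARE_MESSAGE
  };
  return kMessages[reason];
}

int main() {
  BailoutReason reason = kGeneratedCodeIsTooLarge;  // what a call site records
  std::printf("Aborting: %s\n", GetBailoutReason(reason));
  return 0;
}
```

Call sites then carry only the compact enum value, and the string is materialized just once, at the point where a human-readable message is actually wanted.
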
| 317 | 317 |
| 318 | 318 |
| 319 bool LCodeGen::GenerateDeoptJumpTable() { | 319 bool LCodeGen::GenerateDeoptJumpTable() { |
| 320 // Check that the jump table is accessible from everywhere in the function | 320 // Check that the jump table is accessible from everywhere in the function |
| 321 // code, i.e. that offsets to the table can be encoded in the 16bit signed | 321 // code, i.e. that offsets to the table can be encoded in the 16bit signed |
| 322 // immediate of a branch instruction. | 322 // immediate of a branch instruction. |
| 323 // To simplify we consider the code size from the first instruction to the | 323 // To simplify we consider the code size from the first instruction to the |
| 324 // end of the jump table. | 324 // end of the jump table. |
| 325 if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) + | 325 if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) + |
| 326 deopt_jump_table_.length() * 12)) { | 326 deopt_jump_table_.length() * 12)) { |
| 327 Abort("Generated code is too large"); | 327 Abort(kGeneratedCodeIsTooLarge); |
| 328 } | 328 } |
| 329 | 329 |
| 330 if (deopt_jump_table_.length() > 0) { | 330 if (deopt_jump_table_.length() > 0) { |
| 331 Comment(";;; -------------------- Jump table --------------------"); | 331 Comment(";;; -------------------- Jump table --------------------"); |
| 332 } | 332 } |
| 333 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 333 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 334 Label table_start; | 334 Label table_start; |
| 335 __ bind(&table_start); | 335 __ bind(&table_start); |
| 336 Label needs_frame; | 336 Label needs_frame; |
| 337 for (int i = 0; i < deopt_jump_table_.length(); i++) { | 337 for (int i = 0; i < deopt_jump_table_.length(); i++) { |
| (...skipping 66 matching lines...) |
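
The comment in `GenerateDeoptJumpTable()` reasons that every branch into the table must be reachable with a 16-bit signed instruction offset, which is what the `is_int16(pc_offset() / kInstrSize + length * 12)` guard checks before falling back to `Abort(kGeneratedCodeIsTooLarge)`. A small stand-alone sketch of that range check follows; the 12-instructions-per-entry figure mirrors the `* 12` in the hunk, and the example sizes are made up.

```cpp
// Minimal sketch of the range check reasoned about above: a MIPS branch
// encodes a signed 16-bit instruction offset, so a distance only works if
// it fits in [-32768, 32767]. All constants below are illustrative.
#include <cstdint>
#include <cstdio>

static bool is_int16(int64_t value) {
  return value >= -32768 && value <= 32767;
}

int main() {
  const int kInstrSize = 4;            // bytes per MIPS instruction
  const int kEntrySizeInInstrs = 12;   // assumed size of one jump-table entry
  int pc_offset_bytes = 96 * 1024;     // code emitted so far (example value)
  int table_entries = 2000;            // pending deopt jump-table entries

  // Distance, in instructions, from the first instruction to the table end.
  int64_t span = pc_offset_bytes / kInstrSize +
                 static_cast<int64_t>(table_entries) * kEntrySizeInInstrs;
  std::printf("span = %lld instructions, fits in int16: %s\n",
              static_cast<long long>(span), is_int16(span) ? "yes" : "no");
  return 0;
}
```
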
| 404 HConstant* constant = chunk_->LookupConstant(const_op); | 404 HConstant* constant = chunk_->LookupConstant(const_op); |
| 405 Handle<Object> literal = constant->handle(); | 405 Handle<Object> literal = constant->handle(); |
| 406 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 406 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 407 if (r.IsInteger32()) { | 407 if (r.IsInteger32()) { |
| 408 ASSERT(literal->IsNumber()); | 408 ASSERT(literal->IsNumber()); |
| 409 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); | 409 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
| 410 } else if (r.IsSmi()) { | 410 } else if (r.IsSmi()) { |
| 411 ASSERT(constant->HasSmiValue()); | 411 ASSERT(constant->HasSmiValue()); |
| 412 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value()))); | 412 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value()))); |
| 413 } else if (r.IsDouble()) { | 413 } else if (r.IsDouble()) { |
| 414 Abort("EmitLoadRegister: Unsupported double immediate."); | 414 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate); |
| 415 } else { | 415 } else { |
| 416 ASSERT(r.IsTagged()); | 416 ASSERT(r.IsTagged()); |
| 417 __ LoadObject(scratch, literal); | 417 __ LoadObject(scratch, literal); |
| 418 } | 418 } |
| 419 return scratch; | 419 return scratch; |
| 420 } else if (op->IsStackSlot() || op->IsArgument()) { | 420 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 421 __ lw(scratch, ToMemOperand(op)); | 421 __ lw(scratch, ToMemOperand(op)); |
| 422 return scratch; | 422 return scratch; |
| 423 } | 423 } |
| 424 UNREACHABLE(); | 424 UNREACHABLE(); |
| (...skipping 17 matching lines...) |
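
`EmitLoadRegister()` above loads a smi constant with a plain `li`, which works because a 32-bit smi is just the integer value shifted left by one with a zero tag bit. A minimal sketch of that tagging arithmetic, using hypothetical helpers rather than V8's `Smi` class:

```cpp
// Sketch of 32-bit smi tagging as assumed by the constant-loading paths
// above: a 31-bit payload lives in the upper bits and the low tag bit is 0.
#include <cassert>
#include <cstdint>

const int kSmiTagSize = 1;
const int kSmiTag = 0;

static int32_t SmiFromInt(int32_t value) {
  return value << kSmiTagSize;   // tag: shift the payload up, low bit stays 0
}

static int32_t SmiToInt(int32_t smi) {
  return smi >> kSmiTagSize;     // untag: arithmetic shift back down
}

int main() {
  int32_t smi = SmiFromInt(42);
  assert((smi & ((1 << kSmiTagSize) - 1)) == kSmiTag);  // tag bit is clear
  assert(SmiToInt(smi) == 42);
  return 0;
}
```
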
| 442 HConstant* constant = chunk_->LookupConstant(const_op); | 442 HConstant* constant = chunk_->LookupConstant(const_op); |
| 443 Handle<Object> literal = constant->handle(); | 443 Handle<Object> literal = constant->handle(); |
| 444 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 444 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 445 if (r.IsInteger32()) { | 445 if (r.IsInteger32()) { |
| 446 ASSERT(literal->IsNumber()); | 446 ASSERT(literal->IsNumber()); |
| 447 __ li(at, Operand(static_cast<int32_t>(literal->Number()))); | 447 __ li(at, Operand(static_cast<int32_t>(literal->Number()))); |
| 448 __ mtc1(at, flt_scratch); | 448 __ mtc1(at, flt_scratch); |
| 449 __ cvt_d_w(dbl_scratch, flt_scratch); | 449 __ cvt_d_w(dbl_scratch, flt_scratch); |
| 450 return dbl_scratch; | 450 return dbl_scratch; |
| 451 } else if (r.IsDouble()) { | 451 } else if (r.IsDouble()) { |
| 452 Abort("unsupported double immediate"); | 452 Abort(kUnsupportedDoubleImmediate); |
| 453 } else if (r.IsTagged()) { | 453 } else if (r.IsTagged()) { |
| 454 Abort("unsupported tagged immediate"); | 454 Abort(kUnsupportedTaggedImmediate); |
| 455 } | 455 } |
| 456 } else if (op->IsStackSlot() || op->IsArgument()) { | 456 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 457 MemOperand mem_op = ToMemOperand(op); | 457 MemOperand mem_op = ToMemOperand(op); |
| 458 __ ldc1(dbl_scratch, mem_op); | 458 __ ldc1(dbl_scratch, mem_op); |
| 459 return dbl_scratch; | 459 return dbl_scratch; |
| 460 } | 460 } |
| 461 UNREACHABLE(); | 461 UNREACHABLE(); |
| 462 return dbl_scratch; | 462 return dbl_scratch; |
| 463 } | 463 } |
| 464 | 464 |
| (...skipping 48 matching lines...) |
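
For an `Integer32` constant, `EmitLoadDoubleRegister()` above materializes the double by moving the raw 32-bit value into an FPU register (`mtc1`) and converting it from word to double (`cvt_d_w`); genuine double and tagged immediates simply abort in this helper. A host-side sketch of what that two-instruction pair computes (a model of the semantics, not MIPS emulation):

```cpp
// Host-side model of the mtc1 + cvt_d_w pair used above: mtc1 copies the
// GPR bit pattern into an FPU register unchanged, and cvt.d.w then reads
// those bits as a signed 32-bit word and converts them to a double.
#include <cstdint>
#include <cstring>
#include <cstdio>

int main() {
  int32_t gpr = -7;                      // the Integer32 constant in a GPR

  uint32_t fpu_bits;                     // mtc1: raw, uninterpreted bit copy
  std::memcpy(&fpu_bits, &gpr, sizeof(fpu_bits));

  int32_t word;                          // cvt.d.w: read the bits as a word...
  std::memcpy(&word, &fpu_bits, sizeof(word));
  double result = static_cast<double>(word);   // ...and convert to double

  std::printf("%d -> %.1f\n", gpr, result);    // prints: -7 -> -7.0
  return 0;
}
```
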
| 513 LConstantOperand* const_op = LConstantOperand::cast(op); | 513 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 514 HConstant* constant = chunk()->LookupConstant(const_op); | 514 HConstant* constant = chunk()->LookupConstant(const_op); |
| 515 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 515 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 516 if (r.IsSmi()) { | 516 if (r.IsSmi()) { |
| 517 ASSERT(constant->HasSmiValue()); | 517 ASSERT(constant->HasSmiValue()); |
| 518 return Operand(Smi::FromInt(constant->Integer32Value())); | 518 return Operand(Smi::FromInt(constant->Integer32Value())); |
| 519 } else if (r.IsInteger32()) { | 519 } else if (r.IsInteger32()) { |
| 520 ASSERT(constant->HasInteger32Value()); | 520 ASSERT(constant->HasInteger32Value()); |
| 521 return Operand(constant->Integer32Value()); | 521 return Operand(constant->Integer32Value()); |
| 522 } else if (r.IsDouble()) { | 522 } else if (r.IsDouble()) { |
| 523 Abort("ToOperand Unsupported double immediate."); | 523 Abort(kToOperandUnsupportedDoubleImmediate); |
| 524 } | 524 } |
| 525 ASSERT(r.IsTagged()); | 525 ASSERT(r.IsTagged()); |
| 526 return Operand(constant->handle()); | 526 return Operand(constant->handle()); |
| 527 } else if (op->IsRegister()) { | 527 } else if (op->IsRegister()) { |
| 528 return Operand(ToRegister(op)); | 528 return Operand(ToRegister(op)); |
| 529 } else if (op->IsDoubleRegister()) { | 529 } else if (op->IsDoubleRegister()) { |
| 530 Abort("ToOperand IsDoubleRegister unimplemented"); | 530 Abort(kToOperandIsDoubleRegisterUnimplemented); |
| 531 return Operand(0); | 531 return Operand(0); |
| 532 } | 532 } |
| 533 // Stack slots not implemented, use ToMemOperand instead. | 533 // Stack slots not implemented, use ToMemOperand instead. |
| 534 UNREACHABLE(); | 534 UNREACHABLE(); |
| 535 return Operand(0); | 535 return Operand(0); |
| 536 } | 536 } |
| 537 | 537 |
| 538 | 538 |
| 539 MemOperand LCodeGen::ToMemOperand(LOperand* op) const { | 539 MemOperand LCodeGen::ToMemOperand(LOperand* op) const { |
| 540 ASSERT(!op->IsRegister()); | 540 ASSERT(!op->IsRegister()); |
| (...skipping 200 matching lines...) |
| 741 Deoptimizer::BailoutType bailout_type, | 741 Deoptimizer::BailoutType bailout_type, |
| 742 Register src1, | 742 Register src1, |
| 743 const Operand& src2) { | 743 const Operand& src2) { |
| 744 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 744 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 745 ASSERT(environment->HasBeenRegistered()); | 745 ASSERT(environment->HasBeenRegistered()); |
| 746 int id = environment->deoptimization_index(); | 746 int id = environment->deoptimization_index(); |
| 747 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 747 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 748 Address entry = | 748 Address entry = |
| 749 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 749 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 750 if (entry == NULL) { | 750 if (entry == NULL) { |
| 751 Abort("bailout was not prepared"); | 751 Abort(kBailoutWasNotPrepared); |
| 752 return; | 752 return; |
| 753 } | 753 } |
| 754 | 754 |
| 755 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. | 755 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. |
| 756 if (FLAG_deopt_every_n_times == 1 && | 756 if (FLAG_deopt_every_n_times == 1 && |
| 757 !info()->IsStub() && | 757 !info()->IsStub() && |
| 758 info()->opt_count() == id) { | 758 info()->opt_count() == id) { |
| 759 ASSERT(frame_is_built_); | 759 ASSERT(frame_is_built_); |
| 760 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 760 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 761 return; | 761 return; |
| (...skipping 1001 matching lines...) |
| 1763 | 1763 |
| 1764 if (FLAG_debug_code) { | 1764 if (FLAG_debug_code) { |
| 1765 __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset)); | 1765 __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 1766 __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset)); | 1766 __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset)); |
| 1767 | 1767 |
| 1768 __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask)); | 1768 __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask)); |
| 1769 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 1769 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1770 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 1770 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1771 __ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING | 1771 __ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING |
| 1772 ? one_byte_seq_type : two_byte_seq_type)); | 1772 ? one_byte_seq_type : two_byte_seq_type)); |
| 1773 __ Check(eq, "Unexpected string type", at, Operand(zero_reg)); | 1773 __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg)); |
| 1774 } | 1774 } |
| 1775 | 1775 |
| 1776 __ Addu(scratch, | 1776 __ Addu(scratch, |
| 1777 string, | 1777 string, |
| 1778 Operand(SeqString::kHeaderSize - kHeapObjectTag)); | 1778 Operand(SeqString::kHeaderSize - kHeapObjectTag)); |
| 1779 if (encoding == String::ONE_BYTE_ENCODING) { | 1779 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1780 __ Addu(at, scratch, index); | 1780 __ Addu(at, scratch, index); |
| 1781 __ sb(value, MemOperand(at)); | 1781 __ sb(value, MemOperand(at)); |
| 1782 } else { | 1782 } else { |
| 1783 __ sll(at, index, 1); | 1783 __ sll(at, index, 1); |
| (...skipping 1285 matching lines...) |
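
The debug check above masks the instance type down to its representation and encoding bits before comparing against the expected sequential one-byte or two-byte pattern, and the store then addresses the character as a fixed header offset plus the index, scaled by two for two-byte strings (the `sll(at, index, 1)` path). A sketch of that offset arithmetic, with an assumed header size:

```cpp
// Sketch of the addressing used above for writing into a sequential
// string: the payload starts at a fixed header offset, and each element is
// 1 byte (one-byte strings) or 2 bytes (two-byte strings). The header size
// here is an illustrative value, not V8's real layout constant.
#include <cstdint>
#include <cstdio>

enum Encoding { ONE_BYTE_ENCODING, TWO_BYTE_ENCODING };

// Byte offset of character `index` from the (untagged) string address.
static uint32_t CharOffset(Encoding encoding, uint32_t header_size,
                           uint32_t index) {
  uint32_t scale = (encoding == ONE_BYTE_ENCODING) ? 1 : 2;  // sb vs. 2-byte store
  return header_size + index * scale;
}

int main() {
  const uint32_t kHeaderSize = 12;  // assumed SeqString header size
  std::printf("one-byte, index 5 -> offset %u\n",
              CharOffset(ONE_BYTE_ENCODING, kHeaderSize, 5));
  std::printf("two-byte, index 5 -> offset %u\n",
              CharOffset(TWO_BYTE_ENCODING, kHeaderSize, 5));
  return 0;
}
```
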
| 3069 | 3069 |
| 3070 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { | 3070 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { |
| 3071 Register external_pointer = ToRegister(instr->elements()); | 3071 Register external_pointer = ToRegister(instr->elements()); |
| 3072 Register key = no_reg; | 3072 Register key = no_reg; |
| 3073 ElementsKind elements_kind = instr->elements_kind(); | 3073 ElementsKind elements_kind = instr->elements_kind(); |
| 3074 bool key_is_constant = instr->key()->IsConstantOperand(); | 3074 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 3075 int constant_key = 0; | 3075 int constant_key = 0; |
| 3076 if (key_is_constant) { | 3076 if (key_is_constant) { |
| 3077 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 3077 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 3078 if (constant_key & 0xF0000000) { | 3078 if (constant_key & 0xF0000000) { |
| 3079 Abort("array index constant value too big."); | 3079 Abort(kArrayIndexConstantValueTooBig); |
| 3080 } | 3080 } |
| 3081 } else { | 3081 } else { |
| 3082 key = ToRegister(instr->key()); | 3082 key = ToRegister(instr->key()); |
| 3083 } | 3083 } |
| 3084 int element_size_shift = ElementsKindToShiftSize(elements_kind); | 3084 int element_size_shift = ElementsKindToShiftSize(elements_kind); |
| 3085 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 3085 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 3086 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 3086 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 3087 int additional_offset = instr->additional_index() << element_size_shift; | 3087 int additional_offset = instr->additional_index() << element_size_shift; |
| 3088 | 3088 |
| 3089 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || | 3089 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || |
| (...skipping 65 matching lines...) |
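
`DoLoadKeyedExternalArray()` above scales a smi-tagged key by `element_size_shift - kSmiTagSize` rather than the full element shift: the smi already carries one left shift, so the smaller scale lands on the same byte offset as scaling the raw index. A tiny sketch verifying that equivalence with example values:

```cpp
// Sketch of the key-scaling trick above: a smi-tagged key equals the
// untagged index << 1, so shifting it by (element_size_shift - 1) produces
// the same byte offset as shifting the raw index by element_size_shift.
#include <cassert>
#include <cstdint>

int main() {
  const int kSmiTagSize = 1;
  const int element_size_shift = 3;          // e.g. 8-byte (double) elements

  int32_t index = 13;                        // untagged key
  int32_t smi_key = index << kSmiTagSize;    // what the key register holds

  int32_t offset_from_index = index << element_size_shift;
  int32_t offset_from_smi = smi_key << (element_size_shift - kSmiTagSize);
  assert(offset_from_index == offset_from_smi);
  return 0;
}
```
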
| 3155 DoubleRegister result = ToDoubleRegister(instr->result()); | 3155 DoubleRegister result = ToDoubleRegister(instr->result()); |
| 3156 Register scratch = scratch0(); | 3156 Register scratch = scratch0(); |
| 3157 | 3157 |
| 3158 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); | 3158 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); |
| 3159 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 3159 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 3160 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 3160 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 3161 int constant_key = 0; | 3161 int constant_key = 0; |
| 3162 if (key_is_constant) { | 3162 if (key_is_constant) { |
| 3163 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 3163 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 3164 if (constant_key & 0xF0000000) { | 3164 if (constant_key & 0xF0000000) { |
| 3165 Abort("array index constant value too big."); | 3165 Abort(kArrayIndexConstantValueTooBig); |
| 3166 } | 3166 } |
| 3167 } else { | 3167 } else { |
| 3168 key = ToRegister(instr->key()); | 3168 key = ToRegister(instr->key()); |
| 3169 } | 3169 } |
| 3170 | 3170 |
| 3171 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + | 3171 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + |
| 3172 ((constant_key + instr->additional_index()) << element_size_shift); | 3172 ((constant_key + instr->additional_index()) << element_size_shift); |
| 3173 if (!key_is_constant) { | 3173 if (!key_is_constant) { |
| 3174 __ sll(scratch, key, shift_size); | 3174 __ sll(scratch, key, shift_size); |
| 3175 __ Addu(elements, elements, scratch); | 3175 __ Addu(elements, elements, scratch); |
| (...skipping 250 matching lines...) |
| 3426 ParameterCount actual(receiver); | 3426 ParameterCount actual(receiver); |
| 3427 __ InvokeFunction(function, actual, CALL_FUNCTION, | 3427 __ InvokeFunction(function, actual, CALL_FUNCTION, |
| 3428 safepoint_generator, CALL_AS_METHOD); | 3428 safepoint_generator, CALL_AS_METHOD); |
| 3429 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3429 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3430 } | 3430 } |
| 3431 | 3431 |
| 3432 | 3432 |
| 3433 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 3433 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 3434 LOperand* argument = instr->value(); | 3434 LOperand* argument = instr->value(); |
| 3435 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { | 3435 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { |
| 3436 Abort("DoPushArgument not implemented for double type."); | 3436 Abort(kDoPushArgumentNotImplementedForDoubleType); |
| 3437 } else { | 3437 } else { |
| 3438 Register argument_reg = EmitLoadRegister(argument, at); | 3438 Register argument_reg = EmitLoadRegister(argument, at); |
| 3439 __ push(argument_reg); | 3439 __ push(argument_reg); |
| 3440 } | 3440 } |
| 3441 } | 3441 } |
| 3442 | 3442 |
| 3443 | 3443 |
| 3444 void LCodeGen::DoDrop(LDrop* instr) { | 3444 void LCodeGen::DoDrop(LDrop* instr) { |
| 3445 __ Drop(instr->count()); | 3445 __ Drop(instr->count()); |
| 3446 } | 3446 } |
| (...skipping 804 matching lines...) |
| 4251 | 4251 |
| 4252 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4252 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
| 4253 Register external_pointer = ToRegister(instr->elements()); | 4253 Register external_pointer = ToRegister(instr->elements()); |
| 4254 Register key = no_reg; | 4254 Register key = no_reg; |
| 4255 ElementsKind elements_kind = instr->elements_kind(); | 4255 ElementsKind elements_kind = instr->elements_kind(); |
| 4256 bool key_is_constant = instr->key()->IsConstantOperand(); | 4256 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 4257 int constant_key = 0; | 4257 int constant_key = 0; |
| 4258 if (key_is_constant) { | 4258 if (key_is_constant) { |
| 4259 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 4259 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 4260 if (constant_key & 0xF0000000) { | 4260 if (constant_key & 0xF0000000) { |
| 4261 Abort("array index constant value too big."); | 4261 Abort(kArrayIndexConstantValueTooBig); |
| 4262 } | 4262 } |
| 4263 } else { | 4263 } else { |
| 4264 key = ToRegister(instr->key()); | 4264 key = ToRegister(instr->key()); |
| 4265 } | 4265 } |
| 4266 int element_size_shift = ElementsKindToShiftSize(elements_kind); | 4266 int element_size_shift = ElementsKindToShiftSize(elements_kind); |
| 4267 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 4267 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 4268 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 4268 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 4269 int additional_offset = instr->additional_index() << element_size_shift; | 4269 int additional_offset = instr->additional_index() << element_size_shift; |
| 4270 | 4270 |
| 4271 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || | 4271 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || |
| (...skipping 57 matching lines...) |
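
The `constant_key & 0xF0000000` guard recurs in all of these keyed load/store paths: once the constant index is shifted by up to the element size, bits in the top nibble could no longer be represented in a 32-bit byte offset, so such keys are rejected with `Abort(kArrayIndexConstantValueTooBig)`. A small sketch of what the mask catches; the element shift chosen is an example value:

```cpp
// Sketch of the `constant_key & 0xF0000000` guard used by the keyed
// load/store paths: keys with any of the top four bits set would overflow
// a positive 32-bit byte offset once shifted by the element size, so they
// are rejected before any address arithmetic happens.
#include <cstdio>

int main() {
  const unsigned kMask = 0xF0000000u;
  const int element_size_shift = 3;        // 8-byte elements (widest case here)

  unsigned ok_key = 0x0FFFFFFFu;           // passes: top nibble clear
  unsigned bad_key = 0x10000000u;          // aborts: top nibble set

  std::printf("ok_key  & mask = 0x%X\n", ok_key & kMask);    // 0x0
  std::printf("bad_key & mask = 0x%X\n", bad_key & kMask);   // 0x10000000
  // The accepted key still fits a positive 32-bit offset after scaling:
  std::printf("ok_key << %d = 0x%X\n", element_size_shift,
              ok_key << element_size_shift);                 // 0x7FFFFFF8
  return 0;
}
```
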
| 4329 Register scratch = scratch0(); | 4329 Register scratch = scratch0(); |
| 4330 bool key_is_constant = instr->key()->IsConstantOperand(); | 4330 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 4331 int constant_key = 0; | 4331 int constant_key = 0; |
| 4332 Label not_nan; | 4332 Label not_nan; |
| 4333 | 4333 |
| 4334 // Calculate the effective address of the slot in the array to store the | 4334 // Calculate the effective address of the slot in the array to store the |
| 4335 // double value. | 4335 // double value. |
| 4336 if (key_is_constant) { | 4336 if (key_is_constant) { |
| 4337 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 4337 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 4338 if (constant_key & 0xF0000000) { | 4338 if (constant_key & 0xF0000000) { |
| 4339 Abort("array index constant value too big."); | 4339 Abort(kArrayIndexConstantValueTooBig); |
| 4340 } | 4340 } |
| 4341 } else { | 4341 } else { |
| 4342 key = ToRegister(instr->key()); | 4342 key = ToRegister(instr->key()); |
| 4343 } | 4343 } |
| 4344 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); | 4344 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); |
| 4345 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 4345 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 4346 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 4346 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 4347 if (key_is_constant) { | 4347 if (key_is_constant) { |
| 4348 __ Addu(scratch, elements, Operand((constant_key << element_size_shift) + | 4348 __ Addu(scratch, elements, Operand((constant_key << element_size_shift) + |
| 4349 FixedDoubleArray::kHeaderSize - kHeapObjectTag)); | 4349 FixedDoubleArray::kHeaderSize - kHeapObjectTag)); |
| (...skipping 1492 matching lines...) |
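
For a constant key, `DoStoreKeyedFixedDoubleArray()` above folds the whole address computation into one immediate: `(constant_key << element_size_shift) + FixedDoubleArray::kHeaderSize - kHeapObjectTag`. A worked sketch of that arithmetic with assumed header and tag values (placeholders, not V8's actual constants):

```cpp
// Worked sketch of the constant-key effective address computed above:
// offset = (key << element_size_shift) + header_size - heap_object_tag.
#include <cstdio>

int main() {
  const int kHeapObjectTag = 1;       // assumed: tagged pointers are off by one
  const int kHeaderSize = 8;          // assumed FixedDoubleArray header size
  const int element_size_shift = 3;   // doubles are 8 bytes

  int constant_key = 4;
  int offset =
      (constant_key << element_size_shift) + kHeaderSize - kHeapObjectTag;
  std::printf("store at [elements + %d]\n", offset);  // 4*8 + 8 - 1 = 39
  return 0;
}
```
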
| 5842 __ Subu(scratch, result, scratch); | 5842 __ Subu(scratch, result, scratch); |
| 5843 __ lw(result, FieldMemOperand(scratch, | 5843 __ lw(result, FieldMemOperand(scratch, |
| 5844 FixedArray::kHeaderSize - kPointerSize)); | 5844 FixedArray::kHeaderSize - kPointerSize)); |
| 5845 __ bind(&done); | 5845 __ bind(&done); |
| 5846 } | 5846 } |
| 5847 | 5847 |
| 5848 | 5848 |
| 5849 #undef __ | 5849 #undef __ |
| 5850 | 5850 |
| 5851 } } // namespace v8::internal | 5851 } } // namespace v8::internal |