| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 73 matching lines...) |
| 84 code->set_stack_slots(GetStackSlotCount()); | 84 code->set_stack_slots(GetStackSlotCount()); |
| 85 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 85 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 86 if (FLAG_weak_embedded_maps_in_optimized_code) { | 86 if (FLAG_weak_embedded_maps_in_optimized_code) { |
| 87 RegisterDependentCodeForEmbeddedMaps(code); | 87 RegisterDependentCodeForEmbeddedMaps(code); |
| 88 } | 88 } |
| 89 PopulateDeoptimizationData(code); | 89 PopulateDeoptimizationData(code); |
| 90 info()->CommitDependencies(code); | 90 info()->CommitDependencies(code); |
| 91 } | 91 } |
| 92 | 92 |
| 93 | 93 |
| 94 void LChunkBuilder::Abort(BailoutReason reason) { | 94 void LChunkBuilder::Abort(const char* reason) { |
| 95 info()->set_bailout_reason(reason); | 95 info()->set_bailout_reason(reason); |
| 96 status_ = ABORTED; | 96 status_ = ABORTED; |
| 97 } | 97 } |
| 98 | 98 |
| 99 | 99 |
| 100 void LCodeGen::Comment(const char* format, ...) { | 100 void LCodeGen::Comment(const char* format, ...) { |
| 101 if (!FLAG_code_comments) return; | 101 if (!FLAG_code_comments) return; |
| 102 char buffer[4 * KB]; | 102 char buffer[4 * KB]; |
| 103 StringBuilder builder(buffer, ARRAY_SIZE(buffer)); | 103 StringBuilder builder(buffer, ARRAY_SIZE(buffer)); |
| 104 va_list arguments; | 104 va_list arguments; |
| (...skipping 212 matching lines...) |
| 317 | 317 |
| 318 | 318 |
| 319 bool LCodeGen::GenerateDeoptJumpTable() { | 319 bool LCodeGen::GenerateDeoptJumpTable() { |
| 320 // Check that the jump table is accessible from everywhere in the function | 320 // Check that the jump table is accessible from everywhere in the function |
| 321 // code, i.e. that offsets to the table can be encoded in the 16bit signed | 321 // code, i.e. that offsets to the table can be encoded in the 16bit signed |
| 322 // immediate of a branch instruction. | 322 // immediate of a branch instruction. |
| 323 // To simplify we consider the code size from the first instruction to the | 323 // To simplify we consider the code size from the first instruction to the |
| 324 // end of the jump table. | 324 // end of the jump table. |
| 325 if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) + | 325 if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) + |
| 326 deopt_jump_table_.length() * 12)) { | 326 deopt_jump_table_.length() * 12)) { |
| 327 Abort(kGeneratedCodeIsTooLarge); | 327 Abort("Generated code is too large"); |
| 328 } | 328 } |
| 329 | 329 |
| 330 if (deopt_jump_table_.length() > 0) { | 330 if (deopt_jump_table_.length() > 0) { |
| 331 Comment(";;; -------------------- Jump table --------------------"); | 331 Comment(";;; -------------------- Jump table --------------------"); |
| 332 } | 332 } |
| 333 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 333 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 334 Label table_start; | 334 Label table_start; |
| 335 __ bind(&table_start); | 335 __ bind(&table_start); |
| 336 Label needs_frame; | 336 Label needs_frame; |
| 337 for (int i = 0; i < deopt_jump_table_.length(); i++) { | 337 for (int i = 0; i < deopt_jump_table_.length(); i++) { |
| (...skipping 66 matching lines...) |
| 404 HConstant* constant = chunk_->LookupConstant(const_op); | 404 HConstant* constant = chunk_->LookupConstant(const_op); |
| 405 Handle<Object> literal = constant->handle(); | 405 Handle<Object> literal = constant->handle(); |
| 406 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 406 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 407 if (r.IsInteger32()) { | 407 if (r.IsInteger32()) { |
| 408 ASSERT(literal->IsNumber()); | 408 ASSERT(literal->IsNumber()); |
| 409 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); | 409 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
| 410 } else if (r.IsSmi()) { | 410 } else if (r.IsSmi()) { |
| 411 ASSERT(constant->HasSmiValue()); | 411 ASSERT(constant->HasSmiValue()); |
| 412 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value()))); | 412 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value()))); |
| 413 } else if (r.IsDouble()) { | 413 } else if (r.IsDouble()) { |
| 414 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate); | 414 Abort("EmitLoadRegister: Unsupported double immediate."); |
| 415 } else { | 415 } else { |
| 416 ASSERT(r.IsTagged()); | 416 ASSERT(r.IsTagged()); |
| 417 __ LoadObject(scratch, literal); | 417 __ LoadObject(scratch, literal); |
| 418 } | 418 } |
| 419 return scratch; | 419 return scratch; |
| 420 } else if (op->IsStackSlot() || op->IsArgument()) { | 420 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 421 __ lw(scratch, ToMemOperand(op)); | 421 __ lw(scratch, ToMemOperand(op)); |
| 422 return scratch; | 422 return scratch; |
| 423 } | 423 } |
| 424 UNREACHABLE(); | 424 UNREACHABLE(); |
| (...skipping 17 matching lines...) |
| 442 HConstant* constant = chunk_->LookupConstant(const_op); | 442 HConstant* constant = chunk_->LookupConstant(const_op); |
| 443 Handle<Object> literal = constant->handle(); | 443 Handle<Object> literal = constant->handle(); |
| 444 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 444 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 445 if (r.IsInteger32()) { | 445 if (r.IsInteger32()) { |
| 446 ASSERT(literal->IsNumber()); | 446 ASSERT(literal->IsNumber()); |
| 447 __ li(at, Operand(static_cast<int32_t>(literal->Number()))); | 447 __ li(at, Operand(static_cast<int32_t>(literal->Number()))); |
| 448 __ mtc1(at, flt_scratch); | 448 __ mtc1(at, flt_scratch); |
| 449 __ cvt_d_w(dbl_scratch, flt_scratch); | 449 __ cvt_d_w(dbl_scratch, flt_scratch); |
| 450 return dbl_scratch; | 450 return dbl_scratch; |
| 451 } else if (r.IsDouble()) { | 451 } else if (r.IsDouble()) { |
| 452 Abort(kUnsupportedDoubleImmediate); | 452 Abort("unsupported double immediate"); |
| 453 } else if (r.IsTagged()) { | 453 } else if (r.IsTagged()) { |
| 454 Abort(kUnsupportedTaggedImmediate); | 454 Abort("unsupported tagged immediate"); |
| 455 } | 455 } |
| 456 } else if (op->IsStackSlot() || op->IsArgument()) { | 456 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 457 MemOperand mem_op = ToMemOperand(op); | 457 MemOperand mem_op = ToMemOperand(op); |
| 458 __ ldc1(dbl_scratch, mem_op); | 458 __ ldc1(dbl_scratch, mem_op); |
| 459 return dbl_scratch; | 459 return dbl_scratch; |
| 460 } | 460 } |
| 461 UNREACHABLE(); | 461 UNREACHABLE(); |
| 462 return dbl_scratch; | 462 return dbl_scratch; |
| 463 } | 463 } |
| 464 | 464 |
| (...skipping 48 matching lines...) |
| 513 LConstantOperand* const_op = LConstantOperand::cast(op); | 513 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 514 HConstant* constant = chunk()->LookupConstant(const_op); | 514 HConstant* constant = chunk()->LookupConstant(const_op); |
| 515 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 515 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 516 if (r.IsSmi()) { | 516 if (r.IsSmi()) { |
| 517 ASSERT(constant->HasSmiValue()); | 517 ASSERT(constant->HasSmiValue()); |
| 518 return Operand(Smi::FromInt(constant->Integer32Value())); | 518 return Operand(Smi::FromInt(constant->Integer32Value())); |
| 519 } else if (r.IsInteger32()) { | 519 } else if (r.IsInteger32()) { |
| 520 ASSERT(constant->HasInteger32Value()); | 520 ASSERT(constant->HasInteger32Value()); |
| 521 return Operand(constant->Integer32Value()); | 521 return Operand(constant->Integer32Value()); |
| 522 } else if (r.IsDouble()) { | 522 } else if (r.IsDouble()) { |
| 523 Abort(kToOperandUnsupportedDoubleImmediate); | 523 Abort("ToOperand Unsupported double immediate."); |
| 524 } | 524 } |
| 525 ASSERT(r.IsTagged()); | 525 ASSERT(r.IsTagged()); |
| 526 return Operand(constant->handle()); | 526 return Operand(constant->handle()); |
| 527 } else if (op->IsRegister()) { | 527 } else if (op->IsRegister()) { |
| 528 return Operand(ToRegister(op)); | 528 return Operand(ToRegister(op)); |
| 529 } else if (op->IsDoubleRegister()) { | 529 } else if (op->IsDoubleRegister()) { |
| 530 Abort(kToOperandIsDoubleRegisterUnimplemented); | 530 Abort("ToOperand IsDoubleRegister unimplemented"); |
| 531 return Operand(0); | 531 return Operand(0); |
| 532 } | 532 } |
| 533 // Stack slots not implemented, use ToMemOperand instead. | 533 // Stack slots not implemented, use ToMemOperand instead. |
| 534 UNREACHABLE(); | 534 UNREACHABLE(); |
| 535 return Operand(0); | 535 return Operand(0); |
| 536 } | 536 } |
| 537 | 537 |
| 538 | 538 |
| 539 MemOperand LCodeGen::ToMemOperand(LOperand* op) const { | 539 MemOperand LCodeGen::ToMemOperand(LOperand* op) const { |
| 540 ASSERT(!op->IsRegister()); | 540 ASSERT(!op->IsRegister()); |
| (...skipping 200 matching lines...) |
| 741 Deoptimizer::BailoutType bailout_type, | 741 Deoptimizer::BailoutType bailout_type, |
| 742 Register src1, | 742 Register src1, |
| 743 const Operand& src2) { | 743 const Operand& src2) { |
| 744 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 744 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 745 ASSERT(environment->HasBeenRegistered()); | 745 ASSERT(environment->HasBeenRegistered()); |
| 746 int id = environment->deoptimization_index(); | 746 int id = environment->deoptimization_index(); |
| 747 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 747 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 748 Address entry = | 748 Address entry = |
| 749 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 749 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 750 if (entry == NULL) { | 750 if (entry == NULL) { |
| 751 Abort(kBailoutWasNotPrepared); | 751 Abort("bailout was not prepared"); |
| 752 return; | 752 return; |
| 753 } | 753 } |
| 754 | 754 |
| 755 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. | 755 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. |
| 756 if (FLAG_deopt_every_n_times == 1 && | 756 if (FLAG_deopt_every_n_times == 1 && |
| 757 !info()->IsStub() && | 757 !info()->IsStub() && |
| 758 info()->opt_count() == id) { | 758 info()->opt_count() == id) { |
| 759 ASSERT(frame_is_built_); | 759 ASSERT(frame_is_built_); |
| 760 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 760 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 761 return; | 761 return; |
| (...skipping 288 matching lines...) |
| 1050 // Record the address of the first unknown OSR value as the place to enter. | 1050 // Record the address of the first unknown OSR value as the place to enter. |
| 1051 if (osr_pc_offset_ == -1) osr_pc_offset_ = masm()->pc_offset(); | 1051 if (osr_pc_offset_ == -1) osr_pc_offset_ = masm()->pc_offset(); |
| 1052 } | 1052 } |
| 1053 | 1053 |
| 1054 | 1054 |
| 1055 void LCodeGen::DoModI(LModI* instr) { | 1055 void LCodeGen::DoModI(LModI* instr) { |
| 1056 HMod* hmod = instr->hydrogen(); | 1056 HMod* hmod = instr->hydrogen(); |
| 1057 HValue* left = hmod->left(); | 1057 HValue* left = hmod->left(); |
| 1058 HValue* right = hmod->right(); | 1058 HValue* right = hmod->right(); |
| 1059 if (hmod->HasPowerOf2Divisor()) { | 1059 if (hmod->HasPowerOf2Divisor()) { |
| | 1060 const Register scratch = scratch0(); |
| 1060 const Register left_reg = ToRegister(instr->left()); | 1061 const Register left_reg = ToRegister(instr->left()); |
| | 1062 ASSERT(!left_reg.is(scratch)); |
| 1061 const Register result_reg = ToRegister(instr->result()); | 1063 const Register result_reg = ToRegister(instr->result()); |
| 1062 | 1064 |
| 1063 // Note: The code below even works when right contains kMinInt. | 1065 // Note: The code below even works when right contains kMinInt. |
| 1064 int32_t divisor = Abs(right->GetInteger32Constant()); | 1066 int32_t divisor = Abs(right->GetInteger32Constant()); |
| 1065 | 1067 |
| | 1068 __ mov(scratch, left_reg); |
| | 1069 |
| 1066 Label left_is_not_negative, done; | 1070 Label left_is_not_negative, done; |
| 1067 if (left->CanBeNegative()) { | 1071 if (left->CanBeNegative()) { |
| 1068 __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT, | 1072 __ Branch(USE_DELAY_SLOT, &left_is_not_negative, |
| 1069 &left_is_not_negative, ge, left_reg, Operand(zero_reg)); | 1073 ge, left_reg, Operand(zero_reg)); |
| 1070 __ subu(result_reg, zero_reg, left_reg); | 1074 __ subu(result_reg, zero_reg, left_reg); |
| 1071 __ And(result_reg, result_reg, divisor - 1); | 1075 __ And(result_reg, result_reg, divisor - 1); |
| 1072 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1076 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1073 DeoptimizeIf(eq, instr->environment(), result_reg, Operand(zero_reg)); | 1077 DeoptimizeIf(eq, instr->environment(), result_reg, Operand(zero_reg)); |
| 1074 } | 1078 } |
| 1075 __ Branch(USE_DELAY_SLOT, &done); | 1079 __ Branch(USE_DELAY_SLOT, &done); |
| 1076 __ subu(result_reg, zero_reg, result_reg); | 1080 __ subu(result_reg, zero_reg, result_reg); |
| 1077 } | 1081 } |
| 1078 | 1082 |
| 1079 __ bind(&left_is_not_negative); | 1083 __ bind(&left_is_not_negative); |
| 1080 __ And(result_reg, left_reg, divisor - 1); | 1084 __ And(result_reg, scratch, divisor - 1); |
| 1081 __ bind(&done); | 1085 __ bind(&done); |
| 1082 | 1086 |
| 1083 } else if (hmod->fixed_right_arg().has_value) { | 1087 } else if (hmod->fixed_right_arg().has_value) { |
| | 1088 const Register scratch = scratch0(); |
| 1084 const Register left_reg = ToRegister(instr->left()); | 1089 const Register left_reg = ToRegister(instr->left()); |
| 1085 const Register result_reg = ToRegister(instr->result()); | 1090 const Register result_reg = ToRegister(instr->result()); |
| 1086 const Register right_reg = ToRegister(instr->right()); | 1091 |
| | 1092 Register right_reg = EmitLoadRegister(instr->right(), scratch); |
| 1087 | 1093 |
| 1088 int32_t divisor = hmod->fixed_right_arg().value; | 1094 int32_t divisor = hmod->fixed_right_arg().value; |
| 1089 ASSERT(IsPowerOf2(divisor)); | 1095 ASSERT(IsPowerOf2(divisor)); |
| 1090 | 1096 |
| 1091 // Check if our assumption of a fixed right operand still holds. | 1097 // Check if our assumption of a fixed right operand still holds. |
| 1092 DeoptimizeIf(ne, instr->environment(), right_reg, Operand(divisor)); | 1098 DeoptimizeIf(ne, instr->environment(), right_reg, Operand(divisor)); |
| 1093 | 1099 |
| 1094 Label left_is_not_negative, done; | 1100 Label left_is_not_negative, done; |
| 1095 if (left->CanBeNegative()) { | 1101 if (left->CanBeNegative()) { |
| 1096 __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT, | 1102 __ Branch(USE_DELAY_SLOT, &left_is_not_negative, |
| 1097 &left_is_not_negative, ge, left_reg, Operand(zero_reg)); | 1103 ge, left_reg, Operand(zero_reg)); |
| 1098 __ subu(result_reg, zero_reg, left_reg); | 1104 __ subu(result_reg, zero_reg, left_reg); |
| 1099 __ And(result_reg, result_reg, divisor - 1); | 1105 __ And(result_reg, result_reg, divisor - 1); |
| 1100 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1106 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1101 DeoptimizeIf(eq, instr->environment(), result_reg, Operand(zero_reg)); | 1107 DeoptimizeIf(eq, instr->environment(), result_reg, Operand(zero_reg)); |
| 1102 } | 1108 } |
| 1103 __ Branch(USE_DELAY_SLOT, &done); | 1109 __ Branch(USE_DELAY_SLOT, &done); |
| 1104 __ subu(result_reg, zero_reg, result_reg); | 1110 __ subu(result_reg, zero_reg, result_reg); |
| 1105 } | 1111 } |
| 1106 | 1112 |
| 1107 __ bind(&left_is_not_negative); | 1113 __ bind(&left_is_not_negative); |
| (...skipping 388 matching lines...) |
| 1496 } | 1502 } |
| 1497 | 1503 |
| 1498 switch (instr->op()) { | 1504 switch (instr->op()) { |
| 1499 case Token::BIT_AND: | 1505 case Token::BIT_AND: |
| 1500 __ And(result, left, right); | 1506 __ And(result, left, right); |
| 1501 break; | 1507 break; |
| 1502 case Token::BIT_OR: | 1508 case Token::BIT_OR: |
| 1503 __ Or(result, left, right); | 1509 __ Or(result, left, right); |
| 1504 break; | 1510 break; |
| 1505 case Token::BIT_XOR: | 1511 case Token::BIT_XOR: |
| 1506 if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) { | 1512 __ Xor(result, left, right); |
| 1507 __ Nor(result, zero_reg, left); | |
| 1508 } else { | |
| 1509 __ Xor(result, left, right); | |
| 1510 } | |
| 1511 break; | 1513 break; |
| 1512 default: | 1514 default: |
| 1513 UNREACHABLE(); | 1515 UNREACHABLE(); |
| 1514 break; | 1516 break; |
| 1515 } | 1517 } |
| 1516 } | 1518 } |
| 1517 | 1519 |
| 1518 | 1520 |
| 1519 void LCodeGen::DoShiftI(LShiftI* instr) { | 1521 void LCodeGen::DoShiftI(LShiftI* instr) { |
| 1520 // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so | 1522 // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so |
| (...skipping 240 matching lines...) |
| 1761 | 1763 |
| 1762 if (FLAG_debug_code) { | 1764 if (FLAG_debug_code) { |
| 1763 __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset)); | 1765 __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 1764 __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset)); | 1766 __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset)); |
| 1765 | 1767 |
| 1766 __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask)); | 1768 __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask)); |
| 1767 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 1769 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1768 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 1770 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1769 __ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING | 1771 __ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING |
| 1770 ? one_byte_seq_type : two_byte_seq_type)); | 1772 ? one_byte_seq_type : two_byte_seq_type)); |
| 1771 __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg)); | 1773 __ Check(eq, "Unexpected string type", at, Operand(zero_reg)); |
| 1772 } | 1774 } |
| 1773 | 1775 |
| 1774 __ Addu(scratch, | 1776 __ Addu(scratch, |
| 1775 string, | 1777 string, |
| 1776 Operand(SeqString::kHeaderSize - kHeapObjectTag)); | 1778 Operand(SeqString::kHeaderSize - kHeapObjectTag)); |
| 1777 if (encoding == String::ONE_BYTE_ENCODING) { | 1779 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1778 __ Addu(at, scratch, index); | 1780 __ Addu(at, scratch, index); |
| 1779 __ sb(value, MemOperand(at)); | 1781 __ sb(value, MemOperand(at)); |
| 1780 } else { | 1782 } else { |
| 1781 __ sll(at, index, 1); | 1783 __ sll(at, index, 1); |
| 1782 __ Addu(at, scratch, at); | 1784 __ Addu(at, scratch, at); |
| 1783 __ sh(value, MemOperand(at)); | 1785 __ sh(value, MemOperand(at)); |
| 1784 } | 1786 } |
| 1785 } | 1787 } |
| 1786 | 1788 |
| 1787 | 1789 |
| | 1790 void LCodeGen::DoBitNotI(LBitNotI* instr) { |
| | 1791 Register input = ToRegister(instr->value()); |
| | 1792 Register result = ToRegister(instr->result()); |
| | 1793 __ Nor(result, zero_reg, Operand(input)); |
| | 1794 } |
| | 1795 |
| | 1796 |
| 1788 void LCodeGen::DoThrow(LThrow* instr) { | 1797 void LCodeGen::DoThrow(LThrow* instr) { |
| 1789 Register input_reg = EmitLoadRegister(instr->value(), at); | 1798 Register input_reg = EmitLoadRegister(instr->value(), at); |
| 1790 __ push(input_reg); | 1799 __ push(input_reg); |
| 1791 CallRuntime(Runtime::kThrow, 1, instr); | 1800 CallRuntime(Runtime::kThrow, 1, instr); |
| 1792 | 1801 |
| 1793 if (FLAG_debug_code) { | 1802 if (FLAG_debug_code) { |
| 1794 __ stop("Unreachable code."); | 1803 __ stop("Unreachable code."); |
| 1795 } | 1804 } |
| 1796 } | 1805 } |
| 1797 | 1806 |
| (...skipping 1262 matching lines...) |
| 3060 | 3069 |
| 3061 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { | 3070 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { |
| 3062 Register external_pointer = ToRegister(instr->elements()); | 3071 Register external_pointer = ToRegister(instr->elements()); |
| 3063 Register key = no_reg; | 3072 Register key = no_reg; |
| 3064 ElementsKind elements_kind = instr->elements_kind(); | 3073 ElementsKind elements_kind = instr->elements_kind(); |
| 3065 bool key_is_constant = instr->key()->IsConstantOperand(); | 3074 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 3066 int constant_key = 0; | 3075 int constant_key = 0; |
| 3067 if (key_is_constant) { | 3076 if (key_is_constant) { |
| 3068 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 3077 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 3069 if (constant_key & 0xF0000000) { | 3078 if (constant_key & 0xF0000000) { |
| 3070 Abort(kArrayIndexConstantValueTooBig); | 3079 Abort("array index constant value too big."); |
| 3071 } | 3080 } |
| 3072 } else { | 3081 } else { |
| 3073 key = ToRegister(instr->key()); | 3082 key = ToRegister(instr->key()); |
| 3074 } | 3083 } |
| 3075 int element_size_shift = ElementsKindToShiftSize(elements_kind); | 3084 int element_size_shift = ElementsKindToShiftSize(elements_kind); |
| 3076 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 3085 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 3077 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 3086 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 3078 int additional_offset = instr->additional_index() << element_size_shift; | 3087 int additional_offset = instr->additional_index() << element_size_shift; |
| 3079 | 3088 |
| 3080 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || | 3089 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || |
| (...skipping 65 matching lines...) |
| 3146 DoubleRegister result = ToDoubleRegister(instr->result()); | 3155 DoubleRegister result = ToDoubleRegister(instr->result()); |
| 3147 Register scratch = scratch0(); | 3156 Register scratch = scratch0(); |
| 3148 | 3157 |
| 3149 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); | 3158 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); |
| 3150 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 3159 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 3151 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 3160 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 3152 int constant_key = 0; | 3161 int constant_key = 0; |
| 3153 if (key_is_constant) { | 3162 if (key_is_constant) { |
| 3154 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 3163 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 3155 if (constant_key & 0xF0000000) { | 3164 if (constant_key & 0xF0000000) { |
| 3156 Abort(kArrayIndexConstantValueTooBig); | 3165 Abort("array index constant value too big."); |
| 3157 } | 3166 } |
| 3158 } else { | 3167 } else { |
| 3159 key = ToRegister(instr->key()); | 3168 key = ToRegister(instr->key()); |
| 3160 } | 3169 } |
| 3161 | 3170 |
| 3162 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + | 3171 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + |
| 3163 ((constant_key + instr->additional_index()) << element_size_shift); | 3172 ((constant_key + instr->additional_index()) << element_size_shift); |
| 3164 if (!key_is_constant) { | 3173 if (!key_is_constant) { |
| 3165 __ sll(scratch, key, shift_size); | 3174 __ sll(scratch, key, shift_size); |
| 3166 __ Addu(elements, elements, scratch); | 3175 __ Addu(elements, elements, scratch); |
| (...skipping 250 matching lines...) |
| 3417 ParameterCount actual(receiver); | 3426 ParameterCount actual(receiver); |
| 3418 __ InvokeFunction(function, actual, CALL_FUNCTION, | 3427 __ InvokeFunction(function, actual, CALL_FUNCTION, |
| 3419 safepoint_generator, CALL_AS_METHOD); | 3428 safepoint_generator, CALL_AS_METHOD); |
| 3420 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3429 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3421 } | 3430 } |
| 3422 | 3431 |
| 3423 | 3432 |
| 3424 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 3433 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 3425 LOperand* argument = instr->value(); | 3434 LOperand* argument = instr->value(); |
| 3426 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { | 3435 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { |
| 3427 Abort(kDoPushArgumentNotImplementedForDoubleType); | 3436 Abort("DoPushArgument not implemented for double type."); |
| 3428 } else { | 3437 } else { |
| 3429 Register argument_reg = EmitLoadRegister(argument, at); | 3438 Register argument_reg = EmitLoadRegister(argument, at); |
| 3430 __ push(argument_reg); | 3439 __ push(argument_reg); |
| 3431 } | 3440 } |
| 3432 } | 3441 } |
| 3433 | 3442 |
| 3434 | 3443 |
| 3435 void LCodeGen::DoDrop(LDrop* instr) { | 3444 void LCodeGen::DoDrop(LDrop* instr) { |
| 3436 __ Drop(instr->count()); | 3445 __ Drop(instr->count()); |
| 3437 } | 3446 } |
| (...skipping 198 matching lines...) |
| 3636 virtual LInstruction* instr() { return instr_; } | 3645 virtual LInstruction* instr() { return instr_; } |
| 3637 private: | 3646 private: |
| 3638 LMathAbs* instr_; | 3647 LMathAbs* instr_; |
| 3639 }; | 3648 }; |
| 3640 | 3649 |
| 3641 Representation r = instr->hydrogen()->value()->representation(); | 3650 Representation r = instr->hydrogen()->value()->representation(); |
| 3642 if (r.IsDouble()) { | 3651 if (r.IsDouble()) { |
| 3643 FPURegister input = ToDoubleRegister(instr->value()); | 3652 FPURegister input = ToDoubleRegister(instr->value()); |
| 3644 FPURegister result = ToDoubleRegister(instr->result()); | 3653 FPURegister result = ToDoubleRegister(instr->result()); |
| 3645 __ abs_d(result, input); | 3654 __ abs_d(result, input); |
| 3646 } else if (r.IsSmiOrInteger32()) { | 3655 } else if (r.IsInteger32()) { |
| 3647 EmitIntegerMathAbs(instr); | 3656 EmitIntegerMathAbs(instr); |
| 3648 } else { | 3657 } else { |
| 3649 // Representation is tagged. | 3658 // Representation is tagged. |
| 3650 DeferredMathAbsTaggedHeapNumber* deferred = | 3659 DeferredMathAbsTaggedHeapNumber* deferred = |
| 3651 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); | 3660 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); |
| 3652 Register input = ToRegister(instr->value()); | 3661 Register input = ToRegister(instr->value()); |
| 3653 // Smi check. | 3662 // Smi check. |
| 3654 __ JumpIfNotSmi(input, deferred->entry()); | 3663 __ JumpIfNotSmi(input, deferred->entry()); |
| 3655 // If smi, handle it directly. | 3664 // If smi, handle it directly. |
| 3656 EmitIntegerMathAbs(instr); | 3665 EmitIntegerMathAbs(instr); |
| (...skipping 585 matching lines...) |
| 4242 | 4251 |
| 4243 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4252 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
| 4244 Register external_pointer = ToRegister(instr->elements()); | 4253 Register external_pointer = ToRegister(instr->elements()); |
| 4245 Register key = no_reg; | 4254 Register key = no_reg; |
| 4246 ElementsKind elements_kind = instr->elements_kind(); | 4255 ElementsKind elements_kind = instr->elements_kind(); |
| 4247 bool key_is_constant = instr->key()->IsConstantOperand(); | 4256 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 4248 int constant_key = 0; | 4257 int constant_key = 0; |
| 4249 if (key_is_constant) { | 4258 if (key_is_constant) { |
| 4250 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 4259 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 4251 if (constant_key & 0xF0000000) { | 4260 if (constant_key & 0xF0000000) { |
| 4252 Abort(kArrayIndexConstantValueTooBig); | 4261 Abort("array index constant value too big."); |
| 4253 } | 4262 } |
| 4254 } else { | 4263 } else { |
| 4255 key = ToRegister(instr->key()); | 4264 key = ToRegister(instr->key()); |
| 4256 } | 4265 } |
| 4257 int element_size_shift = ElementsKindToShiftSize(elements_kind); | 4266 int element_size_shift = ElementsKindToShiftSize(elements_kind); |
| 4258 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 4267 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 4259 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 4268 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 4260 int additional_offset = instr->additional_index() << element_size_shift; | 4269 int additional_offset = instr->additional_index() << element_size_shift; |
| 4261 | 4270 |
| 4262 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || | 4271 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || |
| (...skipping 57 matching lines...) |
| 4320 Register scratch = scratch0(); | 4329 Register scratch = scratch0(); |
| 4321 bool key_is_constant = instr->key()->IsConstantOperand(); | 4330 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 4322 int constant_key = 0; | 4331 int constant_key = 0; |
| 4323 Label not_nan; | 4332 Label not_nan; |
| 4324 | 4333 |
| 4325 // Calculate the effective address of the slot in the array to store the | 4334 // Calculate the effective address of the slot in the array to store the |
| 4326 // double value. | 4335 // double value. |
| 4327 if (key_is_constant) { | 4336 if (key_is_constant) { |
| 4328 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 4337 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 4329 if (constant_key & 0xF0000000) { | 4338 if (constant_key & 0xF0000000) { |
| 4330 Abort(kArrayIndexConstantValueTooBig); | 4339 Abort("array index constant value too big."); |
| 4331 } | 4340 } |
| 4332 } else { | 4341 } else { |
| 4333 key = ToRegister(instr->key()); | 4342 key = ToRegister(instr->key()); |
| 4334 } | 4343 } |
| 4335 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); | 4344 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); |
| 4336 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 4345 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 4337 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 4346 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 4338 if (key_is_constant) { | 4347 if (key_is_constant) { |
| 4339 __ Addu(scratch, elements, Operand((constant_key << element_size_shift) + | 4348 __ Addu(scratch, elements, Operand((constant_key << element_size_shift) + |
| 4340 FixedDoubleArray::kHeaderSize - kHeapObjectTag)); | 4349 FixedDoubleArray::kHeaderSize - kHeapObjectTag)); |
| (...skipping 836 matching lines...) |
| 5177 __ lw(at, FieldMemOperand(at, Cell::kValueOffset)); | 5186 __ lw(at, FieldMemOperand(at, Cell::kValueOffset)); |
| 5178 DeoptimizeIf(ne, instr->environment(), reg, | 5187 DeoptimizeIf(ne, instr->environment(), reg, |
| 5179 Operand(at)); | 5188 Operand(at)); |
| 5180 } else { | 5189 } else { |
| 5181 DeoptimizeIf(ne, instr->environment(), reg, | 5190 DeoptimizeIf(ne, instr->environment(), reg, |
| 5182 Operand(target)); | 5191 Operand(target)); |
| 5183 } | 5192 } |
| 5184 } | 5193 } |
| 5185 | 5194 |
| 5186 | 5195 |
| 5187 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5196 void LCodeGen::DoCheckMapCommon(Register map_reg, |
| 5188 { | 5197 Handle<Map> map, |
| 5189 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 5198 LEnvironment* env) { |
| 5190 __ push(object); | 5199 Label success; |
| 5191 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); | 5200 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); |
| 5192 __ StoreToSafepointRegisterSlot(v0, scratch0()); | 5201 DeoptimizeIf(al, env); |
| 5193 } | 5202 __ bind(&success); |
| 5194 __ And(at, scratch0(), Operand(kSmiTagMask)); | |
| 5195 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg)); | |
| 5196 } | 5203 } |
| 5197 | 5204 |
| 5198 | 5205 |
| 5199 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5206 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 5200 class DeferredCheckMaps: public LDeferredCode { | |
| 5201 public: | |
| 5202 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | |
| 5203 : LDeferredCode(codegen), instr_(instr), object_(object) { | |
| 5204 SetExit(check_maps()); | |
| 5205 } | |
| 5206 virtual void Generate() { | |
| 5207 codegen()->DoDeferredInstanceMigration(instr_, object_); | |
| 5208 } | |
| 5209 Label* check_maps() { return &check_maps_; } | |
| 5210 virtual LInstruction* instr() { return instr_; } | |
| 5211 private: | |
| 5212 LCheckMaps* instr_; | |
| 5213 Label check_maps_; | |
| 5214 Register object_; | |
| 5215 }; | |
| 5216 | |
| 5217 if (instr->hydrogen()->CanOmitMapChecks()) return; | 5207 if (instr->hydrogen()->CanOmitMapChecks()) return; |
| 5218 Register map_reg = scratch0(); | 5208 Register map_reg = scratch0(); |
| 5219 LOperand* input = instr->value(); | 5209 LOperand* input = instr->value(); |
| 5220 ASSERT(input->IsRegister()); | 5210 ASSERT(input->IsRegister()); |
| 5221 Register reg = ToRegister(input); | 5211 Register reg = ToRegister(input); |
| | 5212 Label success; |
| 5222 SmallMapList* map_set = instr->hydrogen()->map_set(); | 5213 SmallMapList* map_set = instr->hydrogen()->map_set(); |
| 5223 __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); | 5214 __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 5224 | |
| 5225 DeferredCheckMaps* deferred = NULL; | |
| 5226 if (instr->hydrogen()->has_migration_target()) { | |
| 5227 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); | |
| 5228 __ bind(deferred->check_maps()); | |
| 5229 } | |
| 5230 | |
| 5231 Label success; | |
| 5232 for (int i = 0; i < map_set->length() - 1; i++) { | 5215 for (int i = 0; i < map_set->length() - 1; i++) { |
| 5233 Handle<Map> map = map_set->at(i); | 5216 Handle<Map> map = map_set->at(i); |
| 5234 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); | 5217 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); |
| 5235 } | 5218 } |
| 5236 Handle<Map> map = map_set->last(); | 5219 Handle<Map> map = map_set->last(); |
| 5237 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); | 5220 DoCheckMapCommon(map_reg, map, instr->environment()); |
| 5238 if (instr->hydrogen()->has_migration_target()) { | |
| 5239 __ Branch(deferred->entry()); | |
| 5240 } else { | |
| 5241 DeoptimizeIf(al, instr->environment()); | |
| 5242 } | |
| 5243 | |
| 5244 __ bind(&success); | 5221 __ bind(&success); |
| 5245 } | 5222 } |
| 5246 | 5223 |
| 5247 | 5224 |
| 5248 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5225 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
| 5249 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5226 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); |
| 5250 Register result_reg = ToRegister(instr->result()); | 5227 Register result_reg = ToRegister(instr->result()); |
| 5251 DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); | 5228 DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); |
| 5252 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg); | 5229 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg); |
| 5253 } | 5230 } |
| (...skipping 34 matching lines...) |
| 5288 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); | 5265 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); |
| 5289 __ jmp(&done); | 5266 __ jmp(&done); |
| 5290 | 5267 |
| 5291 __ bind(&is_smi); | 5268 __ bind(&is_smi); |
| 5292 __ ClampUint8(result_reg, scratch); | 5269 __ ClampUint8(result_reg, scratch); |
| 5293 | 5270 |
| 5294 __ bind(&done); | 5271 __ bind(&done); |
| 5295 } | 5272 } |
| 5296 | 5273 |
| 5297 | 5274 |
| | 5275 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| | 5276 if (instr->hydrogen()->CanOmitPrototypeChecks()) return; |
| | 5277 |
| | 5278 Register prototype_reg = ToRegister(instr->temp()); |
| | 5279 Register map_reg = ToRegister(instr->temp2()); |
| | 5280 |
| | 5281 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); |
| | 5282 ZoneList<Handle<Map> >* maps = instr->maps(); |
| | 5283 |
| | 5284 ASSERT(prototypes->length() == maps->length()); |
| | 5285 |
| | 5286 for (int i = 0; i < prototypes->length(); i++) { |
| | 5287 __ LoadHeapObject(prototype_reg, prototypes->at(i)); |
| | 5288 __ lw(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset)); |
| | 5289 DoCheckMapCommon(map_reg, maps->at(i), instr->environment()); |
| | 5290 } |
| | 5291 } |
| | 5292 |
| | 5293 |
| 5298 void LCodeGen::DoAllocate(LAllocate* instr) { | 5294 void LCodeGen::DoAllocate(LAllocate* instr) { |
| 5299 class DeferredAllocate: public LDeferredCode { | 5295 class DeferredAllocate: public LDeferredCode { |
| 5300 public: | 5296 public: |
| 5301 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) | 5297 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) |
| 5302 : LDeferredCode(codegen), instr_(instr) { } | 5298 : LDeferredCode(codegen), instr_(instr) { } |
| 5303 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } | 5299 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } |
| 5304 virtual LInstruction* instr() { return instr_; } | 5300 virtual LInstruction* instr() { return instr_; } |
| 5305 private: | 5301 private: |
| 5306 LAllocate* instr_; | 5302 LAllocate* instr_; |
| 5307 }; | 5303 }; |
| (...skipping 538 matching lines...) |
| 5846 __ Subu(scratch, result, scratch); | 5842 __ Subu(scratch, result, scratch); |
| 5847 __ lw(result, FieldMemOperand(scratch, | 5843 __ lw(result, FieldMemOperand(scratch, |
| 5848 FixedArray::kHeaderSize - kPointerSize)); | 5844 FixedArray::kHeaderSize - kPointerSize)); |
| 5849 __ bind(&done); | 5845 __ bind(&done); |
| 5850 } | 5846 } |
| 5851 | 5847 |
| 5852 | 5848 |
| 5853 #undef __ | 5849 #undef __ |
| 5854 | 5850 |
| 5855 } } // namespace v8::internal | 5851 } } // namespace v8::internal |
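Note on the power-of-2 modulus path in DoModI above: the emitted `subu`/`And` sequence computes a truncated modulus (sign follows the left operand) with a mask instead of a division. The C++ sketch below is illustrative only and not part of this CL; the helper name `ComputeMod` is assumed for the example.

```cpp
// Illustrative sketch (not part of this CL) of the value DoModI's
// power-of-2 path produces: |left| is masked with (|divisor| - 1) and the
// sign of the left operand is restored, avoiding a division instruction.
#include <cassert>
#include <cstdint>

int32_t ComputeMod(int32_t left, int32_t divisor) {
  // Abs(right) taken in unsigned arithmetic so that kMinInt still yields a
  // power of two, matching the "works even when right contains kMinInt" note.
  uint32_t abs_divisor = divisor < 0 ? 0u - static_cast<uint32_t>(divisor)
                                     : static_cast<uint32_t>(divisor);
  assert(abs_divisor != 0 && (abs_divisor & (abs_divisor - 1)) == 0);
  uint32_t mask = abs_divisor - 1;
  if (left < 0) {
    // Mirror of the CanBeNegative() branch: negate, mask, negate back.
    // Negation happens in uint32_t to match the wrapping subu instruction;
    // a zero result on this path is where kBailoutOnMinusZero deoptimizes.
    uint32_t magnitude = (0u - static_cast<uint32_t>(left)) & mask;
    return -static_cast<int32_t>(magnitude);
  }
  return static_cast<int32_t>(static_cast<uint32_t>(left) & mask);
}
```

For example, ComputeMod(7, 4) is 3 and ComputeMod(-7, 4) is -3, corresponding to the left_is_not_negative path and the negated path respectively.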