OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 393 matching lines...)
404 translation->StoreLiteral(src_index); | 404 translation->StoreLiteral(src_index); |
405 } else { | 405 } else { |
406 UNREACHABLE(); | 406 UNREACHABLE(); |
407 } | 407 } |
408 } | 408 } |
409 | 409 |
410 | 410 |
411 void LCodeGen::CallCode(Handle<Code> code, | 411 void LCodeGen::CallCode(Handle<Code> code, |
412 RelocInfo::Mode mode, | 412 RelocInfo::Mode mode, |
413 LInstruction* instr, | 413 LInstruction* instr, |
414 bool adjusted) { | 414 ContextMode context_mode, |
| 415 SafepointMode safepoint_mode) { |
415 ASSERT(instr != NULL); | 416 ASSERT(instr != NULL); |
416 LPointerMap* pointers = instr->pointer_map(); | 417 LPointerMap* pointers = instr->pointer_map(); |
417 RecordPosition(pointers->position()); | 418 RecordPosition(pointers->position()); |
418 | 419 |
419 if (!adjusted) { | 420 if (context_mode == RESTORE_CONTEXT) { |
420 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 421 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
421 } | 422 } |
422 __ call(code, mode); | 423 __ call(code, mode); |
423 | 424 |
424 RegisterLazyDeoptimization(instr); | 425 RegisterLazyDeoptimization(instr, safepoint_mode); |
425 | 426 |
426 // Signal that we don't inline smi code before these stubs in the | 427 // Signal that we don't inline smi code before these stubs in the |
427 // optimizing code generator. | 428 // optimizing code generator. |
428 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 429 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
429 code->kind() == Code::COMPARE_IC) { | 430 code->kind() == Code::COMPARE_IC) { |
430 __ nop(); | 431 __ nop(); |
431 } | 432 } |
432 } | 433 } |
433 | 434 |
434 | 435 |
435 void LCodeGen::CallRuntime(const Runtime::Function* fun, | 436 void LCodeGen::CallRuntime(const Runtime::Function* fun, |
436 int argc, | 437 int argc, |
437 LInstruction* instr, | 438 LInstruction* instr, |
438 bool adjusted) { | 439 ContextMode context_mode) { |
439 ASSERT(instr != NULL); | 440 ASSERT(instr != NULL); |
440 ASSERT(instr->HasPointerMap()); | 441 ASSERT(instr->HasPointerMap()); |
441 LPointerMap* pointers = instr->pointer_map(); | 442 LPointerMap* pointers = instr->pointer_map(); |
442 RecordPosition(pointers->position()); | 443 RecordPosition(pointers->position()); |
443 | 444 |
444 if (!adjusted) { | 445 if (context_mode == RESTORE_CONTEXT) { |
445 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 446 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
446 } | 447 } |
447 __ CallRuntime(fun, argc); | 448 __ CallRuntime(fun, argc); |
448 | 449 |
449 RegisterLazyDeoptimization(instr); | 450 RegisterLazyDeoptimization(instr); |
450 } | 451 } |
451 | 452 |
452 | 453 |
453 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 454 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, |
| 455 SafepointMode safepoint_mode) { |
454 // Create the environment to bail out to. If the call has side effects, | 456 // Create the environment to bail out to. If the call has side effects, |
455 // execution has to continue after the call; otherwise execution can continue | 457 // execution has to continue after the call; otherwise execution can continue |
456 // from a previous bailout point, repeating the call. | 458 // from a previous bailout point, repeating the call. |
457 LEnvironment* deoptimization_environment; | 459 LEnvironment* deoptimization_environment; |
458 if (instr->HasDeoptimizationEnvironment()) { | 460 if (instr->HasDeoptimizationEnvironment()) { |
459 deoptimization_environment = instr->deoptimization_environment(); | 461 deoptimization_environment = instr->deoptimization_environment(); |
460 } else { | 462 } else { |
461 deoptimization_environment = instr->environment(); | 463 deoptimization_environment = instr->environment(); |
462 } | 464 } |
463 | 465 |
464 RegisterEnvironmentForDeoptimization(deoptimization_environment); | 466 RegisterEnvironmentForDeoptimization(deoptimization_environment); |
465 RecordSafepoint(instr->pointer_map(), | 467 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
466 deoptimization_environment->deoptimization_index()); | 468 RecordSafepoint(instr->pointer_map(), |
| 469 deoptimization_environment->deoptimization_index()); |
| 470 } else { |
| 471 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 472 RecordSafepointWithRegisters( |
| 473 instr->pointer_map(), |
| 474 0, |
| 475 deoptimization_environment->deoptimization_index()); |
| 476 } |
467 } | 477 } |
468 | 478 |
469 | 479 |
470 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | 480 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { |
471 if (!environment->HasBeenRegistered()) { | 481 if (!environment->HasBeenRegistered()) { |
472 // Physical stack frame layout: | 482 // Physical stack frame layout: |
473 // -x ............. -4 0 ..................................... y | 483 // -x ............. -4 0 ..................................... y |
474 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 484 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
475 | 485 |
476 // Layout of the environment: | 486 // Layout of the environment: |
(...skipping 659 matching lines...)
1136 | 1146 |
1137 void LCodeGen::DoBitNotI(LBitNotI* instr) { | 1147 void LCodeGen::DoBitNotI(LBitNotI* instr) { |
1138 LOperand* input = instr->InputAt(0); | 1148 LOperand* input = instr->InputAt(0); |
1139 ASSERT(input->Equals(instr->result())); | 1149 ASSERT(input->Equals(instr->result())); |
1140 __ not_(ToRegister(input)); | 1150 __ not_(ToRegister(input)); |
1141 } | 1151 } |
1142 | 1152 |
1143 | 1153 |
1144 void LCodeGen::DoThrow(LThrow* instr) { | 1154 void LCodeGen::DoThrow(LThrow* instr) { |
1145 __ push(ToOperand(instr->InputAt(0))); | 1155 __ push(ToOperand(instr->InputAt(0))); |
1146 CallRuntime(Runtime::kThrow, 1, instr, false); | 1156 CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT); |
1147 | 1157 |
1148 if (FLAG_debug_code) { | 1158 if (FLAG_debug_code) { |
1149 Comment("Unreachable code."); | 1159 Comment("Unreachable code."); |
1150 __ int3(); | 1160 __ int3(); |
1151 } | 1161 } |
1152 } | 1162 } |
1153 | 1163 |
1154 | 1164 |
1155 void LCodeGen::DoAddI(LAddI* instr) { | 1165 void LCodeGen::DoAddI(LAddI* instr) { |
1156 LOperand* left = instr->InputAt(0); | 1166 LOperand* left = instr->InputAt(0); |
(...skipping 54 matching lines...)
1211 } | 1221 } |
1212 } | 1222 } |
1213 | 1223 |
1214 | 1224 |
1215 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1225 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
1216 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); | 1226 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); |
1217 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); | 1227 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); |
1218 ASSERT(ToRegister(instr->result()).is(eax)); | 1228 ASSERT(ToRegister(instr->result()).is(eax)); |
1219 | 1229 |
1220 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1230 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); |
1221 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 1231 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
1222 } | 1232 } |
1223 | 1233 |
1224 | 1234 |
1225 int LCodeGen::GetNextEmittedBlock(int block) { | 1235 int LCodeGen::GetNextEmittedBlock(int block) { |
1226 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 1236 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
1227 LLabel* label = chunk_->GetLabel(i); | 1237 LLabel* label = chunk_->GetLabel(i); |
1228 if (!label->HasReplacement()) return i; | 1238 if (!label->HasReplacement()) return i; |
1229 } | 1239 } |
1230 return -1; | 1240 return -1; |
1231 } | 1241 } |
(...skipping 710 matching lines...)
1942 // Get the temp register reserved by the instruction. This needs to be edi | 1952 // Get the temp register reserved by the instruction. This needs to be edi |
1943 // because its slot among the pushed safepoint registers is used to | 1953 // because its slot among the pushed safepoint registers is used to |
1944 // communicate the offset to the location of the map check. | 1954 // communicate the offset to the location of the map check. |
1945 Register temp = ToRegister(instr->TempAt(0)); | 1955 Register temp = ToRegister(instr->TempAt(0)); |
1946 ASSERT(temp.is(edi)); | 1956 ASSERT(temp.is(edi)); |
1947 __ mov(InstanceofStub::right(), Immediate(instr->function())); | 1957 __ mov(InstanceofStub::right(), Immediate(instr->function())); |
1948 static const int kAdditionalDelta = 16; | 1958 static const int kAdditionalDelta = 16; |
1949 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1959 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
1950 __ mov(temp, Immediate(delta)); | 1960 __ mov(temp, Immediate(delta)); |
1951 __ StoreToSafepointRegisterSlot(temp, temp); | 1961 __ StoreToSafepointRegisterSlot(temp, temp); |
1952 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 1962 CallCode(stub.GetCode(), |
| 1963 RelocInfo::CODE_TARGET, |
| 1964 instr, |
| 1965 RESTORE_CONTEXT, |
| 1966 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
1953 // Put the result value into the eax slot and restore all registers. | 1967 // Put the result value into the eax slot and restore all registers. |
1954 __ StoreToSafepointRegisterSlot(eax, eax); | 1968 __ StoreToSafepointRegisterSlot(eax, eax); |
1955 __ PopSafepointRegisters(); | 1969 __ PopSafepointRegisters(); |
1956 } | 1970 } |
1957 | 1971 |
1958 | 1972 |
1959 static Condition ComputeCompareCondition(Token::Value op) { | 1973 static Condition ComputeCompareCondition(Token::Value op) { |
1960 switch (op) { | 1974 switch (op) { |
1961 case Token::EQ_STRICT: | 1975 case Token::EQ_STRICT: |
1962 case Token::EQ: | 1976 case Token::EQ: |
(...skipping 10 matching lines...)
1973 UNREACHABLE(); | 1987 UNREACHABLE(); |
1974 return no_condition; | 1988 return no_condition; |
1975 } | 1989 } |
1976 } | 1990 } |
1977 | 1991 |
1978 | 1992 |
1979 void LCodeGen::DoCmpT(LCmpT* instr) { | 1993 void LCodeGen::DoCmpT(LCmpT* instr) { |
1980 Token::Value op = instr->op(); | 1994 Token::Value op = instr->op(); |
1981 | 1995 |
1982 Handle<Code> ic = CompareIC::GetUninitialized(op); | 1996 Handle<Code> ic = CompareIC::GetUninitialized(op); |
1983 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 1997 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
1984 | 1998 |
1985 Condition condition = ComputeCompareCondition(op); | 1999 Condition condition = ComputeCompareCondition(op); |
1986 if (op == Token::GT || op == Token::LTE) { | 2000 if (op == Token::GT || op == Token::LTE) { |
1987 condition = ReverseCondition(condition); | 2001 condition = ReverseCondition(condition); |
1988 } | 2002 } |
1989 NearLabel true_value, done; | 2003 NearLabel true_value, done; |
1990 __ test(eax, Operand(eax)); | 2004 __ test(eax, Operand(eax)); |
1991 __ j(condition, &true_value); | 2005 __ j(condition, &true_value); |
1992 __ mov(ToRegister(instr->result()), factory()->false_value()); | 2006 __ mov(ToRegister(instr->result()), factory()->false_value()); |
1993 __ jmp(&done); | 2007 __ jmp(&done); |
1994 __ bind(&true_value); | 2008 __ bind(&true_value); |
1995 __ mov(ToRegister(instr->result()), factory()->true_value()); | 2009 __ mov(ToRegister(instr->result()), factory()->true_value()); |
1996 __ bind(&done); | 2010 __ bind(&done); |
1997 } | 2011 } |
1998 | 2012 |
1999 | 2013 |
2000 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { | 2014 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { |
2001 Token::Value op = instr->op(); | 2015 Token::Value op = instr->op(); |
2002 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 2016 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
2003 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 2017 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
2004 | 2018 |
2005 Handle<Code> ic = CompareIC::GetUninitialized(op); | 2019 Handle<Code> ic = CompareIC::GetUninitialized(op); |
2006 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 2020 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2007 | 2021 |
2008 // The compare stub expects compare condition and the input operands | 2022 // The compare stub expects compare condition and the input operands |
2009 // reversed for GT and LTE. | 2023 // reversed for GT and LTE. |
2010 Condition condition = ComputeCompareCondition(op); | 2024 Condition condition = ComputeCompareCondition(op); |
2011 if (op == Token::GT || op == Token::LTE) { | 2025 if (op == Token::GT || op == Token::LTE) { |
2012 condition = ReverseCondition(condition); | 2026 condition = ReverseCondition(condition); |
2013 } | 2027 } |
2014 __ test(eax, Operand(eax)); | 2028 __ test(eax, Operand(eax)); |
2015 EmitBranch(true_block, false_block, condition); | 2029 EmitBranch(true_block, false_block, condition); |
2016 } | 2030 } |
(...skipping 111 matching lines...)
2128 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { | 2142 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { |
2129 Register object = ToRegister(instr->object()); | 2143 Register object = ToRegister(instr->object()); |
2130 Register result = ToRegister(instr->result()); | 2144 Register result = ToRegister(instr->result()); |
2131 | 2145 |
2132 int map_count = instr->hydrogen()->types()->length(); | 2146 int map_count = instr->hydrogen()->types()->length(); |
2133 Handle<String> name = instr->hydrogen()->name(); | 2147 Handle<String> name = instr->hydrogen()->name(); |
2134 if (map_count == 0) { | 2148 if (map_count == 0) { |
2135 ASSERT(instr->hydrogen()->need_generic()); | 2149 ASSERT(instr->hydrogen()->need_generic()); |
2136 __ mov(ecx, name); | 2150 __ mov(ecx, name); |
2137 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2151 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2138 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 2152 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2139 } else { | 2153 } else { |
2140 NearLabel done; | 2154 NearLabel done; |
2141 for (int i = 0; i < map_count - 1; ++i) { | 2155 for (int i = 0; i < map_count - 1; ++i) { |
2142 Handle<Map> map = instr->hydrogen()->types()->at(i); | 2156 Handle<Map> map = instr->hydrogen()->types()->at(i); |
2143 NearLabel next; | 2157 NearLabel next; |
2144 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); | 2158 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); |
2145 __ j(not_equal, &next); | 2159 __ j(not_equal, &next); |
2146 EmitLoadField(result, object, map, name); | 2160 EmitLoadField(result, object, map, name); |
2147 __ jmp(&done); | 2161 __ jmp(&done); |
2148 __ bind(&next); | 2162 __ bind(&next); |
2149 } | 2163 } |
2150 Handle<Map> map = instr->hydrogen()->types()->last(); | 2164 Handle<Map> map = instr->hydrogen()->types()->last(); |
2151 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); | 2165 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); |
2152 if (instr->hydrogen()->need_generic()) { | 2166 if (instr->hydrogen()->need_generic()) { |
2153 NearLabel generic; | 2167 NearLabel generic; |
2154 __ j(not_equal, &generic); | 2168 __ j(not_equal, &generic); |
2155 EmitLoadField(result, object, map, name); | 2169 EmitLoadField(result, object, map, name); |
2156 __ jmp(&done); | 2170 __ jmp(&done); |
2157 __ bind(&generic); | 2171 __ bind(&generic); |
2158 __ mov(ecx, name); | 2172 __ mov(ecx, name); |
2159 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2173 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2160 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 2174 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2161 } else { | 2175 } else { |
2162 DeoptimizeIf(not_equal, instr->environment()); | 2176 DeoptimizeIf(not_equal, instr->environment()); |
2163 EmitLoadField(result, object, map, name); | 2177 EmitLoadField(result, object, map, name); |
2164 } | 2178 } |
2165 __ bind(&done); | 2179 __ bind(&done); |
2166 } | 2180 } |
2167 } | 2181 } |
2168 | 2182 |
2169 | 2183 |
2170 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2184 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
(...skipping 601 matching lines...)
2772 __ fstp_d(Operand(esp, 0)); | 2786 __ fstp_d(Operand(esp, 0)); |
2773 __ movdbl(result_reg, Operand(esp, 0)); | 2787 __ movdbl(result_reg, Operand(esp, 0)); |
2774 __ add(Operand(esp), Immediate(kDoubleSize)); | 2788 __ add(Operand(esp), Immediate(kDoubleSize)); |
2775 } | 2789 } |
2776 | 2790 |
2777 | 2791 |
2778 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { | 2792 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { |
2779 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2793 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2780 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 2794 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
2781 TranscendentalCacheStub::UNTAGGED); | 2795 TranscendentalCacheStub::UNTAGGED); |
2782 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 2796 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2783 } | 2797 } |
2784 | 2798 |
2785 | 2799 |
2786 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { | 2800 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { |
2787 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2801 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2788 TranscendentalCacheStub stub(TranscendentalCache::COS, | 2802 TranscendentalCacheStub stub(TranscendentalCache::COS, |
2789 TranscendentalCacheStub::UNTAGGED); | 2803 TranscendentalCacheStub::UNTAGGED); |
2790 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 2804 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2791 } | 2805 } |
2792 | 2806 |
2793 | 2807 |
2794 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { | 2808 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { |
2795 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2809 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2796 TranscendentalCacheStub stub(TranscendentalCache::SIN, | 2810 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
2797 TranscendentalCacheStub::UNTAGGED); | 2811 TranscendentalCacheStub::UNTAGGED); |
2798 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 2812 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2799 } | 2813 } |
2800 | 2814 |
2801 | 2815 |
2802 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { | 2816 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { |
2803 switch (instr->op()) { | 2817 switch (instr->op()) { |
2804 case kMathAbs: | 2818 case kMathAbs: |
2805 DoMathAbs(instr); | 2819 DoMathAbs(instr); |
2806 break; | 2820 break; |
2807 case kMathFloor: | 2821 case kMathFloor: |
2808 DoMathFloor(instr); | 2822 DoMathFloor(instr); |
(...skipping 82 matching lines...)
2891 ASSERT(ToRegister(instr->constructor()).is(edi)); | 2905 ASSERT(ToRegister(instr->constructor()).is(edi)); |
2892 ASSERT(ToRegister(instr->result()).is(eax)); | 2906 ASSERT(ToRegister(instr->result()).is(eax)); |
2893 | 2907 |
2894 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); | 2908 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); |
2895 __ Set(eax, Immediate(instr->arity())); | 2909 __ Set(eax, Immediate(instr->arity())); |
2896 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); | 2910 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); |
2897 } | 2911 } |
2898 | 2912 |
2899 | 2913 |
2900 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 2914 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
2901 CallRuntime(instr->function(), instr->arity(), instr, false); | 2915 CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT); |
2902 } | 2916 } |
2903 | 2917 |
2904 | 2918 |
2905 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 2919 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
2906 Register object = ToRegister(instr->object()); | 2920 Register object = ToRegister(instr->object()); |
2907 Register value = ToRegister(instr->value()); | 2921 Register value = ToRegister(instr->value()); |
2908 int offset = instr->offset(); | 2922 int offset = instr->offset(); |
2909 | 2923 |
2910 if (!instr->transition().is_null()) { | 2924 if (!instr->transition().is_null()) { |
2911 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); | 2925 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); |
(...skipping 849 matching lines...)
3761 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3775 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
3762 __ push(Immediate(instr->hydrogen()->constant_elements())); | 3776 __ push(Immediate(instr->hydrogen()->constant_elements())); |
3763 | 3777 |
3764 // Pick the right runtime function or stub to call. | 3778 // Pick the right runtime function or stub to call. |
3765 int length = instr->hydrogen()->length(); | 3779 int length = instr->hydrogen()->length(); |
3766 if (instr->hydrogen()->IsCopyOnWrite()) { | 3780 if (instr->hydrogen()->IsCopyOnWrite()) { |
3767 ASSERT(instr->hydrogen()->depth() == 1); | 3781 ASSERT(instr->hydrogen()->depth() == 1); |
3768 FastCloneShallowArrayStub::Mode mode = | 3782 FastCloneShallowArrayStub::Mode mode = |
3769 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 3783 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
3770 FastCloneShallowArrayStub stub(mode, length); | 3784 FastCloneShallowArrayStub stub(mode, length); |
3771 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 3785 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
3772 } else if (instr->hydrogen()->depth() > 1) { | 3786 } else if (instr->hydrogen()->depth() > 1) { |
3773 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false); | 3787 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT); |
3774 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 3788 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
3775 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false); | 3789 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT); |
3776 } else { | 3790 } else { |
3777 FastCloneShallowArrayStub::Mode mode = | 3791 FastCloneShallowArrayStub::Mode mode = |
3778 FastCloneShallowArrayStub::CLONE_ELEMENTS; | 3792 FastCloneShallowArrayStub::CLONE_ELEMENTS; |
3779 FastCloneShallowArrayStub stub(mode, length); | 3793 FastCloneShallowArrayStub stub(mode, length); |
3780 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 3794 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
3781 } | 3795 } |
3782 } | 3796 } |
3783 | 3797 |
3784 | 3798 |
3785 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 3799 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
3786 ASSERT(ToRegister(instr->context()).is(esi)); | 3800 ASSERT(ToRegister(instr->context()).is(esi)); |
3787 // Setup the parameters to the stub/runtime call. | 3801 // Setup the parameters to the stub/runtime call. |
3788 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3802 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
3789 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); | 3803 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); |
3790 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3804 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
(...skipping 36 matching lines...)
3827 __ mov(ebx, FieldOperand(ecx, literal_offset)); | 3841 __ mov(ebx, FieldOperand(ecx, literal_offset)); |
3828 __ cmp(ebx, factory()->undefined_value()); | 3842 __ cmp(ebx, factory()->undefined_value()); |
3829 __ j(not_equal, &materialized); | 3843 __ j(not_equal, &materialized); |
3830 | 3844 |
3831 // Create regexp literal using runtime function | 3845 // Create regexp literal using runtime function |
3832 // Result will be in eax. | 3846 // Result will be in eax. |
3833 __ push(ecx); | 3847 __ push(ecx); |
3834 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3848 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
3835 __ push(Immediate(instr->hydrogen()->pattern())); | 3849 __ push(Immediate(instr->hydrogen()->pattern())); |
3836 __ push(Immediate(instr->hydrogen()->flags())); | 3850 __ push(Immediate(instr->hydrogen()->flags())); |
3837 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false); | 3851 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT); |
3838 __ mov(ebx, eax); | 3852 __ mov(ebx, eax); |
3839 | 3853 |
3840 __ bind(&materialized); | 3854 __ bind(&materialized); |
3841 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 3855 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
3842 Label allocated, runtime_allocate; | 3856 Label allocated, runtime_allocate; |
3843 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | 3857 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); |
3844 __ jmp(&allocated); | 3858 __ jmp(&allocated); |
3845 | 3859 |
3846 __ bind(&runtime_allocate); | 3860 __ bind(&runtime_allocate); |
3847 __ push(ebx); | 3861 __ push(ebx); |
3848 __ push(Immediate(Smi::FromInt(size))); | 3862 __ push(Immediate(Smi::FromInt(size))); |
3849 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false); | 3863 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT); |
3850 __ pop(ebx); | 3864 __ pop(ebx); |
3851 | 3865 |
3852 __ bind(&allocated); | 3866 __ bind(&allocated); |
3853 // Copy the content into the newly allocated memory. | 3867 // Copy the content into the newly allocated memory. |
3854 // (Unroll copy loop once for better throughput). | 3868 // (Unroll copy loop once for better throughput). |
3855 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 3869 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
3856 __ mov(edx, FieldOperand(ebx, i)); | 3870 __ mov(edx, FieldOperand(ebx, i)); |
3857 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); | 3871 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); |
3858 __ mov(FieldOperand(eax, i), edx); | 3872 __ mov(FieldOperand(eax, i), edx); |
3859 __ mov(FieldOperand(eax, i + kPointerSize), ecx); | 3873 __ mov(FieldOperand(eax, i + kPointerSize), ecx); |
3860 } | 3874 } |
3861 if ((size % (2 * kPointerSize)) != 0) { | 3875 if ((size % (2 * kPointerSize)) != 0) { |
3862 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); | 3876 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); |
3863 __ mov(FieldOperand(eax, size - kPointerSize), edx); | 3877 __ mov(FieldOperand(eax, size - kPointerSize), edx); |
3864 } | 3878 } |
3865 } | 3879 } |
3866 | 3880 |
3867 | 3881 |
3868 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 3882 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
3869 // Use the fast case closure allocation code that allocates in new | 3883 // Use the fast case closure allocation code that allocates in new |
3870 // space for nested functions that don't need literals cloning. | 3884 // space for nested functions that don't need literals cloning. |
3871 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); | 3885 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); |
3872 bool pretenure = instr->hydrogen()->pretenure(); | 3886 bool pretenure = instr->hydrogen()->pretenure(); |
3873 if (!pretenure && shared_info->num_literals() == 0) { | 3887 if (!pretenure && shared_info->num_literals() == 0) { |
3874 FastNewClosureStub stub( | 3888 FastNewClosureStub stub( |
3875 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); | 3889 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); |
3876 __ push(Immediate(shared_info)); | 3890 __ push(Immediate(shared_info)); |
3877 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 3891 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
3878 } else { | 3892 } else { |
3879 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); | 3893 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); |
3880 __ push(Immediate(shared_info)); | 3894 __ push(Immediate(shared_info)); |
3881 __ push(Immediate(pretenure | 3895 __ push(Immediate(pretenure |
3882 ? factory()->true_value() | 3896 ? factory()->true_value() |
3883 : factory()->false_value())); | 3897 : factory()->false_value())); |
3884 CallRuntime(Runtime::kNewClosure, 3, instr, false); | 3898 CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT); |
3885 } | 3899 } |
3886 } | 3900 } |
3887 | 3901 |
3888 | 3902 |
3889 void LCodeGen::DoTypeof(LTypeof* instr) { | 3903 void LCodeGen::DoTypeof(LTypeof* instr) { |
3890 LOperand* input = instr->InputAt(0); | 3904 LOperand* input = instr->InputAt(0); |
3891 if (input->IsConstantOperand()) { | 3905 if (input->IsConstantOperand()) { |
3892 __ push(ToImmediate(input)); | 3906 __ push(ToImmediate(input)); |
3893 } else { | 3907 } else { |
3894 __ push(ToOperand(input)); | 3908 __ push(ToOperand(input)); |
3895 } | 3909 } |
3896 CallRuntime(Runtime::kTypeof, 1, instr, false); | 3910 CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT); |
3897 } | 3911 } |
3898 | 3912 |
3899 | 3913 |
3900 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { | 3914 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { |
3901 Register input = ToRegister(instr->InputAt(0)); | 3915 Register input = ToRegister(instr->InputAt(0)); |
3902 Register result = ToRegister(instr->result()); | 3916 Register result = ToRegister(instr->result()); |
3903 Label true_label; | 3917 Label true_label; |
3904 Label false_label; | 3918 Label false_label; |
3905 NearLabel done; | 3919 NearLabel done; |
3906 | 3920 |
(...skipping 182 matching lines...)
4089 | 4103 |
4090 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 4104 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
4091 // Perform stack overflow check. | 4105 // Perform stack overflow check. |
4092 NearLabel done; | 4106 NearLabel done; |
4093 ExternalReference stack_limit = | 4107 ExternalReference stack_limit = |
4094 ExternalReference::address_of_stack_limit(isolate()); | 4108 ExternalReference::address_of_stack_limit(isolate()); |
4095 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 4109 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
4096 __ j(above_equal, &done); | 4110 __ j(above_equal, &done); |
4097 | 4111 |
4098 StackCheckStub stub; | 4112 StackCheckStub stub; |
4099 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 4113 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
4100 __ bind(&done); | 4114 __ bind(&done); |
4101 } | 4115 } |
4102 | 4116 |
4103 | 4117 |
4104 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 4118 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
4105 // This is a pseudo-instruction that ensures that the environment here is | 4119 // This is a pseudo-instruction that ensures that the environment here is |
4106 // properly registered for deoptimization and records the assembler's PC | 4120 // properly registered for deoptimization and records the assembler's PC |
4107 // offset. | 4121 // offset. |
4108 LEnvironment* environment = instr->environment(); | 4122 LEnvironment* environment = instr->environment(); |
4109 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 4123 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), |
4110 instr->SpilledDoubleRegisterArray()); | 4124 instr->SpilledDoubleRegisterArray()); |
4111 | 4125 |
4112 // If the environment were already registered, we would have no way of | 4126 // If the environment were already registered, we would have no way of |
4113 // backpatching it with the spill slot operands. | 4127 // backpatching it with the spill slot operands. |
4114 ASSERT(!environment->HasBeenRegistered()); | 4128 ASSERT(!environment->HasBeenRegistered()); |
4115 RegisterEnvironmentForDeoptimization(environment); | 4129 RegisterEnvironmentForDeoptimization(environment); |
4116 ASSERT(osr_pc_offset_ == -1); | 4130 ASSERT(osr_pc_offset_ == -1); |
4117 osr_pc_offset_ = masm()->pc_offset(); | 4131 osr_pc_offset_ = masm()->pc_offset(); |
4118 } | 4132 } |
4119 | 4133 |
4120 | 4134 |
4121 #undef __ | 4135 #undef __ |
4122 | 4136 |
4123 } } // namespace v8::internal | 4137 } } // namespace v8::internal |
4124 | 4138 |
4125 #endif // V8_TARGET_ARCH_IA32 | 4139 #endif // V8_TARGET_ARCH_IA32 |
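
For orientation: the ContextMode and SafepointMode values threaded through CallCode, CallRuntime, and RegisterLazyDeoptimization in this patch are declared in lithium-codegen-ia32.h, which is outside this hunk. The sketch below shows what those declarations are assumed to look like. Only the enumerators that actually appear above (RESTORE_CONTEXT, RECORD_SIMPLE_SAFEPOINT, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS) come from the patch; CONTEXT_ADJUSTED and the default arguments are assumptions made so that the unchanged call sites, such as the three-argument CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr) and the one-argument RegisterLazyDeoptimization(instr) in CallRuntime, would still compile.

  // Sketch of assumed header declarations; not part of this diff.
  enum ContextMode {
    RESTORE_CONTEXT,   // reload esi from the frame before making the call
    CONTEXT_ADJUSTED   // assumed name: esi already holds the correct context
  };

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr,
                ContextMode context_mode = CONTEXT_ADJUSTED,              // assumed default
                SafepointMode safepoint_mode = RECORD_SIMPLE_SAFEPOINT);  // assumed default

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr,
                   ContextMode context_mode = CONTEXT_ADJUSTED);          // assumed default

  void RegisterLazyDeoptimization(
      LInstruction* instr,
      SafepointMode safepoint_mode = RECORD_SIMPLE_SAFEPOINT);            // assumed default

Replacing the old bool adjusted flag with named enum values makes each call site self-documenting: CallCode(..., RESTORE_CONTEXT) reads directly as "reload esi first", where CallCode(..., false) did not.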