OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 464 matching lines...) | |
475 descriptor->param_representations_ = representations; | 475 descriptor->param_representations_ = representations; |
476 } | 476 } |
477 } | 477 } |
478 | 478 |
479 | 479 |
480 #define __ ACCESS_MASM(masm) | 480 #define __ ACCESS_MASM(masm) |
481 | 481 |
482 | 482 |
483 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { | 483 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { |
484 // Update the static counter each time a new code stub is generated. | 484 // Update the static counter each time a new code stub is generated. |
485 Isolate* isolate = masm->isolate(); | 485 isolate()->counters()->code_stubs()->Increment(); |
486 isolate->counters()->code_stubs()->Increment(); | |
487 | 486 |
488 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); | 487 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate()); |
489 int param_count = descriptor->register_param_count_; | 488 int param_count = descriptor->register_param_count_; |
490 { | 489 { |
491 // Call the runtime system in a fresh internal frame. | 490 // Call the runtime system in a fresh internal frame. |
492 FrameScope scope(masm, StackFrame::INTERNAL); | 491 FrameScope scope(masm, StackFrame::INTERNAL); |
493 ASSERT(descriptor->register_param_count_ == 0 || | 492 ASSERT(descriptor->register_param_count_ == 0 || |
494 eax.is(descriptor->register_params_[param_count - 1])); | 493 eax.is(descriptor->register_params_[param_count - 1])); |
495 // Push arguments | 494 // Push arguments |
496 for (int i = 0; i < param_count; ++i) { | 495 for (int i = 0; i < param_count; ++i) { |
497 __ push(descriptor->register_params_[i]); | 496 __ push(descriptor->register_params_[i]); |
498 } | 497 } |
(...skipping 16 matching lines...) | |
515 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { | 514 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
516 XMMRegister reg = XMMRegister::from_code(i); | 515 XMMRegister reg = XMMRegister::from_code(i); |
517 __ movsd(Operand(esp, i * kDoubleSize), reg); | 516 __ movsd(Operand(esp, i * kDoubleSize), reg); |
518 } | 517 } |
519 } | 518 } |
520 const int argument_count = 1; | 519 const int argument_count = 1; |
521 | 520 |
522 AllowExternalCallThatCantCauseGC scope(masm); | 521 AllowExternalCallThatCantCauseGC scope(masm); |
523 __ PrepareCallCFunction(argument_count, ecx); | 522 __ PrepareCallCFunction(argument_count, ecx); |
524 __ mov(Operand(esp, 0 * kPointerSize), | 523 __ mov(Operand(esp, 0 * kPointerSize), |
525 Immediate(ExternalReference::isolate_address(masm->isolate()))); | 524 Immediate(ExternalReference::isolate_address(isolate()))); |
526 __ CallCFunction( | 525 __ CallCFunction( |
527 ExternalReference::store_buffer_overflow_function(masm->isolate()), | 526 ExternalReference::store_buffer_overflow_function(isolate()), |
528 argument_count); | 527 argument_count); |
529 if (save_doubles_ == kSaveFPRegs) { | 528 if (save_doubles_ == kSaveFPRegs) { |
530 CpuFeatureScope scope(masm, SSE2); | 529 CpuFeatureScope scope(masm, SSE2); |
531 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { | 530 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
532 XMMRegister reg = XMMRegister::from_code(i); | 531 XMMRegister reg = XMMRegister::from_code(i); |
533 __ movsd(reg, Operand(esp, i * kDoubleSize)); | 532 __ movsd(reg, Operand(esp, i * kDoubleSize)); |
534 } | 533 } |
535 __ add(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); | 534 __ add(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); |
536 } | 535 } |
537 __ popad(); | 536 __ popad(); |
(...skipping 236 matching lines...) | |
774 __ cmp(scratch, factory->heap_number_map()); | 773 __ cmp(scratch, factory->heap_number_map()); |
775 __ j(not_equal, non_float); // argument in eax is not a number -> NaN | 774 __ j(not_equal, non_float); // argument in eax is not a number -> NaN |
776 | 775 |
777 // Fall-through: Both operands are numbers. | 776 // Fall-through: Both operands are numbers. |
778 __ bind(&done); | 777 __ bind(&done); |
779 } | 778 } |
780 | 779 |
781 | 780 |
782 void MathPowStub::Generate(MacroAssembler* masm) { | 781 void MathPowStub::Generate(MacroAssembler* masm) { |
783 CpuFeatureScope use_sse2(masm, SSE2); | 782 CpuFeatureScope use_sse2(masm, SSE2); |
784 Factory* factory = masm->isolate()->factory(); | 783 Factory* factory = isolate()->factory(); |
785 const Register exponent = eax; | 784 const Register exponent = eax; |
786 const Register base = edx; | 785 const Register base = edx; |
787 const Register scratch = ecx; | 786 const Register scratch = ecx; |
788 const XMMRegister double_result = xmm3; | 787 const XMMRegister double_result = xmm3; |
789 const XMMRegister double_base = xmm2; | 788 const XMMRegister double_base = xmm2; |
790 const XMMRegister double_exponent = xmm1; | 789 const XMMRegister double_exponent = xmm1; |
791 const XMMRegister double_scratch = xmm4; | 790 const XMMRegister double_scratch = xmm4; |
792 | 791 |
793 Label call_runtime, done, exponent_not_smi, int_exponent; | 792 Label call_runtime, done, exponent_not_smi, int_exponent; |
794 | 793 |
(...skipping 208 matching lines...) | |
1003 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. | 1002 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. |
1004 __ xorps(double_scratch2, double_scratch2); | 1003 __ xorps(double_scratch2, double_scratch2); |
1005 __ ucomisd(double_scratch2, double_result); // Result cannot be NaN. | 1004 __ ucomisd(double_scratch2, double_result); // Result cannot be NaN. |
1006 // double_exponent aliased as double_scratch2 has already been overwritten | 1005 // double_exponent aliased as double_scratch2 has already been overwritten |
1007 // and may not have contained the exponent value in the first place when the | 1006 // and may not have contained the exponent value in the first place when the |
1008 // exponent is a smi. We reset it with exponent value before bailing out. | 1007 // exponent is a smi. We reset it with exponent value before bailing out. |
1009 __ j(not_equal, &done); | 1008 __ j(not_equal, &done); |
1010 __ Cvtsi2sd(double_exponent, exponent); | 1009 __ Cvtsi2sd(double_exponent, exponent); |
1011 | 1010 |
1012 // Returning or bailing out. | 1011 // Returning or bailing out. |
1013 Counters* counters = masm->isolate()->counters(); | 1012 Counters* counters = isolate()->counters(); |
1014 if (exponent_type_ == ON_STACK) { | 1013 if (exponent_type_ == ON_STACK) { |
1015 // The arguments are still on the stack. | 1014 // The arguments are still on the stack. |
1016 __ bind(&call_runtime); | 1015 __ bind(&call_runtime); |
1017 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); | 1016 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); |
1018 | 1017 |
1019 // The stub is called from non-optimized code, which expects the result | 1018 // The stub is called from non-optimized code, which expects the result |
1020 // as heap number in exponent. | 1019 // as heap number in exponent. |
1021 __ bind(&done); | 1020 __ bind(&done); |
1022 __ AllocateHeapNumber(eax, scratch, base, &call_runtime); | 1021 __ AllocateHeapNumber(eax, scratch, base, &call_runtime); |
1023 __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result); | 1022 __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result); |
1024 __ IncrementCounter(counters->math_pow(), 1); | 1023 __ IncrementCounter(counters->math_pow(), 1); |
1025 __ ret(2 * kPointerSize); | 1024 __ ret(2 * kPointerSize); |
1026 } else { | 1025 } else { |
1027 __ bind(&call_runtime); | 1026 __ bind(&call_runtime); |
1028 { | 1027 { |
1029 AllowExternalCallThatCantCauseGC scope(masm); | 1028 AllowExternalCallThatCantCauseGC scope(masm); |
1030 __ PrepareCallCFunction(4, scratch); | 1029 __ PrepareCallCFunction(4, scratch); |
1031 __ movsd(Operand(esp, 0 * kDoubleSize), double_base); | 1030 __ movsd(Operand(esp, 0 * kDoubleSize), double_base); |
1032 __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent); | 1031 __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent); |
1033 __ CallCFunction( | 1032 __ CallCFunction( |
1034 ExternalReference::power_double_double_function(masm->isolate()), 4); | 1033 ExternalReference::power_double_double_function(isolate()), 4); |
1035 } | 1034 } |
1036 // Return value is in st(0) on ia32. | 1035 // Return value is in st(0) on ia32. |
1037 // Store it into the (fixed) result register. | 1036 // Store it into the (fixed) result register. |
1038 __ sub(esp, Immediate(kDoubleSize)); | 1037 __ sub(esp, Immediate(kDoubleSize)); |
1039 __ fstp_d(Operand(esp, 0)); | 1038 __ fstp_d(Operand(esp, 0)); |
1040 __ movsd(double_result, Operand(esp, 0)); | 1039 __ movsd(double_result, Operand(esp, 0)); |
1041 __ add(esp, Immediate(kDoubleSize)); | 1040 __ add(esp, Immediate(kDoubleSize)); |
1042 | 1041 |
1043 __ bind(&done); | 1042 __ bind(&done); |
1044 __ IncrementCounter(counters->math_pow(), 1); | 1043 __ IncrementCounter(counters->math_pow(), 1); |
1045 __ ret(0); | 1044 __ ret(0); |
1046 } | 1045 } |
1047 } | 1046 } |
1048 | 1047 |
1049 | 1048 |
1050 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 1049 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
1051 // ----------- S t a t e ------------- | 1050 // ----------- S t a t e ------------- |
1052 // -- ecx : name | 1051 // -- ecx : name |
1053 // -- edx : receiver | 1052 // -- edx : receiver |
1054 // -- esp[0] : return address | 1053 // -- esp[0] : return address |
1055 // ----------------------------------- | 1054 // ----------------------------------- |
1056 Label miss; | 1055 Label miss; |
1057 | 1056 |
1058 if (kind() == Code::KEYED_LOAD_IC) { | 1057 if (kind() == Code::KEYED_LOAD_IC) { |
1059 __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string())); | 1058 __ cmp(ecx, Immediate(isolate()->factory()->prototype_string())); |
1060 __ j(not_equal, &miss); | 1059 __ j(not_equal, &miss); |
1061 } | 1060 } |
1062 | 1061 |
1063 StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss); | 1062 StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss); |
1064 __ bind(&miss); | 1063 __ bind(&miss); |
1065 StubCompiler::TailCallBuiltin( | 1064 StubCompiler::TailCallBuiltin( |
1066 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); | 1065 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
1067 } | 1066 } |
1068 | 1067 |
1069 | 1068 |
(...skipping 75 matching lines...) | |
1145 __ lea(edx, Operand(edx, ecx, times_2, | 1144 __ lea(edx, Operand(edx, ecx, times_2, |
1146 StandardFrameConstants::kCallerSPOffset)); | 1145 StandardFrameConstants::kCallerSPOffset)); |
1147 __ mov(Operand(esp, 2 * kPointerSize), edx); | 1146 __ mov(Operand(esp, 2 * kPointerSize), edx); |
1148 | 1147 |
1149 __ bind(&runtime); | 1148 __ bind(&runtime); |
1150 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1); | 1149 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1); |
1151 } | 1150 } |
1152 | 1151 |
1153 | 1152 |
1154 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 1153 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
1155 Isolate* isolate = masm->isolate(); | |
1156 | |
1157 // esp[0] : return address | 1154 // esp[0] : return address |
1158 // esp[4] : number of parameters (tagged) | 1155 // esp[4] : number of parameters (tagged) |
1159 // esp[8] : receiver displacement | 1156 // esp[8] : receiver displacement |
1160 // esp[12] : function | 1157 // esp[12] : function |
1161 | 1158 |
1162 // ebx = parameter count (tagged) | 1159 // ebx = parameter count (tagged) |
1163 __ mov(ebx, Operand(esp, 1 * kPointerSize)); | 1160 __ mov(ebx, Operand(esp, 1 * kPointerSize)); |
1164 | 1161 |
1165 // Check if the calling frame is an arguments adaptor frame. | 1162 // Check if the calling frame is an arguments adaptor frame. |
1166 // TODO(rossberg): Factor out some of the bits that are shared with the other | 1163 // TODO(rossberg): Factor out some of the bits that are shared with the other |
(...skipping 111 matching lines...) | |
1278 // esp[12] = address of receiver argument | 1275 // esp[12] = address of receiver argument |
1279 // Free a register. | 1276 // Free a register. |
1280 __ push(eax); | 1277 __ push(eax); |
1281 | 1278 |
1282 // Initialize parameter map. If there are no mapped arguments, we're done. | 1279 // Initialize parameter map. If there are no mapped arguments, we're done. |
1283 Label skip_parameter_map; | 1280 Label skip_parameter_map; |
1284 __ test(ebx, ebx); | 1281 __ test(ebx, ebx); |
1285 __ j(zero, &skip_parameter_map); | 1282 __ j(zero, &skip_parameter_map); |
1286 | 1283 |
1287 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 1284 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
1288 Immediate(isolate->factory()->sloppy_arguments_elements_map())); | 1285 Immediate(isolate()->factory()->sloppy_arguments_elements_map())); |
1289 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2)))); | 1286 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2)))); |
1290 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax); | 1287 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax); |
1291 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi); | 1288 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi); |
1292 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize)); | 1289 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize)); |
1293 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax); | 1290 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax); |
1294 | 1291 |
1295 // Copy the parameter slots and the holes in the arguments. | 1292 // Copy the parameter slots and the holes in the arguments. |
1296 // We need to fill in mapped_parameter_count slots. They index the context, | 1293 // We need to fill in mapped_parameter_count slots. They index the context, |
1297 // where parameters are stored in reverse order, at | 1294 // where parameters are stored in reverse order, at |
1298 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 | 1295 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 |
1299 // The mapped parameter thus need to get indices | 1296 // The mapped parameter thus need to get indices |
1300 // MIN_CONTEXT_SLOTS+parameter_count-1 .. | 1297 // MIN_CONTEXT_SLOTS+parameter_count-1 .. |
1301 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count | 1298 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count |
1302 // We loop from right to left. | 1299 // We loop from right to left. |
1303 Label parameters_loop, parameters_test; | 1300 Label parameters_loop, parameters_test; |
1304 __ push(ecx); | 1301 __ push(ecx); |
1305 __ mov(eax, Operand(esp, 2 * kPointerSize)); | 1302 __ mov(eax, Operand(esp, 2 * kPointerSize)); |
1306 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); | 1303 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); |
1307 __ add(ebx, Operand(esp, 4 * kPointerSize)); | 1304 __ add(ebx, Operand(esp, 4 * kPointerSize)); |
1308 __ sub(ebx, eax); | 1305 __ sub(ebx, eax); |
1309 __ mov(ecx, isolate->factory()->the_hole_value()); | 1306 __ mov(ecx, isolate()->factory()->the_hole_value()); |
1310 __ mov(edx, edi); | 1307 __ mov(edx, edi); |
1311 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize)); | 1308 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize)); |
1312 // eax = loop variable (tagged) | 1309 // eax = loop variable (tagged) |
1313 // ebx = mapping index (tagged) | 1310 // ebx = mapping index (tagged) |
1314 // ecx = the hole value | 1311 // ecx = the hole value |
1315 // edx = address of parameter map (tagged) | 1312 // edx = address of parameter map (tagged) |
1316 // edi = address of backing store (tagged) | 1313 // edi = address of backing store (tagged) |
1317 // esp[0] = argument count (tagged) | 1314 // esp[0] = argument count (tagged) |
1318 // esp[4] = address of new object (tagged) | 1315 // esp[4] = address of new object (tagged) |
1319 // esp[8] = mapped parameter count (tagged) | 1316 // esp[8] = mapped parameter count (tagged) |
(...skipping 14 matching lines...) | |
1334 __ bind(&skip_parameter_map); | 1331 __ bind(&skip_parameter_map); |
1335 | 1332 |
1336 // ecx = argument count (tagged) | 1333 // ecx = argument count (tagged) |
1337 // edi = address of backing store (tagged) | 1334 // edi = address of backing store (tagged) |
1338 // esp[0] = address of new object (tagged) | 1335 // esp[0] = address of new object (tagged) |
1339 // esp[4] = mapped parameter count (tagged) | 1336 // esp[4] = mapped parameter count (tagged) |
1340 // esp[12] = parameter count (tagged) | 1337 // esp[12] = parameter count (tagged) |
1341 // esp[16] = address of receiver argument | 1338 // esp[16] = address of receiver argument |
1342 // Copy arguments header and remaining slots (if there are any). | 1339 // Copy arguments header and remaining slots (if there are any). |
1343 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 1340 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
1344 Immediate(isolate->factory()->fixed_array_map())); | 1341 Immediate(isolate()->factory()->fixed_array_map())); |
1345 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); | 1342 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); |
1346 | 1343 |
1347 Label arguments_loop, arguments_test; | 1344 Label arguments_loop, arguments_test; |
1348 __ mov(ebx, Operand(esp, 1 * kPointerSize)); | 1345 __ mov(ebx, Operand(esp, 1 * kPointerSize)); |
1349 __ mov(edx, Operand(esp, 4 * kPointerSize)); | 1346 __ mov(edx, Operand(esp, 4 * kPointerSize)); |
1350 __ sub(edx, ebx); // Is there a smarter way to do negative scaling? | 1347 __ sub(edx, ebx); // Is there a smarter way to do negative scaling? |
1351 __ sub(edx, ebx); | 1348 __ sub(edx, ebx); |
1352 __ jmp(&arguments_test, Label::kNear); | 1349 __ jmp(&arguments_test, Label::kNear); |
1353 | 1350 |
1354 __ bind(&arguments_loop); | 1351 __ bind(&arguments_loop); |
(...skipping 15 matching lines...) | |
1370 | 1367 |
1371 // Do the runtime call to allocate the arguments object. | 1368 // Do the runtime call to allocate the arguments object. |
1372 __ bind(&runtime); | 1369 __ bind(&runtime); |
1373 __ pop(eax); // Remove saved parameter count. | 1370 __ pop(eax); // Remove saved parameter count. |
1374 __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count. | 1371 __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count. |
1375 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1); | 1372 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1); |
1376 } | 1373 } |
1377 | 1374 |
1378 | 1375 |
1379 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1376 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
1380 Isolate* isolate = masm->isolate(); | |
1381 | |
1382 // esp[0] : return address | 1377 // esp[0] : return address |
1383 // esp[4] : number of parameters | 1378 // esp[4] : number of parameters |
1384 // esp[8] : receiver displacement | 1379 // esp[8] : receiver displacement |
1385 // esp[12] : function | 1380 // esp[12] : function |
1386 | 1381 |
1387 // Check if the calling frame is an arguments adaptor frame. | 1382 // Check if the calling frame is an arguments adaptor frame. |
1388 Label adaptor_frame, try_allocate, runtime; | 1383 Label adaptor_frame, try_allocate, runtime; |
1389 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 1384 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
1390 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); | 1385 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); |
1391 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1386 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
(...skipping 50 matching lines...) | |
1442 __ j(zero, &done, Label::kNear); | 1437 __ j(zero, &done, Label::kNear); |
1443 | 1438 |
1444 // Get the parameters pointer from the stack. | 1439 // Get the parameters pointer from the stack. |
1445 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 1440 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
1446 | 1441 |
1447 // Set up the elements pointer in the allocated arguments object and | 1442 // Set up the elements pointer in the allocated arguments object and |
1448 // initialize the header in the elements fixed array. | 1443 // initialize the header in the elements fixed array. |
1449 __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize)); | 1444 __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize)); |
1450 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); | 1445 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); |
1451 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 1446 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
1452 Immediate(isolate->factory()->fixed_array_map())); | 1447 Immediate(isolate()->factory()->fixed_array_map())); |
1453 | 1448 |
1454 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); | 1449 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); |
1455 // Untag the length for the loop below. | 1450 // Untag the length for the loop below. |
1456 __ SmiUntag(ecx); | 1451 __ SmiUntag(ecx); |
1457 | 1452 |
1458 // Copy the fixed array slots. | 1453 // Copy the fixed array slots. |
1459 Label loop; | 1454 Label loop; |
1460 __ bind(&loop); | 1455 __ bind(&loop); |
1461 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. | 1456 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. |
1462 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); | 1457 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); |
(...skipping 26 matching lines...) | |
1489 // esp[8]: previous index | 1484 // esp[8]: previous index |
1490 // esp[12]: subject string | 1485 // esp[12]: subject string |
1491 // esp[16]: JSRegExp object | 1486 // esp[16]: JSRegExp object |
1492 | 1487 |
1493 static const int kLastMatchInfoOffset = 1 * kPointerSize; | 1488 static const int kLastMatchInfoOffset = 1 * kPointerSize; |
1494 static const int kPreviousIndexOffset = 2 * kPointerSize; | 1489 static const int kPreviousIndexOffset = 2 * kPointerSize; |
1495 static const int kSubjectOffset = 3 * kPointerSize; | 1490 static const int kSubjectOffset = 3 * kPointerSize; |
1496 static const int kJSRegExpOffset = 4 * kPointerSize; | 1491 static const int kJSRegExpOffset = 4 * kPointerSize; |
1497 | 1492 |
1498 Label runtime; | 1493 Label runtime; |
1499 Factory* factory = masm->isolate()->factory(); | 1494 Factory* factory = isolate()->factory(); |
1500 | 1495 |
1501 // Ensure that a RegExp stack is allocated. | 1496 // Ensure that a RegExp stack is allocated. |
1502 ExternalReference address_of_regexp_stack_memory_address = | 1497 ExternalReference address_of_regexp_stack_memory_address = |
1503 ExternalReference::address_of_regexp_stack_memory_address( | 1498 ExternalReference::address_of_regexp_stack_memory_address(isolate()); |
1504 masm->isolate()); | |
1505 ExternalReference address_of_regexp_stack_memory_size = | 1499 ExternalReference address_of_regexp_stack_memory_size = |
1506 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate()); | 1500 ExternalReference::address_of_regexp_stack_memory_size(isolate()); |
1507 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); | 1501 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); |
1508 __ test(ebx, ebx); | 1502 __ test(ebx, ebx); |
1509 __ j(zero, &runtime); | 1503 __ j(zero, &runtime); |
1510 | 1504 |
1511 // Check that the first argument is a JSRegExp object. | 1505 // Check that the first argument is a JSRegExp object. |
1512 __ mov(eax, Operand(esp, kJSRegExpOffset)); | 1506 __ mov(eax, Operand(esp, kJSRegExpOffset)); |
1513 STATIC_ASSERT(kSmiTag == 0); | 1507 STATIC_ASSERT(kSmiTag == 0); |
1514 __ JumpIfSmi(eax, &runtime); | 1508 __ JumpIfSmi(eax, &runtime); |
1515 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); | 1509 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); |
1516 __ j(not_equal, &runtime); | 1510 __ j(not_equal, &runtime); |
(...skipping 128 matching lines...) | |
1645 // Check that the irregexp code has been generated for the actual string | 1639 // Check that the irregexp code has been generated for the actual string |
1646 // encoding. If it has, the field contains a code object otherwise it contains | 1640 // encoding. If it has, the field contains a code object otherwise it contains |
1647 // a smi (code flushing support). | 1641 // a smi (code flushing support). |
1648 __ JumpIfSmi(edx, &runtime); | 1642 __ JumpIfSmi(edx, &runtime); |
1649 | 1643 |
1650 // eax: subject string | 1644 // eax: subject string |
1651 // ebx: previous index (smi) | 1645 // ebx: previous index (smi) |
1652 // edx: code | 1646 // edx: code |
1653 // ecx: encoding of subject string (1 if ASCII, 0 if two_byte); | 1647 // ecx: encoding of subject string (1 if ASCII, 0 if two_byte); |
1654 // All checks done. Now push arguments for native regexp code. | 1648 // All checks done. Now push arguments for native regexp code. |
1655 Counters* counters = masm->isolate()->counters(); | 1649 Counters* counters = isolate()->counters(); |
1656 __ IncrementCounter(counters->regexp_entry_native(), 1); | 1650 __ IncrementCounter(counters->regexp_entry_native(), 1); |
1657 | 1651 |
1658 // Isolates: note we add an additional parameter here (isolate pointer). | 1652 // Isolates: note we add an additional parameter here (isolate pointer). |
1659 static const int kRegExpExecuteArguments = 9; | 1653 static const int kRegExpExecuteArguments = 9; |
1660 __ EnterApiExitFrame(kRegExpExecuteArguments); | 1654 __ EnterApiExitFrame(kRegExpExecuteArguments); |
1661 | 1655 |
1662 // Argument 9: Pass current isolate address. | 1656 // Argument 9: Pass current isolate address. |
1663 __ mov(Operand(esp, 8 * kPointerSize), | 1657 __ mov(Operand(esp, 8 * kPointerSize), |
1664 Immediate(ExternalReference::isolate_address(masm->isolate()))); | 1658 Immediate(ExternalReference::isolate_address(isolate()))); |
1665 | 1659 |
1666 // Argument 8: Indicate that this is a direct call from JavaScript. | 1660 // Argument 8: Indicate that this is a direct call from JavaScript. |
1667 __ mov(Operand(esp, 7 * kPointerSize), Immediate(1)); | 1661 __ mov(Operand(esp, 7 * kPointerSize), Immediate(1)); |
1668 | 1662 |
1669 // Argument 7: Start (high end) of backtracking stack memory area. | 1663 // Argument 7: Start (high end) of backtracking stack memory area. |
1670 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address)); | 1664 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address)); |
1671 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size)); | 1665 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size)); |
1672 __ mov(Operand(esp, 6 * kPointerSize), esi); | 1666 __ mov(Operand(esp, 6 * kPointerSize), esi); |
1673 | 1667 |
1674 // Argument 6: Set the number of capture registers to zero to force global | 1668 // Argument 6: Set the number of capture registers to zero to force global |
1675 // regexps to behave as non-global. This does not affect non-global regexps. | 1669 // regexps to behave as non-global. This does not affect non-global regexps. |
1676 __ mov(Operand(esp, 5 * kPointerSize), Immediate(0)); | 1670 __ mov(Operand(esp, 5 * kPointerSize), Immediate(0)); |
1677 | 1671 |
1678 // Argument 5: static offsets vector buffer. | 1672 // Argument 5: static offsets vector buffer. |
1679 __ mov(Operand(esp, 4 * kPointerSize), | 1673 __ mov(Operand(esp, 4 * kPointerSize), |
1680 Immediate(ExternalReference::address_of_static_offsets_vector( | 1674 Immediate(ExternalReference::address_of_static_offsets_vector( |
1681 masm->isolate()))); | 1675 isolate()))); |
1682 | 1676 |
1683 // Argument 2: Previous index. | 1677 // Argument 2: Previous index. |
1684 __ SmiUntag(ebx); | 1678 __ SmiUntag(ebx); |
1685 __ mov(Operand(esp, 1 * kPointerSize), ebx); | 1679 __ mov(Operand(esp, 1 * kPointerSize), ebx); |
1686 | 1680 |
1687 // Argument 1: Original subject string. | 1681 // Argument 1: Original subject string. |
1688 // The original subject is in the previous stack frame. Therefore we have to | 1682 // The original subject is in the previous stack frame. Therefore we have to |
1689 // use ebp, which points exactly to one pointer size below the previous esp. | 1683 // use ebp, which points exactly to one pointer size below the previous esp. |
1690 // (Because creating a new stack frame pushes the previous ebp onto the stack | 1684 // (Because creating a new stack frame pushes the previous ebp onto the stack |
1691 // and thereby moves up esp by one kPointerSize.) | 1685 // and thereby moves up esp by one kPointerSize.) |
(...skipping 53 matching lines...) | |
1745 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); | 1739 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); |
1746 __ j(equal, &failure); | 1740 __ j(equal, &failure); |
1747 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); | 1741 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); |
1748 // If not exception it can only be retry. Handle that in the runtime system. | 1742 // If not exception it can only be retry. Handle that in the runtime system. |
1749 __ j(not_equal, &runtime); | 1743 __ j(not_equal, &runtime); |
1750 // Result must now be exception. If there is no pending exception already a | 1744 // Result must now be exception. If there is no pending exception already a |
1751 // stack overflow (on the backtrack stack) was detected in RegExp code but | 1745 // stack overflow (on the backtrack stack) was detected in RegExp code but |
1752 // haven't created the exception yet. Handle that in the runtime system. | 1746 // haven't created the exception yet. Handle that in the runtime system. |
1753 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 1747 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
1754 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, | 1748 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, |
1755 masm->isolate()); | 1749 isolate()); |
1756 __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); | 1750 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); |
1757 __ mov(eax, Operand::StaticVariable(pending_exception)); | 1751 __ mov(eax, Operand::StaticVariable(pending_exception)); |
1758 __ cmp(edx, eax); | 1752 __ cmp(edx, eax); |
1759 __ j(equal, &runtime); | 1753 __ j(equal, &runtime); |
1760 // For exception, throw the exception again. | 1754 // For exception, throw the exception again. |
1761 | 1755 |
1762 // Clear the pending exception variable. | 1756 // Clear the pending exception variable. |
1763 __ mov(Operand::StaticVariable(pending_exception), edx); | 1757 __ mov(Operand::StaticVariable(pending_exception), edx); |
1764 | 1758 |
1765 // Special handling of termination exceptions which are uncatchable | 1759 // Special handling of termination exceptions which are uncatchable |
1766 // by javascript code. | 1760 // by javascript code. |
(...skipping 60 matching lines...) | |
1827 __ mov(eax, ecx); | 1821 __ mov(eax, ecx); |
1828 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); | 1822 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); |
1829 __ RecordWriteField(ebx, | 1823 __ RecordWriteField(ebx, |
1830 RegExpImpl::kLastInputOffset, | 1824 RegExpImpl::kLastInputOffset, |
1831 eax, | 1825 eax, |
1832 edi, | 1826 edi, |
1833 kDontSaveFPRegs); | 1827 kDontSaveFPRegs); |
1834 | 1828 |
1835 // Get the static offsets vector filled by the native regexp code. | 1829 // Get the static offsets vector filled by the native regexp code. |
1836 ExternalReference address_of_static_offsets_vector = | 1830 ExternalReference address_of_static_offsets_vector = |
1837 ExternalReference::address_of_static_offsets_vector(masm->isolate()); | 1831 ExternalReference::address_of_static_offsets_vector(isolate()); |
1838 __ mov(ecx, Immediate(address_of_static_offsets_vector)); | 1832 __ mov(ecx, Immediate(address_of_static_offsets_vector)); |
1839 | 1833 |
1840 // ebx: last_match_info backing store (FixedArray) | 1834 // ebx: last_match_info backing store (FixedArray) |
1841 // ecx: offsets vector | 1835 // ecx: offsets vector |
1842 // edx: number of capture registers | 1836 // edx: number of capture registers |
1843 Label next_capture, done; | 1837 Label next_capture, done; |
1844 // Capture register counter starts from number of capture registers and | 1838 // Capture register counter starts from number of capture registers and |
1845 // counts down until wrapping after zero. | 1839 // counts down until wrapping after zero. |
1846 __ bind(&next_capture); | 1840 __ bind(&next_capture); |
1847 __ sub(edx, Immediate(1)); | 1841 __ sub(edx, Immediate(1)); |
(...skipping 145 matching lines...) | |
1993 Label generic_heap_number_comparison; | 1987 Label generic_heap_number_comparison; |
1994 { | 1988 { |
1995 Label not_identical; | 1989 Label not_identical; |
1996 __ cmp(eax, edx); | 1990 __ cmp(eax, edx); |
1997 __ j(not_equal, &not_identical); | 1991 __ j(not_equal, &not_identical); |
1998 | 1992 |
1999 if (cc != equal) { | 1993 if (cc != equal) { |
2000 // Check for undefined. undefined OP undefined is false even though | 1994 // Check for undefined. undefined OP undefined is false even though |
2001 // undefined == undefined. | 1995 // undefined == undefined. |
2002 Label check_for_nan; | 1996 Label check_for_nan; |
2003 __ cmp(edx, masm->isolate()->factory()->undefined_value()); | 1997 __ cmp(edx, isolate()->factory()->undefined_value()); |
2004 __ j(not_equal, &check_for_nan, Label::kNear); | 1998 __ j(not_equal, &check_for_nan, Label::kNear); |
2005 __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc)))); | 1999 __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc)))); |
2006 __ ret(0); | 2000 __ ret(0); |
2007 __ bind(&check_for_nan); | 2001 __ bind(&check_for_nan); |
2008 } | 2002 } |
2009 | 2003 |
2010 // Test for NaN. Compare heap numbers in a general way, | 2004 // Test for NaN. Compare heap numbers in a general way, |
2011 // to handle NaNs correctly. | 2005 // to handle NaNs correctly. |
2012 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), | 2006 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), |
2013 Immediate(masm->isolate()->factory()->heap_number_map())); | 2007 Immediate(isolate()->factory()->heap_number_map())); |
2014 __ j(equal, &generic_heap_number_comparison, Label::kNear); | 2008 __ j(equal, &generic_heap_number_comparison, Label::kNear); |
2015 if (cc != equal) { | 2009 if (cc != equal) { |
2016 // Call runtime on identical JSObjects. Otherwise return equal. | 2010 // Call runtime on identical JSObjects. Otherwise return equal. |
2017 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); | 2011 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx); |
2018 __ j(above_equal, &not_identical); | 2012 __ j(above_equal, &not_identical); |
2019 } | 2013 } |
2020 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); | 2014 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); |
2021 __ ret(0); | 2015 __ ret(0); |
2022 | 2016 |
2023 | 2017 |
(...skipping 24 matching lines...) | |
2048 // ecx still holds eax & kSmiTag, which is either zero or one. | 2042 // ecx still holds eax & kSmiTag, which is either zero or one. |
2049 __ sub(ecx, Immediate(0x01)); | 2043 __ sub(ecx, Immediate(0x01)); |
2050 __ mov(ebx, edx); | 2044 __ mov(ebx, edx); |
2051 __ xor_(ebx, eax); | 2045 __ xor_(ebx, eax); |
2052 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx. | 2046 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx. |
2053 __ xor_(ebx, eax); | 2047 __ xor_(ebx, eax); |
2054 // if eax was smi, ebx is now edx, else eax. | 2048 // if eax was smi, ebx is now edx, else eax. |
2055 | 2049 |
2056 // Check if the non-smi operand is a heap number. | 2050 // Check if the non-smi operand is a heap number. |
2057 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), | 2051 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), |
2058 Immediate(masm->isolate()->factory()->heap_number_map())); | 2052 Immediate(isolate()->factory()->heap_number_map())); |
2059 // If heap number, handle it in the slow case. | 2053 // If heap number, handle it in the slow case. |
2060 __ j(equal, &slow, Label::kNear); | 2054 __ j(equal, &slow, Label::kNear); |
2061 // Return non-equal (ebx is not zero) | 2055 // Return non-equal (ebx is not zero) |
2062 __ mov(eax, ebx); | 2056 __ mov(eax, ebx); |
2063 __ ret(0); | 2057 __ ret(0); |
2064 | 2058 |
2065 __ bind(&not_smis); | 2059 __ bind(&not_smis); |
2066 // If either operand is a JSObject or an oddball value, then they are not | 2060 // If either operand is a JSObject or an oddball value, then they are not |
2067 // equal since their pointers are different | 2061 // equal since their pointers are different |
2068 // There is no test for undetectability in strict equality. | 2062 // There is no test for undetectability in strict equality. |
(...skipping 208 matching lines...) | |
2277 __ cmp(ecx, edi); | 2271 __ cmp(ecx, edi); |
2278 __ j(equal, &done, Label::kFar); | 2272 __ j(equal, &done, Label::kFar); |
2279 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); | 2273 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); |
2280 __ j(equal, &done, Label::kFar); | 2274 __ j(equal, &done, Label::kFar); |
2281 | 2275 |
2282 if (!FLAG_pretenuring_call_new) { | 2276 if (!FLAG_pretenuring_call_new) { |
2283 // If we came here, we need to see if we are the array function. | 2277 // If we came here, we need to see if we are the array function. |
2284 // If we didn't have a matching function, and we didn't find the megamorph | 2278 // If we didn't have a matching function, and we didn't find the megamorph |
2285 // sentinel, then we have in the slot either some other function or an | 2279 // sentinel, then we have in the slot either some other function or an |
2286 // AllocationSite. Do a map check on the object in ecx. | 2280 // AllocationSite. Do a map check on the object in ecx. |
2287 Handle<Map> allocation_site_map = | 2281 Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map(); |
2288 masm->isolate()->factory()->allocation_site_map(); | |
2289 __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); | 2282 __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); |
2290 __ j(not_equal, &miss); | 2283 __ j(not_equal, &miss); |
2291 | 2284 |
2292 // Make sure the function is the Array() function | 2285 // Make sure the function is the Array() function |
2293 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); | 2286 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); |
2294 __ cmp(edi, ecx); | 2287 __ cmp(edi, ecx); |
2295 __ j(not_equal, &megamorphic); | 2288 __ j(not_equal, &megamorphic); |
2296 __ jmp(&done, Label::kFar); | 2289 __ jmp(&done, Label::kFar); |
2297 } | 2290 } |
2298 | 2291 |
(...skipping 26 matching lines...) | |
2325 { | 2318 { |
2326 FrameScope scope(masm, StackFrame::INTERNAL); | 2319 FrameScope scope(masm, StackFrame::INTERNAL); |
2327 | 2320 |
2328 // Arguments register must be smi-tagged to call out. | 2321 // Arguments register must be smi-tagged to call out. |
2329 __ SmiTag(eax); | 2322 __ SmiTag(eax); |
2330 __ push(eax); | 2323 __ push(eax); |
2331 __ push(edi); | 2324 __ push(edi); |
2332 __ push(edx); | 2325 __ push(edx); |
2333 __ push(ebx); | 2326 __ push(ebx); |
2334 | 2327 |
2335 CreateAllocationSiteStub create_stub; | 2328 CreateAllocationSiteStub create_stub(isolate); |
2336 __ CallStub(&create_stub); | 2329 __ CallStub(&create_stub); |
2337 | 2330 |
2338 __ pop(ebx); | 2331 __ pop(ebx); |
2339 __ pop(edx); | 2332 __ pop(edx); |
2340 __ pop(edi); | 2333 __ pop(edi); |
2341 __ pop(eax); | 2334 __ pop(eax); |
2342 __ SmiUntag(eax); | 2335 __ SmiUntag(eax); |
2343 } | 2336 } |
2344 __ jmp(&done); | 2337 __ jmp(&done); |
2345 | 2338 |
(...skipping 15 matching lines...) | |
2361 | 2354 |
2362 __ bind(&done); | 2355 __ bind(&done); |
2363 } | 2356 } |
2364 | 2357 |
2365 | 2358 |
2366 void CallFunctionStub::Generate(MacroAssembler* masm) { | 2359 void CallFunctionStub::Generate(MacroAssembler* masm) { |
2367 // ebx : feedback vector | 2360 // ebx : feedback vector |
2368 // edx : (only if ebx is not the megamorphic symbol) slot in feedback | 2361 // edx : (only if ebx is not the megamorphic symbol) slot in feedback |
2369 // vector (Smi) | 2362 // vector (Smi) |
2370 // edi : the function to call | 2363 // edi : the function to call |
2371 Isolate* isolate = masm->isolate(); | |
2372 Label slow, non_function, wrap, cont; | 2364 Label slow, non_function, wrap, cont; |
2373 | 2365 |
2374 if (NeedsChecks()) { | 2366 if (NeedsChecks()) { |
2375 // Check that the function really is a JavaScript function. | 2367 // Check that the function really is a JavaScript function. |
2376 __ JumpIfSmi(edi, &non_function); | 2368 __ JumpIfSmi(edi, &non_function); |
2377 | 2369 |
2378 // Goto slow case if we do not have a function. | 2370 // Goto slow case if we do not have a function. |
2379 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); | 2371 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); |
2380 __ j(not_equal, &slow); | 2372 __ j(not_equal, &slow); |
2381 | 2373 |
2382 if (RecordCallTarget()) { | 2374 if (RecordCallTarget()) { |
2383 GenerateRecordCallTarget(masm); | 2375 GenerateRecordCallTarget(masm); |
2384 // Type information was updated. Because we may call Array, which | 2376 // Type information was updated. Because we may call Array, which |
2385 // expects either undefined or an AllocationSite in ebx we need | 2377 // expects either undefined or an AllocationSite in ebx we need |
2386 // to set ebx to undefined. | 2378 // to set ebx to undefined. |
2387 __ mov(ebx, Immediate(isolate->factory()->undefined_value())); | 2379 __ mov(ebx, Immediate(isolate()->factory()->undefined_value())); |
2388 } | 2380 } |
2389 } | 2381 } |
2390 | 2382 |
2391 // Fast-case: Just invoke the function. | 2383 // Fast-case: Just invoke the function. |
2392 ParameterCount actual(argc_); | 2384 ParameterCount actual(argc_); |
2393 | 2385 |
2394 if (CallAsMethod()) { | 2386 if (CallAsMethod()) { |
2395 if (NeedsChecks()) { | 2387 if (NeedsChecks()) { |
2396 // Do not transform the receiver for strict mode functions. | 2388 // Do not transform the receiver for strict mode functions. |
2397 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 2389 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); |
(...skipping 26 matching lines...) | |
2424 | 2416 |
2425 if (NeedsChecks()) { | 2417 if (NeedsChecks()) { |
2426 // Slow-case: Non-function called. | 2418 // Slow-case: Non-function called. |
2427 __ bind(&slow); | 2419 __ bind(&slow); |
2428 if (RecordCallTarget()) { | 2420 if (RecordCallTarget()) { |
2429 // If there is a call target cache, mark it megamorphic in the | 2421 // If there is a call target cache, mark it megamorphic in the |
2430 // non-function case. MegamorphicSentinel is an immortal immovable | 2422 // non-function case. MegamorphicSentinel is an immortal immovable |
2431 // object (megamorphic symbol) so no write barrier is needed. | 2423 // object (megamorphic symbol) so no write barrier is needed. |
2432 __ mov(FieldOperand(ebx, edx, times_half_pointer_size, | 2424 __ mov(FieldOperand(ebx, edx, times_half_pointer_size, |
2433 FixedArray::kHeaderSize), | 2425 FixedArray::kHeaderSize), |
2434 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate))); | 2426 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); |
2435 } | 2427 } |
2436 // Check for function proxy. | 2428 // Check for function proxy. |
2437 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); | 2429 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); |
2438 __ j(not_equal, &non_function); | 2430 __ j(not_equal, &non_function); |
2439 __ pop(ecx); | 2431 __ pop(ecx); |
2440 __ push(edi); // put proxy as additional argument under return address | 2432 __ push(edi); // put proxy as additional argument under return address |
2441 __ push(ecx); | 2433 __ push(ecx); |
2442 __ Move(eax, Immediate(argc_ + 1)); | 2434 __ Move(eax, Immediate(argc_ + 1)); |
2443 __ Move(ebx, Immediate(0)); | 2435 __ Move(ebx, Immediate(0)); |
2444 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); | 2436 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); |
2445 { | 2437 { |
2446 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); | 2438 Handle<Code> adaptor = |
| 2439 isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
2447 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 2440 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
2448 } | 2441 } |
2449 | 2442 |
2450 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 2443 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
2451 // of the original receiver from the call site). | 2444 // of the original receiver from the call site). |
2452 __ bind(&non_function); | 2445 __ bind(&non_function); |
2453 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); | 2446 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); |
2454 __ Move(eax, Immediate(argc_)); | 2447 __ Move(eax, Immediate(argc_)); |
2455 __ Move(ebx, Immediate(0)); | 2448 __ Move(ebx, Immediate(0)); |
2456 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); | 2449 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); |
2457 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); | 2450 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
2458 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 2451 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
2459 } | 2452 } |
2460 | 2453 |
2461 if (CallAsMethod()) { | 2454 if (CallAsMethod()) { |
2462 __ bind(&wrap); | 2455 __ bind(&wrap); |
2463 // Wrap the receiver and patch it back onto the stack. | 2456 // Wrap the receiver and patch it back onto the stack. |
2464 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | 2457 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
2465 __ push(edi); | 2458 __ push(edi); |
2466 __ push(eax); | 2459 __ push(eax); |
2467 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 2460 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
(...skipping 27 matching lines...) | |
2495 // By adding kPointerSize we encode that we know the AllocationSite | 2488 // By adding kPointerSize we encode that we know the AllocationSite |
2496 // entry is at the feedback vector slot given by edx + 1. | 2489 // entry is at the feedback vector slot given by edx + 1. |
2497 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, | 2490 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, |
2498 FixedArray::kHeaderSize + kPointerSize)); | 2491 FixedArray::kHeaderSize + kPointerSize)); |
2499 } else { | 2492 } else { |
2500 Label feedback_register_initialized; | 2493 Label feedback_register_initialized; |
2501 // Put the AllocationSite from the feedback vector into ebx, or undefined. | 2494 // Put the AllocationSite from the feedback vector into ebx, or undefined. |
2502 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, | 2495 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, |
2503 FixedArray::kHeaderSize)); | 2496 FixedArray::kHeaderSize)); |
2504 Handle<Map> allocation_site_map = | 2497 Handle<Map> allocation_site_map = |
2505 masm->isolate()->factory()->allocation_site_map(); | 2498 isolate()->factory()->allocation_site_map(); |
2506 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); | 2499 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); |
2507 __ j(equal, &feedback_register_initialized); | 2500 __ j(equal, &feedback_register_initialized); |
2508 __ mov(ebx, masm->isolate()->factory()->undefined_value()); | 2501 __ mov(ebx, isolate()->factory()->undefined_value()); |
2509 __ bind(&feedback_register_initialized); | 2502 __ bind(&feedback_register_initialized); |
2510 } | 2503 } |
2511 | 2504 |
2512 __ AssertUndefinedOrAllocationSite(ebx); | 2505 __ AssertUndefinedOrAllocationSite(ebx); |
2513 } | 2506 } |
2514 | 2507 |
2515 // Jump to the function-specific construct stub. | 2508 // Jump to the function-specific construct stub. |
2516 Register jmp_reg = ecx; | 2509 Register jmp_reg = ecx; |
2517 __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 2510 __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); |
2518 __ mov(jmp_reg, FieldOperand(jmp_reg, | 2511 __ mov(jmp_reg, FieldOperand(jmp_reg, |
(...skipping 10 matching lines...) | |
2529 __ j(not_equal, &non_function_call); | 2522 __ j(not_equal, &non_function_call); |
2530 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); | 2523 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); |
2531 __ jmp(&do_call); | 2524 __ jmp(&do_call); |
2532 | 2525 |
2533 __ bind(&non_function_call); | 2526 __ bind(&non_function_call); |
2534 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 2527 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
2535 __ bind(&do_call); | 2528 __ bind(&do_call); |
2536 // Set expected number of arguments to zero (not changing eax). | 2529 // Set expected number of arguments to zero (not changing eax). |
2537 __ Move(ebx, Immediate(0)); | 2530 __ Move(ebx, Immediate(0)); |
2538 Handle<Code> arguments_adaptor = | 2531 Handle<Code> arguments_adaptor = |
2539 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 2532 isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
2540 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); | 2533 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); |
2541 } | 2534 } |
2542 | 2535 |
2543 | 2536 |
2544 bool CEntryStub::NeedsImmovableCode() { | 2537 bool CEntryStub::NeedsImmovableCode() { |
2545 return false; | 2538 return false; |
2546 } | 2539 } |
2547 | 2540 |
2548 | 2541 |
2549 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2542 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
2550 CEntryStub::GenerateAheadOfTime(isolate); | 2543 CEntryStub::GenerateAheadOfTime(isolate); |
2551 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2544 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
2552 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2545 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
2553 // It is important that the store buffer overflow stubs are generated first. | 2546 // It is important that the store buffer overflow stubs are generated first. |
2554 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 2547 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
2555 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 2548 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
2556 if (Serializer::enabled()) { | 2549 if (Serializer::enabled()) { |
2557 PlatformFeatureScope sse2(SSE2); | 2550 PlatformFeatureScope sse2(SSE2); |
2558 BinaryOpICStub::GenerateAheadOfTime(isolate); | 2551 BinaryOpICStub::GenerateAheadOfTime(isolate); |
2559 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 2552 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
2560 } else { | 2553 } else { |
2561 BinaryOpICStub::GenerateAheadOfTime(isolate); | 2554 BinaryOpICStub::GenerateAheadOfTime(isolate); |
2562 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 2555 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
2563 } | 2556 } |
2564 } | 2557 } |
2565 | 2558 |
2566 | 2559 |
2567 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 2560 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
2568 if (CpuFeatures::IsSupported(SSE2)) { | 2561 if (CpuFeatures::IsSupported(SSE2)) { |
2569 CEntryStub save_doubles(1, kSaveFPRegs); | 2562 CEntryStub save_doubles(isolate, 1, kSaveFPRegs); |
2570 // Stubs might already be in the snapshot, detect that and don't regenerate, | 2563 // Stubs might already be in the snapshot, detect that and don't regenerate, |
2571 // which would lead to code stub initialization state being messed up. | 2564 // which would lead to code stub initialization state being messed up. |
2572 Code* save_doubles_code; | 2565 Code* save_doubles_code; |
2573 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { | 2566 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { |
2574 save_doubles_code = *(save_doubles.GetCode(isolate)); | 2567 save_doubles_code = *(save_doubles.GetCode(isolate)); |
2575 } | 2568 } |
2576 isolate->set_fp_stubs_generated(true); | 2569 isolate->set_fp_stubs_generated(true); |
2577 } | 2570 } |
2578 } | 2571 } |
2579 | 2572 |
2580 | 2573 |
2581 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 2574 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
2582 CEntryStub stub(1, kDontSaveFPRegs); | 2575 CEntryStub stub(isolate, 1, kDontSaveFPRegs); |
2583 stub.GetCode(isolate); | 2576 stub.GetCode(isolate); |
2584 } | 2577 } |
2585 | 2578 |
2586 | 2579 |
2587 void CEntryStub::Generate(MacroAssembler* masm) { | 2580 void CEntryStub::Generate(MacroAssembler* masm) { |
2588 // eax: number of arguments including receiver | 2581 // eax: number of arguments including receiver |
2589 // ebx: pointer to C function (C callee-saved) | 2582 // ebx: pointer to C function (C callee-saved) |
2590 // ebp: frame pointer (restored after C call) | 2583 // ebp: frame pointer (restored after C call) |
2591 // esp: stack pointer (restored after C call) | 2584 // esp: stack pointer (restored after C call) |
2592 // esi: current context (C callee-saved) | 2585 // esi: current context (C callee-saved) |
2593 // edi: JS function of the caller (C callee-saved) | 2586 // edi: JS function of the caller (C callee-saved) |
2594 | 2587 |
2595 ProfileEntryHookStub::MaybeCallEntryHook(masm); | 2588 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
2596 | 2589 |
2597 // Enter the exit frame that transitions from JavaScript to C++. | 2590 // Enter the exit frame that transitions from JavaScript to C++. |
2598 __ EnterExitFrame(save_doubles_ == kSaveFPRegs); | 2591 __ EnterExitFrame(save_doubles_ == kSaveFPRegs); |
2599 | 2592 |
2600 // ebx: pointer to C function (C callee-saved) | 2593 // ebx: pointer to C function (C callee-saved) |
2601 // ebp: frame pointer (restored after C call) | 2594 // ebp: frame pointer (restored after C call) |
2602 // esp: stack pointer (restored after C call) | 2595 // esp: stack pointer (restored after C call) |
2603 // edi: number of arguments including receiver (C callee-saved) | 2596 // edi: number of arguments including receiver (C callee-saved) |
2604 // esi: pointer to the first argument (C callee-saved) | 2597 // esi: pointer to the first argument (C callee-saved) |
2605 | 2598 |
2606 // Result returned in eax, or eax+edx if result_size_ is 2. | 2599 // Result returned in eax, or eax+edx if result_size_ is 2. |
2607 | 2600 |
2608 Isolate* isolate = masm->isolate(); | |
2609 | |
2610 // Check stack alignment. | 2601 // Check stack alignment. |
2611 if (FLAG_debug_code) { | 2602 if (FLAG_debug_code) { |
2612 __ CheckStackAlignment(); | 2603 __ CheckStackAlignment(); |
2613 } | 2604 } |
2614 | 2605 |
2615 // Call C function. | 2606 // Call C function. |
2616 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. | 2607 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. |
2617 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. | 2608 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. |
2618 __ mov(Operand(esp, 2 * kPointerSize), | 2609 __ mov(Operand(esp, 2 * kPointerSize), |
2619 Immediate(ExternalReference::isolate_address(isolate))); | 2610 Immediate(ExternalReference::isolate_address(isolate()))); |
2620 __ call(ebx); | 2611 __ call(ebx); |
2621 // Result is in eax or edx:eax - do not destroy these registers! | 2612 // Result is in eax or edx:eax - do not destroy these registers! |
2622 | 2613 |
2623 // Runtime functions should not return 'the hole'. Allowing it to escape may | 2614 // Runtime functions should not return 'the hole'. Allowing it to escape may |
2624 // lead to crashes in the IC code later. | 2615 // lead to crashes in the IC code later. |
2625 if (FLAG_debug_code) { | 2616 if (FLAG_debug_code) { |
2626 Label okay; | 2617 Label okay; |
2627 __ cmp(eax, isolate->factory()->the_hole_value()); | 2618 __ cmp(eax, isolate()->factory()->the_hole_value()); |
2628 __ j(not_equal, &okay, Label::kNear); | 2619 __ j(not_equal, &okay, Label::kNear); |
2629 __ int3(); | 2620 __ int3(); |
2630 __ bind(&okay); | 2621 __ bind(&okay); |
2631 } | 2622 } |
2632 | 2623 |
2633 // Check result for exception sentinel. | 2624 // Check result for exception sentinel. |
2634 Label exception_returned; | 2625 Label exception_returned; |
2635 __ cmp(eax, isolate->factory()->exception()); | 2626 __ cmp(eax, isolate()->factory()->exception()); |
2636 __ j(equal, &exception_returned); | 2627 __ j(equal, &exception_returned); |
2637 | 2628 |
2638 ExternalReference pending_exception_address( | 2629 ExternalReference pending_exception_address( |
2639 Isolate::kPendingExceptionAddress, isolate); | 2630 Isolate::kPendingExceptionAddress, isolate()); |
2640 | 2631 |
2641 // Check that there is no pending exception, otherwise we | 2632 // Check that there is no pending exception, otherwise we |
2642 // should have returned the exception sentinel. | 2633 // should have returned the exception sentinel. |
2643 if (FLAG_debug_code) { | 2634 if (FLAG_debug_code) { |
2644 __ push(edx); | 2635 __ push(edx); |
2645 __ mov(edx, Immediate(isolate->factory()->the_hole_value())); | 2636 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); |
2646 Label okay; | 2637 Label okay; |
2647 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); | 2638 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); |
2648 // Cannot use check here as it attempts to generate call into runtime. | 2639 // Cannot use check here as it attempts to generate call into runtime. |
2649 __ j(equal, &okay, Label::kNear); | 2640 __ j(equal, &okay, Label::kNear); |
2650 __ int3(); | 2641 __ int3(); |
2651 __ bind(&okay); | 2642 __ bind(&okay); |
2652 __ pop(edx); | 2643 __ pop(edx); |
2653 } | 2644 } |
2654 | 2645 |
2655 // Exit the JavaScript to C++ exit frame. | 2646 // Exit the JavaScript to C++ exit frame. |
2656 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); | 2647 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); |
2657 __ ret(0); | 2648 __ ret(0); |
2658 | 2649 |
2659 // Handling of exception. | 2650 // Handling of exception. |
2660 __ bind(&exception_returned); | 2651 __ bind(&exception_returned); |
2661 | 2652 |
2662 // Retrieve the pending exception. | 2653 // Retrieve the pending exception. |
2663 __ mov(eax, Operand::StaticVariable(pending_exception_address)); | 2654 __ mov(eax, Operand::StaticVariable(pending_exception_address)); |
2664 | 2655 |
2665 // Clear the pending exception. | 2656 // Clear the pending exception. |
2666 __ mov(edx, Immediate(isolate->factory()->the_hole_value())); | 2657 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); |
2667 __ mov(Operand::StaticVariable(pending_exception_address), edx); | 2658 __ mov(Operand::StaticVariable(pending_exception_address), edx); |
2668 | 2659 |
2669 // Special handling of termination exceptions which are uncatchable | 2660 // Special handling of termination exceptions which are uncatchable |
2670 // by javascript code. | 2661 // by javascript code. |
2671 Label throw_termination_exception; | 2662 Label throw_termination_exception; |
2672 __ cmp(eax, isolate->factory()->termination_exception()); | 2663 __ cmp(eax, isolate()->factory()->termination_exception()); |
2673 __ j(equal, &throw_termination_exception); | 2664 __ j(equal, &throw_termination_exception); |
2674 | 2665 |
2675 // Handle normal exception. | 2666 // Handle normal exception. |
2676 __ Throw(eax); | 2667 __ Throw(eax); |
2677 | 2668 |
2678 __ bind(&throw_termination_exception); | 2669 __ bind(&throw_termination_exception); |
2679 __ ThrowUncatchable(eax); | 2670 __ ThrowUncatchable(eax); |
2680 } | 2671 } |
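The three outgoing stack slots written just before __ call(ebx) above (argc at esp+0, argv at esp+4, the isolate at esp+8) suggest the C-side signature the stub targets. A hedged sketch of that prototype; the names are illustrative, not the declarations V8 actually uses:

class Isolate;
class Object;

// Illustrative only: the shape implied by the argument setup above.
typedef Object* (*CEntryTargetSketch)(int argc, Object** argv,
                                      Isolate* isolate);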
2681 | 2672 |
2682 | 2673 |
(...skipping 10 matching lines...) | |
2693 // Push marker in two places. | 2684 // Push marker in two places. |
2694 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 2685 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
2695 __ push(Immediate(Smi::FromInt(marker))); // context slot | 2686 __ push(Immediate(Smi::FromInt(marker))); // context slot |
2696 __ push(Immediate(Smi::FromInt(marker))); // function slot | 2687 __ push(Immediate(Smi::FromInt(marker))); // function slot |
2697 // Save callee-saved registers (C calling conventions). | 2688 // Save callee-saved registers (C calling conventions). |
2698 __ push(edi); | 2689 __ push(edi); |
2699 __ push(esi); | 2690 __ push(esi); |
2700 __ push(ebx); | 2691 __ push(ebx); |
2701 | 2692 |
2702 // Save copies of the top frame descriptor on the stack. | 2693 // Save copies of the top frame descriptor on the stack. |
2703 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate()); | 2694 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate()); |
2704 __ push(Operand::StaticVariable(c_entry_fp)); | 2695 __ push(Operand::StaticVariable(c_entry_fp)); |
2705 | 2696 |
2706 // If this is the outermost JS call, set js_entry_sp value. | 2697 // If this is the outermost JS call, set js_entry_sp value. |
2707 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, | 2698 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); |
2708 masm->isolate()); | |
2709 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); | 2699 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); |
2710 __ j(not_equal, &not_outermost_js, Label::kNear); | 2700 __ j(not_equal, &not_outermost_js, Label::kNear); |
2711 __ mov(Operand::StaticVariable(js_entry_sp), ebp); | 2701 __ mov(Operand::StaticVariable(js_entry_sp), ebp); |
2712 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); | 2702 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); |
2713 __ jmp(&invoke, Label::kNear); | 2703 __ jmp(&invoke, Label::kNear); |
2714 __ bind(&not_outermost_js); | 2704 __ bind(&not_outermost_js); |
2715 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); | 2705 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); |
2716 | 2706 |
2717 // Jump to a faked try block that does the invoke, with a faked catch | 2707 // Jump to a faked try block that does the invoke, with a faked catch |
2718 // block that sets the pending exception. | 2708 // block that sets the pending exception. |
2719 __ jmp(&invoke); | 2709 __ jmp(&invoke); |
2720 __ bind(&handler_entry); | 2710 __ bind(&handler_entry); |
2721 handler_offset_ = handler_entry.pos(); | 2711 handler_offset_ = handler_entry.pos(); |
2722 // Caught exception: Store result (exception) in the pending exception | 2712 // Caught exception: Store result (exception) in the pending exception |
2723 // field in the JSEnv and return a failure sentinel. | 2713 // field in the JSEnv and return a failure sentinel. |
2724 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, | 2714 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, |
2725 masm->isolate()); | 2715 isolate()); |
2726 __ mov(Operand::StaticVariable(pending_exception), eax); | 2716 __ mov(Operand::StaticVariable(pending_exception), eax); |
2727 __ mov(eax, Immediate(masm->isolate()->factory()->exception())); | 2717 __ mov(eax, Immediate(isolate()->factory()->exception())); |
2728 __ jmp(&exit); | 2718 __ jmp(&exit); |
2729 | 2719 |
2730 // Invoke: Link this frame into the handler chain. There's only one | 2720 // Invoke: Link this frame into the handler chain. There's only one |
2731 // handler block in this code object, so its index is 0. | 2721 // handler block in this code object, so its index is 0. |
2732 __ bind(&invoke); | 2722 __ bind(&invoke); |
2733 __ PushTryHandler(StackHandler::JS_ENTRY, 0); | 2723 __ PushTryHandler(StackHandler::JS_ENTRY, 0); |
2734 | 2724 |
2735 // Clear any pending exceptions. | 2725 // Clear any pending exceptions. |
2736 __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); | 2726 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); |
2737 __ mov(Operand::StaticVariable(pending_exception), edx); | 2727 __ mov(Operand::StaticVariable(pending_exception), edx); |
2738 | 2728 |
2739 // Fake a receiver (NULL). | 2729 // Fake a receiver (NULL). |
2740 __ push(Immediate(0)); // receiver | 2730 __ push(Immediate(0)); // receiver |
2741 | 2731 |
2742 // Invoke the function by calling through JS entry trampoline builtin and | 2732 // Invoke the function by calling through JS entry trampoline builtin and |
2743 // pop the faked function when we return. Notice that we cannot store a | 2733 // pop the faked function when we return. Notice that we cannot store a |
2744 // reference to the trampoline code directly in this stub, because the | 2734 // reference to the trampoline code directly in this stub, because the |
2745 // builtin stubs may not have been generated yet. | 2735 // builtin stubs may not have been generated yet. |
2746 if (is_construct) { | 2736 if (is_construct) { |
2747 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, | 2737 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, |
2748 masm->isolate()); | 2738 isolate()); |
2749 __ mov(edx, Immediate(construct_entry)); | 2739 __ mov(edx, Immediate(construct_entry)); |
2750 } else { | 2740 } else { |
2751 ExternalReference entry(Builtins::kJSEntryTrampoline, | 2741 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); |
2752 masm->isolate()); | |
2753 __ mov(edx, Immediate(entry)); | 2742 __ mov(edx, Immediate(entry)); |
2754 } | 2743 } |
2755 __ mov(edx, Operand(edx, 0)); // deref address | 2744 __ mov(edx, Operand(edx, 0)); // deref address |
2756 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); | 2745 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); |
2757 __ call(edx); | 2746 __ call(edx); |
2758 | 2747 |
2759 // Unlink this frame from the handler chain. | 2748 // Unlink this frame from the handler chain. |
2760 __ PopTryHandler(); | 2749 __ PopTryHandler(); |
2761 | 2750 |
2762 __ bind(&exit); | 2751 __ bind(&exit); |
2763 // Check if the current stack frame is marked as the outermost JS frame. | 2752 // Check if the current stack frame is marked as the outermost JS frame. |
2764 __ pop(ebx); | 2753 __ pop(ebx); |
2765 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); | 2754 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); |
2766 __ j(not_equal, &not_outermost_js_2); | 2755 __ j(not_equal, &not_outermost_js_2); |
2767 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); | 2756 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); |
2768 __ bind(&not_outermost_js_2); | 2757 __ bind(&not_outermost_js_2); |
2769 | 2758 |
2770 // Restore the top frame descriptor from the stack. | 2759 // Restore the top frame descriptor from the stack. |
2771 __ pop(Operand::StaticVariable(ExternalReference( | 2760 __ pop(Operand::StaticVariable(ExternalReference( |
2772 Isolate::kCEntryFPAddress, | 2761 Isolate::kCEntryFPAddress, isolate()))); |
2773 masm->isolate()))); | |
2774 | 2762 |
2775 // Restore callee-saved registers (C calling conventions). | 2763 // Restore callee-saved registers (C calling conventions). |
2776 __ pop(ebx); | 2764 __ pop(ebx); |
2777 __ pop(esi); | 2765 __ pop(esi); |
2778 __ pop(edi); | 2766 __ pop(edi); |
2779 __ add(esp, Immediate(2 * kPointerSize)); // remove markers | 2767 __ add(esp, Immediate(2 * kPointerSize)); // remove markers |
2780 | 2768 |
2781 // Restore frame pointer and return. | 2769 // Restore frame pointer and return. |
2782 __ pop(ebp); | 2770 __ pop(ebp); |
2783 __ ret(0); | 2771 __ ret(0); |
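A short sketch of the js_entry_sp bookkeeping the entry stub performs above, written under the assumption that only the outermost JS entry on a thread's stack owns that slot (illustration, not V8 code):

// Mirrors the OUTERMOST_JSENTRY_FRAME marker logic: the first JS entry records
// its frame pointer in js_entry_sp and is the only frame that clears it again.
void EnterJSSketch(void** js_entry_sp, void* frame_pointer, bool* outermost) {
  *outermost = (*js_entry_sp == nullptr);
  if (*outermost) *js_entry_sp = frame_pointer;
}

void LeaveJSSketch(void** js_entry_sp, bool outermost) {
  if (outermost) *js_entry_sp = nullptr;
}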
(...skipping 92 matching lines...) | |
2876 __ mov(Operand(scratch, 0), map); | 2864 __ mov(Operand(scratch, 0), map); |
2877 } | 2865 } |
2878 | 2866 |
2879 // Loop through the prototype chain of the object looking for the function | 2867 // Loop through the prototype chain of the object looking for the function |
2880 // prototype. | 2868 // prototype. |
2881 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); | 2869 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); |
2882 Label loop, is_instance, is_not_instance; | 2870 Label loop, is_instance, is_not_instance; |
2883 __ bind(&loop); | 2871 __ bind(&loop); |
2884 __ cmp(scratch, prototype); | 2872 __ cmp(scratch, prototype); |
2885 __ j(equal, &is_instance, Label::kNear); | 2873 __ j(equal, &is_instance, Label::kNear); |
2886 Factory* factory = masm->isolate()->factory(); | 2874 Factory* factory = isolate()->factory(); |
2887 __ cmp(scratch, Immediate(factory->null_value())); | 2875 __ cmp(scratch, Immediate(factory->null_value())); |
2888 __ j(equal, &is_not_instance, Label::kNear); | 2876 __ j(equal, &is_not_instance, Label::kNear); |
2889 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 2877 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
2890 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); | 2878 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); |
2891 __ jmp(&loop); | 2879 __ jmp(&loop); |
2892 | 2880 |
2893 __ bind(&is_instance); | 2881 __ bind(&is_instance); |
2894 if (!HasCallSiteInlineCheck()) { | 2882 if (!HasCallSiteInlineCheck()) { |
2895 __ mov(eax, Immediate(0)); | 2883 __ mov(eax, Immediate(0)); |
2896 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex); | 2884 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex); |
(...skipping 391 matching lines...) | |
3288 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index. | 3276 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index. |
3289 __ JumpIfNotSmi(edx, &runtime); | 3277 __ JumpIfNotSmi(edx, &runtime); |
3290 __ sub(ecx, edx); | 3278 __ sub(ecx, edx); |
3291 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset)); | 3279 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset)); |
3292 Label not_original_string; | 3280 Label not_original_string; |
3293 // Shorter than original string's length: an actual substring. | 3281 // Shorter than original string's length: an actual substring. |
3294 __ j(below, &not_original_string, Label::kNear); | 3282 __ j(below, &not_original_string, Label::kNear); |
3295 // Longer than original string's length or negative: unsafe arguments. | 3283 // Longer than original string's length or negative: unsafe arguments. |
3296 __ j(above, &runtime); | 3284 __ j(above, &runtime); |
3297 // Return original string. | 3285 // Return original string. |
3298 Counters* counters = masm->isolate()->counters(); | 3286 Counters* counters = isolate()->counters(); |
3299 __ IncrementCounter(counters->sub_string_native(), 1); | 3287 __ IncrementCounter(counters->sub_string_native(), 1); |
3300 __ ret(3 * kPointerSize); | 3288 __ ret(3 * kPointerSize); |
3301 __ bind(&not_original_string); | 3289 __ bind(&not_original_string); |
3302 | 3290 |
3303 Label single_char; | 3291 Label single_char; |
3304 __ cmp(ecx, Immediate(Smi::FromInt(1))); | 3292 __ cmp(ecx, Immediate(Smi::FromInt(1))); |
3305 __ j(equal, &single_char); | 3293 __ j(equal, &single_char); |
3306 | 3294 |
3307 // eax: string | 3295 // eax: string |
3308 // ebx: instance type | 3296 // ebx: instance type |
3309 // ecx: sub string length (smi) | 3297 // ecx: sub string length (smi) |
3310 // edx: from index (smi) | 3298 // edx: from index (smi) |
3311 // Deal with different string types: update the index if necessary | 3299 // Deal with different string types: update the index if necessary |
3312 // and put the underlying string into edi. | 3300 // and put the underlying string into edi. |
3313 Label underlying_unpacked, sliced_string, seq_or_external_string; | 3301 Label underlying_unpacked, sliced_string, seq_or_external_string; |
3314 // If the string is not indirect, it can only be sequential or external. | 3302 // If the string is not indirect, it can only be sequential or external. |
3315 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); | 3303 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); |
3316 STATIC_ASSERT(kIsIndirectStringMask != 0); | 3304 STATIC_ASSERT(kIsIndirectStringMask != 0); |
3317 __ test(ebx, Immediate(kIsIndirectStringMask)); | 3305 __ test(ebx, Immediate(kIsIndirectStringMask)); |
3318 __ j(zero, &seq_or_external_string, Label::kNear); | 3306 __ j(zero, &seq_or_external_string, Label::kNear); |
3319 | 3307 |
3320 Factory* factory = masm->isolate()->factory(); | 3308 Factory* factory = isolate()->factory(); |
3321 __ test(ebx, Immediate(kSlicedNotConsMask)); | 3309 __ test(ebx, Immediate(kSlicedNotConsMask)); |
3322 __ j(not_zero, &sliced_string, Label::kNear); | 3310 __ j(not_zero, &sliced_string, Label::kNear); |
3323 // Cons string. Check whether it is flat, then fetch first part. | 3311 // Cons string. Check whether it is flat, then fetch first part. |
3324 // Flat cons strings have an empty second part. | 3312 // Flat cons strings have an empty second part. |
3325 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), | 3313 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), |
3326 factory->empty_string()); | 3314 factory->empty_string()); |
3327 __ j(not_equal, &runtime); | 3315 __ j(not_equal, &runtime); |
3328 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset)); | 3316 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset)); |
3329 // Update instance type. | 3317 // Update instance type. |
3330 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset)); | 3318 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset)); |
(...skipping 299 matching lines...) | |
3630 | 3618 |
3631 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left | 3619 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left |
3632 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right | 3620 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right |
3633 | 3621 |
3634 Label not_same; | 3622 Label not_same; |
3635 __ cmp(edx, eax); | 3623 __ cmp(edx, eax); |
3636 __ j(not_equal, &not_same, Label::kNear); | 3624 __ j(not_equal, &not_same, Label::kNear); |
3637 STATIC_ASSERT(EQUAL == 0); | 3625 STATIC_ASSERT(EQUAL == 0); |
3638 STATIC_ASSERT(kSmiTag == 0); | 3626 STATIC_ASSERT(kSmiTag == 0); |
3639 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); | 3627 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); |
3640 __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1); | 3628 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1); |
3641 __ ret(2 * kPointerSize); | 3629 __ ret(2 * kPointerSize); |
3642 | 3630 |
3643 __ bind(&not_same); | 3631 __ bind(&not_same); |
3644 | 3632 |
3645 // Check that both objects are sequential ASCII strings. | 3633 // Check that both objects are sequential ASCII strings. |
3646 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime); | 3634 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime); |
3647 | 3635 |
3648 // Compare flat ASCII strings. | 3636 // Compare flat ASCII strings. |
3649 // Drop arguments from the stack. | 3637 // Drop arguments from the stack. |
3650 __ pop(ecx); | 3638 __ pop(ecx); |
3651 __ add(esp, Immediate(2 * kPointerSize)); | 3639 __ add(esp, Immediate(2 * kPointerSize)); |
3652 __ push(ecx); | 3640 __ push(ecx); |
3653 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi); | 3641 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi); |
3654 | 3642 |
3655 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 3643 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
3656 // tagged as a small integer. | 3644 // tagged as a small integer. |
3657 __ bind(&runtime); | 3645 __ bind(&runtime); |
3658 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); | 3646 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); |
3659 } | 3647 } |
3660 | 3648 |
3661 | 3649 |
3662 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 3650 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
3663 // ----------- S t a t e ------------- | 3651 // ----------- S t a t e ------------- |
3664 // -- edx : left | 3652 // -- edx : left |
3665 // -- eax : right | 3653 // -- eax : right |
3666 // -- esp[0] : return address | 3654 // -- esp[0] : return address |
3667 // ----------------------------------- | 3655 // ----------------------------------- |
3668 Isolate* isolate = masm->isolate(); | |
3669 | |
Michael Starzinger 2014/04/23 13:58:21: nit: Can we get the empty newline back for readability?
Sven Panne 2014/04/24 06:05:34: Done.
3670 // Load ecx with the allocation site. We stick an undefined dummy value here | 3656 // Load ecx with the allocation site. We stick an undefined dummy value here |
3671 // and replace it with the real allocation site later when we instantiate this | 3657 // and replace it with the real allocation site later when we instantiate this |
3672 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). | 3658 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). |
3673 __ mov(ecx, handle(isolate->heap()->undefined_value())); | 3659 __ mov(ecx, handle(isolate()->heap()->undefined_value())); |
3674 | 3660 |
3675 // Make sure that we actually patched the allocation site. | 3661 // Make sure that we actually patched the allocation site. |
3676 if (FLAG_debug_code) { | 3662 if (FLAG_debug_code) { |
3677 __ test(ecx, Immediate(kSmiTagMask)); | 3663 __ test(ecx, Immediate(kSmiTagMask)); |
3678 __ Assert(not_equal, kExpectedAllocationSite); | 3664 __ Assert(not_equal, kExpectedAllocationSite); |
3679 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), | 3665 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), |
3680 isolate->factory()->allocation_site_map()); | 3666 isolate()->factory()->allocation_site_map()); |
3681 __ Assert(equal, kExpectedAllocationSite); | 3667 __ Assert(equal, kExpectedAllocationSite); |
3682 } | 3668 } |
3683 | 3669 |
3684 // Tail call into the stub that handles binary operations with allocation | 3670 // Tail call into the stub that handles binary operations with allocation |
3685 // sites. | 3671 // sites. |
3686 BinaryOpWithAllocationSiteStub stub(state_); | 3672 BinaryOpWithAllocationSiteStub stub(isolate(), state_); |
3687 __ TailCallStub(&stub); | 3673 __ TailCallStub(&stub); |
3688 } | 3674 } |
3689 | 3675 |
3690 | 3676 |
3691 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 3677 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
3692 ASSERT(state_ == CompareIC::SMI); | 3678 ASSERT(state_ == CompareIC::SMI); |
3693 Label miss; | 3679 Label miss; |
3694 __ mov(ecx, edx); | 3680 __ mov(ecx, edx); |
3695 __ or_(ecx, eax); | 3681 __ or_(ecx, eax); |
3696 __ JumpIfNotSmi(ecx, &miss, Label::kNear); | 3682 __ JumpIfNotSmi(ecx, &miss, Label::kNear); |
(...skipping 34 matching lines...) | |
3731 // Inlining the double comparison and falling back to the general compare | 3717 // Inlining the double comparison and falling back to the general compare |
3732 // stub if NaN is involved or SSE2 or CMOV is unsupported. | 3718 // stub if NaN is involved or SSE2 or CMOV is unsupported. |
3733 if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) { | 3719 if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) { |
3734 CpuFeatureScope scope1(masm, SSE2); | 3720 CpuFeatureScope scope1(masm, SSE2); |
3735 CpuFeatureScope scope2(masm, CMOV); | 3721 CpuFeatureScope scope2(masm, CMOV); |
3736 | 3722 |
3737 // Load left and right operand. | 3723 // Load left and right operand. |
3738 Label done, left, left_smi, right_smi; | 3724 Label done, left, left_smi, right_smi; |
3739 __ JumpIfSmi(eax, &right_smi, Label::kNear); | 3725 __ JumpIfSmi(eax, &right_smi, Label::kNear); |
3740 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), | 3726 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), |
3741 masm->isolate()->factory()->heap_number_map()); | 3727 isolate()->factory()->heap_number_map()); |
3742 __ j(not_equal, &maybe_undefined1, Label::kNear); | 3728 __ j(not_equal, &maybe_undefined1, Label::kNear); |
3743 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 3729 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
3744 __ jmp(&left, Label::kNear); | 3730 __ jmp(&left, Label::kNear); |
3745 __ bind(&right_smi); | 3731 __ bind(&right_smi); |
3746 __ mov(ecx, eax); // Can't clobber eax because we can still jump away. | 3732 __ mov(ecx, eax); // Can't clobber eax because we can still jump away. |
3747 __ SmiUntag(ecx); | 3733 __ SmiUntag(ecx); |
3748 __ Cvtsi2sd(xmm1, ecx); | 3734 __ Cvtsi2sd(xmm1, ecx); |
3749 | 3735 |
3750 __ bind(&left); | 3736 __ bind(&left); |
3751 __ JumpIfSmi(edx, &left_smi, Label::kNear); | 3737 __ JumpIfSmi(edx, &left_smi, Label::kNear); |
3752 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), | 3738 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), |
3753 masm->isolate()->factory()->heap_number_map()); | 3739 isolate()->factory()->heap_number_map()); |
3754 __ j(not_equal, &maybe_undefined2, Label::kNear); | 3740 __ j(not_equal, &maybe_undefined2, Label::kNear); |
3755 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); | 3741 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); |
3756 __ jmp(&done); | 3742 __ jmp(&done); |
3757 __ bind(&left_smi); | 3743 __ bind(&left_smi); |
3758 __ mov(ecx, edx); // Can't clobber edx because we can still jump away. | 3744 __ mov(ecx, edx); // Can't clobber edx because we can still jump away. |
3759 __ SmiUntag(ecx); | 3745 __ SmiUntag(ecx); |
3760 __ Cvtsi2sd(xmm0, ecx); | 3746 __ Cvtsi2sd(xmm0, ecx); |
3761 | 3747 |
3762 __ bind(&done); | 3748 __ bind(&done); |
3763 // Compare operands. | 3749 // Compare operands. |
3764 __ ucomisd(xmm0, xmm1); | 3750 __ ucomisd(xmm0, xmm1); |
3765 | 3751 |
3766 // Don't base result on EFLAGS when a NaN is involved. | 3752 // Don't base result on EFLAGS when a NaN is involved. |
3767 __ j(parity_even, &unordered, Label::kNear); | 3753 __ j(parity_even, &unordered, Label::kNear); |
3768 | 3754 |
3769 // Return a result of -1, 0, or 1, based on EFLAGS. | 3755 // Return a result of -1, 0, or 1, based on EFLAGS. |
3770 // Performing mov, because xor would destroy the flag register. | 3756 // Performing mov, because xor would destroy the flag register. |
3771 __ mov(eax, 0); // equal | 3757 __ mov(eax, 0); // equal |
3772 __ mov(ecx, Immediate(Smi::FromInt(1))); | 3758 __ mov(ecx, Immediate(Smi::FromInt(1))); |
3773 __ cmov(above, eax, ecx); | 3759 __ cmov(above, eax, ecx); |
3774 __ mov(ecx, Immediate(Smi::FromInt(-1))); | 3760 __ mov(ecx, Immediate(Smi::FromInt(-1))); |
3775 __ cmov(below, eax, ecx); | 3761 __ cmov(below, eax, ecx); |
3776 __ ret(0); | 3762 __ ret(0); |
3777 } else { | 3763 } else { |
3778 __ mov(ecx, edx); | 3764 __ mov(ecx, edx); |
3779 __ and_(ecx, eax); | 3765 __ and_(ecx, eax); |
3780 __ JumpIfSmi(ecx, &generic_stub, Label::kNear); | 3766 __ JumpIfSmi(ecx, &generic_stub, Label::kNear); |
3781 | 3767 |
3782 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), | 3768 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), |
3783 masm->isolate()->factory()->heap_number_map()); | 3769 isolate()->factory()->heap_number_map()); |
3784 __ j(not_equal, &maybe_undefined1, Label::kNear); | 3770 __ j(not_equal, &maybe_undefined1, Label::kNear); |
3785 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), | 3771 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), |
3786 masm->isolate()->factory()->heap_number_map()); | 3772 isolate()->factory()->heap_number_map()); |
3787 __ j(not_equal, &maybe_undefined2, Label::kNear); | 3773 __ j(not_equal, &maybe_undefined2, Label::kNear); |
3788 } | 3774 } |
3789 | 3775 |
3790 __ bind(&unordered); | 3776 __ bind(&unordered); |
3791 __ bind(&generic_stub); | 3777 __ bind(&generic_stub); |
3792 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, | 3778 ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC, |
3793 CompareIC::GENERIC); | 3779 CompareIC::GENERIC); |
3794 __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 3780 __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); |
3795 | 3781 |
3796 __ bind(&maybe_undefined1); | 3782 __ bind(&maybe_undefined1); |
3797 if (Token::IsOrderedRelationalCompareOp(op_)) { | 3783 if (Token::IsOrderedRelationalCompareOp(op_)) { |
3798 __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value())); | 3784 __ cmp(eax, Immediate(isolate()->factory()->undefined_value())); |
3799 __ j(not_equal, &miss); | 3785 __ j(not_equal, &miss); |
3800 __ JumpIfSmi(edx, &unordered); | 3786 __ JumpIfSmi(edx, &unordered); |
3801 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); | 3787 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); |
3802 __ j(not_equal, &maybe_undefined2, Label::kNear); | 3788 __ j(not_equal, &maybe_undefined2, Label::kNear); |
3803 __ jmp(&unordered); | 3789 __ jmp(&unordered); |
3804 } | 3790 } |
3805 | 3791 |
3806 __ bind(&maybe_undefined2); | 3792 __ bind(&maybe_undefined2); |
3807 if (Token::IsOrderedRelationalCompareOp(op_)) { | 3793 if (Token::IsOrderedRelationalCompareOp(op_)) { |
3808 __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value())); | 3794 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); |
3809 __ j(equal, &unordered); | 3795 __ j(equal, &unordered); |
3810 } | 3796 } |
3811 | 3797 |
3812 __ bind(&miss); | 3798 __ bind(&miss); |
3813 GenerateMiss(masm); | 3799 GenerateMiss(masm); |
3814 } | 3800 } |
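For readers following the SSE2 path in GenerateNumbers above, a hedged C++ sketch of the contract the ucomisd/cmov sequence implements; the real stub returns Smis and bails out to the generic stub on NaN, so treat this purely as an illustration:

// -1/0/1 comparison with an explicit unordered (NaN) escape, matching the
// parity_even branch above (xmm0 = left, xmm1 = right).
int CompareDoublesSketch(double left, double right, bool* unordered) {
  *unordered = (left != left) || (right != right);  // NaN never equals itself.
  if (*unordered) return 0;      // the stub jumps to its unordered label here.
  if (left > right) return 1;    // cmov(above)
  if (left < right) return -1;   // cmov(below)
  return 0;                      // eax is pre-loaded with 0 for the equal case.
}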
3815 | 3801 |
3816 | 3802 |
3817 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { | 3803 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { |
3818 ASSERT(state_ == CompareIC::INTERNALIZED_STRING); | 3804 ASSERT(state_ == CompareIC::INTERNALIZED_STRING); |
(...skipping 214 matching lines...) | |
4033 | 4019 |
4034 __ bind(&miss); | 4020 __ bind(&miss); |
4035 GenerateMiss(masm); | 4021 GenerateMiss(masm); |
4036 } | 4022 } |
4037 | 4023 |
4038 | 4024 |
4039 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 4025 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
4040 { | 4026 { |
4041 // Call the runtime system in a fresh internal frame. | 4027 // Call the runtime system in a fresh internal frame. |
4042 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), | 4028 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), |
4043 masm->isolate()); | 4029 isolate()); |
4044 FrameScope scope(masm, StackFrame::INTERNAL); | 4030 FrameScope scope(masm, StackFrame::INTERNAL); |
4045 __ push(edx); // Preserve edx and eax. | 4031 __ push(edx); // Preserve edx and eax. |
4046 __ push(eax); | 4032 __ push(eax); |
4047 __ push(edx); // And also use them as the arguments. | 4033 __ push(edx); // And also use them as the arguments. |
4048 __ push(eax); | 4034 __ push(eax); |
4049 __ push(Immediate(Smi::FromInt(op_))); | 4035 __ push(Immediate(Smi::FromInt(op_))); |
4050 __ CallExternalReference(miss, 3); | 4036 __ CallExternalReference(miss, 3); |
4051 // Compute the entry point of the rewritten stub. | 4037 // Compute the entry point of the rewritten stub. |
4052 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); | 4038 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); |
4053 __ pop(eax); | 4039 __ pop(eax); |
(...skipping 53 matching lines...) | |
4107 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value()); | 4093 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value()); |
4108 __ j(equal, &good, Label::kNear); | 4094 __ j(equal, &good, Label::kNear); |
4109 | 4095 |
4110 // Check if the entry name is not a unique name. | 4096 // Check if the entry name is not a unique name. |
4111 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); | 4097 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); |
4112 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), | 4098 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
4113 miss); | 4099 miss); |
4114 __ bind(&good); | 4100 __ bind(&good); |
4115 } | 4101 } |
4116 | 4102 |
4117 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); | 4103 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0, |
4104 NEGATIVE_LOOKUP); | |
4118 __ push(Immediate(Handle<Object>(name))); | 4105 __ push(Immediate(Handle<Object>(name))); |
4119 __ push(Immediate(name->Hash())); | 4106 __ push(Immediate(name->Hash())); |
4120 __ CallStub(&stub); | 4107 __ CallStub(&stub); |
4121 __ test(r0, r0); | 4108 __ test(r0, r0); |
4122 __ j(not_zero, miss); | 4109 __ j(not_zero, miss); |
4123 __ jmp(done); | 4110 __ jmp(done); |
4124 } | 4111 } |
4125 | 4112 |
4126 | 4113 |
4127 // Probe the name dictionary in the |elements| register. Jump to the | 4114 // Probe the name dictionary in the |elements| register. Jump to the |
(...skipping 35 matching lines...) | |
4163 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 | 4150 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 |
4164 | 4151 |
4165 // Check if the key is identical to the name. | 4152 // Check if the key is identical to the name. |
4166 __ cmp(name, Operand(elements, | 4153 __ cmp(name, Operand(elements, |
4167 r0, | 4154 r0, |
4168 times_4, | 4155 times_4, |
4169 kElementsStartOffset - kHeapObjectTag)); | 4156 kElementsStartOffset - kHeapObjectTag)); |
4170 __ j(equal, done); | 4157 __ j(equal, done); |
4171 } | 4158 } |
4172 | 4159 |
4173 NameDictionaryLookupStub stub(elements, r1, r0, POSITIVE_LOOKUP); | 4160 NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0, |
4161 POSITIVE_LOOKUP); | |
4174 __ push(name); | 4162 __ push(name); |
4175 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset)); | 4163 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset)); |
4176 __ shr(r0, Name::kHashShift); | 4164 __ shr(r0, Name::kHashShift); |
4177 __ push(r0); | 4165 __ push(r0); |
4178 __ CallStub(&stub); | 4166 __ CallStub(&stub); |
4179 | 4167 |
4180 __ test(r1, r1); | 4168 __ test(r1, r1); |
4181 __ j(zero, miss); | 4169 __ j(zero, miss); |
4182 __ jmp(done); | 4170 __ jmp(done); |
4183 } | 4171 } |
(...skipping 39 matching lines...) | |
4223 // Scale the index by multiplying by the entry size. | 4211 // Scale the index by multiplying by the entry size. |
4224 ASSERT(NameDictionary::kEntrySize == 3); | 4212 ASSERT(NameDictionary::kEntrySize == 3); |
4225 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 4213 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
4226 | 4214 |
4227 // Having undefined at this place means the name is not contained. | 4215 // Having undefined at this place means the name is not contained. |
4228 ASSERT_EQ(kSmiTagSize, 1); | 4216 ASSERT_EQ(kSmiTagSize, 1); |
4229 __ mov(scratch, Operand(dictionary_, | 4217 __ mov(scratch, Operand(dictionary_, |
4230 index_, | 4218 index_, |
4231 times_pointer_size, | 4219 times_pointer_size, |
4232 kElementsStartOffset - kHeapObjectTag)); | 4220 kElementsStartOffset - kHeapObjectTag)); |
4233 __ cmp(scratch, masm->isolate()->factory()->undefined_value()); | 4221 __ cmp(scratch, isolate()->factory()->undefined_value()); |
4234 __ j(equal, &not_in_dictionary); | 4222 __ j(equal, &not_in_dictionary); |
4235 | 4223 |
4236 // Stop if found the property. | 4224 // Stop if found the property. |
4237 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); | 4225 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); |
4238 __ j(equal, &in_dictionary); | 4226 __ j(equal, &in_dictionary); |
4239 | 4227 |
4240 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 4228 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
4241 // If we hit a key that is not a unique name during negative | 4229 // If we hit a key that is not a unique name during negative |
4242 // lookup we have to bailout as this key might be equal to the | 4230 // lookup we have to bailout as this key might be equal to the |
4243 // key we are looking for. | 4231 // key we are looking for. |
(...skipping 22 matching lines...) | |
4266 | 4254 |
4267 __ bind(&not_in_dictionary); | 4255 __ bind(&not_in_dictionary); |
4268 __ mov(result_, Immediate(0)); | 4256 __ mov(result_, Immediate(0)); |
4269 __ Drop(1); | 4257 __ Drop(1); |
4270 __ ret(2 * kPointerSize); | 4258 __ ret(2 * kPointerSize); |
4271 } | 4259 } |
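A self-contained sketch of the probing loop NameDictionaryLookupStub::Generate emits above. It is deliberately simplified: the real table packs NameDictionary::kEntrySize slots per entry and perturbs the hash on each probe, so the layout and probe sequence below are assumptions for illustration only:

#include <cstdint>
#include <string>
#include <vector>

struct EntrySketch { bool used; std::string key; };

// Returns the entry index, or -1 as soon as an empty (undefined) slot is hit.
int ProbeSketch(const std::vector<EntrySketch>& table,
                const std::string& name, uint32_t hash) {
  const uint32_t mask = static_cast<uint32_t>(table.size()) - 1;  // power of two
  for (uint32_t i = 0; i < table.size(); ++i) {
    const uint32_t entry = (hash + i) & mask;  // simplified probe sequence
    if (!table[entry].used) return -1;         // empty slot: definitely absent
    if (table[entry].key == name) return static_cast<int>(entry);  // found
  }
  return -1;
}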
4272 | 4260 |
4273 | 4261 |
4274 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 4262 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
4275 Isolate* isolate) { | 4263 Isolate* isolate) { |
4276 StoreBufferOverflowStub stub(kDontSaveFPRegs); | 4264 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs); |
4277 stub.GetCode(isolate); | 4265 stub.GetCode(isolate); |
4278 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { | 4266 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { |
4279 StoreBufferOverflowStub stub2(kSaveFPRegs); | 4267 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); |
4280 stub2.GetCode(isolate); | 4268 stub2.GetCode(isolate); |
4281 } | 4269 } |
4282 } | 4270 } |
4283 | 4271 |
4284 | 4272 |
4285 bool CodeStub::CanUseFPRegisters() { | 4273 bool CodeStub::CanUseFPRegisters() { |
4286 return CpuFeatures::IsSupported(SSE2); | 4274 return CpuFeatures::IsSupported(SSE2); |
4287 } | 4275 } |
4288 | 4276 |
4289 | 4277 |
(...skipping 79 matching lines...) | |
4369 } | 4357 } |
4370 | 4358 |
4371 | 4359 |
4372 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { | 4360 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { |
4373 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); | 4361 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); |
4374 int argument_count = 3; | 4362 int argument_count = 3; |
4375 __ PrepareCallCFunction(argument_count, regs_.scratch0()); | 4363 __ PrepareCallCFunction(argument_count, regs_.scratch0()); |
4376 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); | 4364 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); |
4377 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. | 4365 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. |
4378 __ mov(Operand(esp, 2 * kPointerSize), | 4366 __ mov(Operand(esp, 2 * kPointerSize), |
4379 Immediate(ExternalReference::isolate_address(masm->isolate()))); | 4367 Immediate(ExternalReference::isolate_address(isolate()))); |
4380 | 4368 |
4381 AllowExternalCallThatCantCauseGC scope(masm); | 4369 AllowExternalCallThatCantCauseGC scope(masm); |
4382 __ CallCFunction( | 4370 __ CallCFunction( |
4383 ExternalReference::incremental_marking_record_write_function( | 4371 ExternalReference::incremental_marking_record_write_function(isolate()), |
4384 masm->isolate()), | |
4385 argument_count); | 4372 argument_count); |
4386 | 4373 |
4387 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); | 4374 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); |
4388 } | 4375 } |
4389 | 4376 |
4390 | 4377 |
4391 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( | 4378 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
4392 MacroAssembler* masm, | 4379 MacroAssembler* masm, |
4393 OnNoNeedToInformIncrementalMarker on_no_need, | 4380 OnNoNeedToInformIncrementalMarker on_no_need, |
4394 Mode mode) { | 4381 Mode mode) { |
(...skipping 166 matching lines...) | |
4561 edi, | 4548 edi, |
4562 xmm0, | 4549 xmm0, |
4563 &slow_elements_from_double, | 4550 &slow_elements_from_double, |
4564 false); | 4551 false); |
4565 __ pop(edx); | 4552 __ pop(edx); |
4566 __ ret(0); | 4553 __ ret(0); |
4567 } | 4554 } |
4568 | 4555 |
4569 | 4556 |
4570 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 4557 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
4571 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); | 4558 CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
4572 __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 4559 __ call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET); |
4573 int parameter_count_offset = | 4560 int parameter_count_offset = |
4574 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 4561 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
4575 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); | 4562 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); |
4576 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 4563 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
4577 __ pop(ecx); | 4564 __ pop(ecx); |
4578 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE | 4565 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE |
4579 ? kPointerSize | 4566 ? kPointerSize |
4580 : 0; | 4567 : 0; |
4581 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); | 4568 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); |
4582 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. | 4569 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. |
4583 } | 4570 } |
4584 | 4571 |
4585 | 4572 |
4586 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4573 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4587 if (masm->isolate()->function_entry_hook() != NULL) { | 4574 if (masm->isolate()->function_entry_hook() != NULL) { |
4588 ProfileEntryHookStub stub; | 4575 ProfileEntryHookStub stub(masm->isolate()); |
4589 masm->CallStub(&stub); | 4576 masm->CallStub(&stub); |
4590 } | 4577 } |
4591 } | 4578 } |
4592 | 4579 |
4593 | 4580 |
4594 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 4581 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
4595 // Save volatile registers. | 4582 // Save volatile registers. |
4596 const int kNumSavedRegisters = 3; | 4583 const int kNumSavedRegisters = 3; |
4597 __ push(eax); | 4584 __ push(eax); |
4598 __ push(ecx); | 4585 __ push(ecx); |
4599 __ push(edx); | 4586 __ push(edx); |
4600 | 4587 |
4601 // Calculate and push the original stack pointer. | 4588 // Calculate and push the original stack pointer. |
4602 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); | 4589 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); |
4603 __ push(eax); | 4590 __ push(eax); |
4604 | 4591 |
4605 // Retrieve our return address and use it to calculate the calling | 4592 // Retrieve our return address and use it to calculate the calling |
4606 // function's address. | 4593 // function's address. |
4607 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); | 4594 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); |
4608 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); | 4595 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); |
4609 __ push(eax); | 4596 __ push(eax); |
4610 | 4597 |
4611 // Call the entry hook. | 4598 // Call the entry hook. |
4612 ASSERT(masm->isolate()->function_entry_hook() != NULL); | 4599 ASSERT(isolate()->function_entry_hook() != NULL); |
4613 __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()), | 4600 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), |
4614 RelocInfo::RUNTIME_ENTRY); | 4601 RelocInfo::RUNTIME_ENTRY); |
4615 __ add(esp, Immediate(2 * kPointerSize)); | 4602 __ add(esp, Immediate(2 * kPointerSize)); |
4616 | 4603 |
4617 // Restore ecx. | 4604 // Restore ecx. |
4618 __ pop(edx); | 4605 __ pop(edx); |
4619 __ pop(ecx); | 4606 __ pop(ecx); |
4620 __ pop(eax); | 4607 __ pop(eax); |
4621 | 4608 |
4622 __ ret(0); | 4609 __ ret(0); |
4623 } | 4610 } |
4624 | 4611 |
4625 | 4612 |
4626 template<class T> | 4613 template<class T> |
4627 static void CreateArrayDispatch(MacroAssembler* masm, | 4614 static void CreateArrayDispatch(MacroAssembler* masm, |
4628 AllocationSiteOverrideMode mode) { | 4615 AllocationSiteOverrideMode mode) { |
4629 if (mode == DISABLE_ALLOCATION_SITES) { | 4616 if (mode == DISABLE_ALLOCATION_SITES) { |
4630 T stub(GetInitialFastElementsKind(), | 4617 T stub(masm->isolate(), |
4618 GetInitialFastElementsKind(), | |
4631 mode); | 4619 mode); |
4632 __ TailCallStub(&stub); | 4620 __ TailCallStub(&stub); |
4633 } else if (mode == DONT_OVERRIDE) { | 4621 } else if (mode == DONT_OVERRIDE) { |
4634 int last_index = GetSequenceIndexFromFastElementsKind( | 4622 int last_index = GetSequenceIndexFromFastElementsKind( |
4635 TERMINAL_FAST_ELEMENTS_KIND); | 4623 TERMINAL_FAST_ELEMENTS_KIND); |
4636 for (int i = 0; i <= last_index; ++i) { | 4624 for (int i = 0; i <= last_index; ++i) { |
4637 Label next; | 4625 Label next; |
4638 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 4626 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
4639 __ cmp(edx, kind); | 4627 __ cmp(edx, kind); |
4640 __ j(not_equal, &next); | 4628 __ j(not_equal, &next); |
4641 T stub(kind); | 4629 T stub(masm->isolate(), kind); |
4642 __ TailCallStub(&stub); | 4630 __ TailCallStub(&stub); |
4643 __ bind(&next); | 4631 __ bind(&next); |
4644 } | 4632 } |
4645 | 4633 |
4646 // If we reached this point there is a problem. | 4634 // If we reached this point there is a problem. |
4647 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 4635 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
4648 } else { | 4636 } else { |
4649 UNREACHABLE(); | 4637 UNREACHABLE(); |
4650 } | 4638 } |
4651 } | 4639 } |
(...skipping 23 matching lines...) | |
4675 | 4663 |
4676 // look at the first argument | 4664 // look at the first argument |
4677 __ mov(ecx, Operand(esp, kPointerSize)); | 4665 __ mov(ecx, Operand(esp, kPointerSize)); |
4678 __ test(ecx, ecx); | 4666 __ test(ecx, ecx); |
4679 __ j(zero, &normal_sequence); | 4667 __ j(zero, &normal_sequence); |
4680 | 4668 |
4681 if (mode == DISABLE_ALLOCATION_SITES) { | 4669 if (mode == DISABLE_ALLOCATION_SITES) { |
4682 ElementsKind initial = GetInitialFastElementsKind(); | 4670 ElementsKind initial = GetInitialFastElementsKind(); |
4683 ElementsKind holey_initial = GetHoleyElementsKind(initial); | 4671 ElementsKind holey_initial = GetHoleyElementsKind(initial); |
4684 | 4672 |
4685 ArraySingleArgumentConstructorStub stub_holey(holey_initial, | 4673 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), |
4674 holey_initial, | |
4686 DISABLE_ALLOCATION_SITES); | 4675 DISABLE_ALLOCATION_SITES); |
4687 __ TailCallStub(&stub_holey); | 4676 __ TailCallStub(&stub_holey); |
4688 | 4677 |
4689 __ bind(&normal_sequence); | 4678 __ bind(&normal_sequence); |
4690 ArraySingleArgumentConstructorStub stub(initial, | 4679 ArraySingleArgumentConstructorStub stub(masm->isolate(), |
4680 initial, | |
4691 DISABLE_ALLOCATION_SITES); | 4681 DISABLE_ALLOCATION_SITES); |
4692 __ TailCallStub(&stub); | 4682 __ TailCallStub(&stub); |
4693 } else if (mode == DONT_OVERRIDE) { | 4683 } else if (mode == DONT_OVERRIDE) { |
4694 // We are going to create a holey array, but our kind is non-holey. | 4684 // We are going to create a holey array, but our kind is non-holey. |
4695 // Fix kind and retry. | 4685 // Fix kind and retry. |
4696 __ inc(edx); | 4686 __ inc(edx); |
4697 | 4687 |
4698 if (FLAG_debug_code) { | 4688 if (FLAG_debug_code) { |
4699 Handle<Map> allocation_site_map = | 4689 Handle<Map> allocation_site_map = |
4700 masm->isolate()->factory()->allocation_site_map(); | 4690 masm->isolate()->factory()->allocation_site_map(); |
4701 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); | 4691 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); |
4702 __ Assert(equal, kExpectedAllocationSite); | 4692 __ Assert(equal, kExpectedAllocationSite); |
4703 } | 4693 } |
4704 | 4694 |
4705 // Save the resulting elements kind in type info. We can't just store r3 | 4695 // Save the resulting elements kind in type info. We can't just store r3 |
4706 // in the AllocationSite::transition_info field because elements kind is | 4696 // in the AllocationSite::transition_info field because elements kind is |
4707 // restricted to a portion of the field...upper bits need to be left alone. | 4697 // restricted to a portion of the field...upper bits need to be left alone. |
4708 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 4698 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
4709 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset), | 4699 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset), |
4710 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley))); | 4700 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley))); |
4711 | 4701 |
4712 __ bind(&normal_sequence); | 4702 __ bind(&normal_sequence); |
4713 int last_index = GetSequenceIndexFromFastElementsKind( | 4703 int last_index = GetSequenceIndexFromFastElementsKind( |
4714 TERMINAL_FAST_ELEMENTS_KIND); | 4704 TERMINAL_FAST_ELEMENTS_KIND); |
4715 for (int i = 0; i <= last_index; ++i) { | 4705 for (int i = 0; i <= last_index; ++i) { |
4716 Label next; | 4706 Label next; |
4717 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 4707 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
4718 __ cmp(edx, kind); | 4708 __ cmp(edx, kind); |
4719 __ j(not_equal, &next); | 4709 __ j(not_equal, &next); |
4720 ArraySingleArgumentConstructorStub stub(kind); | 4710 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); |
4721 __ TailCallStub(&stub); | 4711 __ TailCallStub(&stub); |
4722 __ bind(&next); | 4712 __ bind(&next); |
4723 } | 4713 } |
4724 | 4714 |
4725 // If we reached this point there is a problem. | 4715 // If we reached this point there is a problem. |
4726 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 4716 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
4727 } else { | 4717 } else { |
4728 UNREACHABLE(); | 4718 UNREACHABLE(); |
4729 } | 4719 } |
4730 } | 4720 } |
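The __ inc(edx) in CreateArrayDispatchOneArgument above relies on the fast ElementsKind numbering in which each packed kind is immediately followed by its holey variant. A hedged sketch of that assumption; the enum values are illustrative rather than authoritative:

enum ElementsKindSketch {
  FAST_SMI_ELEMENTS_SKETCH = 0,    FAST_HOLEY_SMI_ELEMENTS_SKETCH = 1,
  FAST_ELEMENTS_SKETCH = 2,        FAST_HOLEY_ELEMENTS_SKETCH = 3,
  FAST_DOUBLE_ELEMENTS_SKETCH = 4, FAST_HOLEY_DOUBLE_ELEMENTS_SKETCH = 5
};

// What "fix kind and retry" amounts to when the incoming kind is packed:
// bumping to the next kind yields the holey counterpart.
inline ElementsKindSketch ToHoleySketch(ElementsKindSketch packed_kind) {
  return static_cast<ElementsKindSketch>(packed_kind + 1);
}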
4731 | 4721 |
4732 | 4722 |
4733 template<class T> | 4723 template<class T> |
4734 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 4724 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
4735 int to_index = GetSequenceIndexFromFastElementsKind( | 4725 int to_index = GetSequenceIndexFromFastElementsKind( |
4736 TERMINAL_FAST_ELEMENTS_KIND); | 4726 TERMINAL_FAST_ELEMENTS_KIND); |
4737 for (int i = 0; i <= to_index; ++i) { | 4727 for (int i = 0; i <= to_index; ++i) { |
4738 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 4728 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
4739 T stub(kind); | 4729 T stub(isolate, kind); |
4740 stub.GetCode(isolate); | 4730 stub.GetCode(isolate); |
4741 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { | 4731 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { |
4742 T stub1(kind, DISABLE_ALLOCATION_SITES); | 4732 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); |
4743 stub1.GetCode(isolate); | 4733 stub1.GetCode(isolate); |
4744 } | 4734 } |
4745 } | 4735 } |
4746 } | 4736 } |
4747 | 4737 |
4748 | 4738 |
4749 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 4739 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
4750 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 4740 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
4751 isolate); | 4741 isolate); |
4752 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( | 4742 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( |
4753 isolate); | 4743 isolate); |
4754 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( | 4744 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( |
4755 isolate); | 4745 isolate); |
4756 } | 4746 } |
4757 | 4747 |
4758 | 4748 |
4759 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( | 4749 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( |
4760 Isolate* isolate) { | 4750 Isolate* isolate) { |
4761 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; | 4751 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; |
4762 for (int i = 0; i < 2; i++) { | 4752 for (int i = 0; i < 2; i++) { |
4763 // For internal arrays we only need a few things | 4753 // For internal arrays we only need a few things |
4764 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); | 4754 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); |
4765 stubh1.GetCode(isolate); | 4755 stubh1.GetCode(isolate); |
4766 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 4756 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); |
4767 stubh2.GetCode(isolate); | 4757 stubh2.GetCode(isolate); |
4768 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 4758 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); |
4769 stubh3.GetCode(isolate); | 4759 stubh3.GetCode(isolate); |
4770 } | 4760 } |
4771 } | 4761 } |
4772 | 4762 |
4773 | 4763 |
4774 void ArrayConstructorStub::GenerateDispatchToArrayStub( | 4764 void ArrayConstructorStub::GenerateDispatchToArrayStub( |
4775 MacroAssembler* masm, | 4765 MacroAssembler* masm, |
4776 AllocationSiteOverrideMode mode) { | 4766 AllocationSiteOverrideMode mode) { |
4777 if (argument_count_ == ANY) { | 4767 if (argument_count_ == ANY) { |
4778 Label not_zero_case, not_one_case; | 4768 Label not_zero_case, not_one_case; |
(...skipping 40 matching lines...) | |
4819 __ CmpObjectType(ecx, MAP_TYPE, ecx); | 4809 __ CmpObjectType(ecx, MAP_TYPE, ecx); |
4820 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); | 4810 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); |
4821 | 4811 |
4822 // We should either have undefined in ebx or a valid AllocationSite | 4812 // We should either have undefined in ebx or a valid AllocationSite |
4823 __ AssertUndefinedOrAllocationSite(ebx); | 4813 __ AssertUndefinedOrAllocationSite(ebx); |
4824 } | 4814 } |
4825 | 4815 |
4826 Label no_info; | 4816 Label no_info; |
4827 // If the feedback vector is the undefined value call an array constructor | 4817 // If the feedback vector is the undefined value call an array constructor |
4828 // that doesn't use AllocationSites. | 4818 // that doesn't use AllocationSites. |
4829 __ cmp(ebx, masm->isolate()->factory()->undefined_value()); | 4819 __ cmp(ebx, isolate()->factory()->undefined_value()); |
4830 __ j(equal, &no_info); | 4820 __ j(equal, &no_info); |
4831 | 4821 |
4832 // Only look at the lower 16 bits of the transition info. | 4822 // Only look at the lower 16 bits of the transition info. |
4833 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset)); | 4823 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset)); |
4834 __ SmiUntag(edx); | 4824 __ SmiUntag(edx); |
4835 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 4825 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
4836 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask)); | 4826 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask)); |
4837 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 4827 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
4838 | 4828 |
4839 __ bind(&no_info); | 4829 __ bind(&no_info); |
4840 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); | 4830 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
4841 } | 4831 } |
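For the bit manipulation a few lines above: the ElementsKind occupies the low bits of the AllocationSite's transition-info Smi, so once the value is untagged a mask suffices (the STATIC_ASSERT guarantees ElementsKindBits starts at bit 0). A rough C++ restatement, with a hypothetical helper name:

    static ElementsKind KindFromTransitionInfo(int transition_info) {
      // ElementsKindBits::kShift == 0, so no shift is needed before masking.
      return static_cast<ElementsKind>(
          transition_info & AllocationSite::ElementsKindBits::kMask);
    }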
4842 | 4832 |
4843 | 4833 |
4844 void InternalArrayConstructorStub::GenerateCase( | 4834 void InternalArrayConstructorStub::GenerateCase( |
4845 MacroAssembler* masm, ElementsKind kind) { | 4835 MacroAssembler* masm, ElementsKind kind) { |
4846 Label not_zero_case, not_one_case; | 4836 Label not_zero_case, not_one_case; |
4847 Label normal_sequence; | 4837 Label normal_sequence; |
4848 | 4838 |
4849 __ test(eax, eax); | 4839 __ test(eax, eax); |
4850 __ j(not_zero, &not_zero_case); | 4840 __ j(not_zero, &not_zero_case); |
4851 InternalArrayNoArgumentConstructorStub stub0(kind); | 4841 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); |
4852 __ TailCallStub(&stub0); | 4842 __ TailCallStub(&stub0); |
4853 | 4843 |
4854 __ bind(&not_zero_case); | 4844 __ bind(&not_zero_case); |
4855 __ cmp(eax, 1); | 4845 __ cmp(eax, 1); |
4856 __ j(greater, &not_one_case); | 4846 __ j(greater, &not_one_case); |
4857 | 4847 |
4858 if (IsFastPackedElementsKind(kind)) { | 4848 if (IsFastPackedElementsKind(kind)) { |
4859 // We might need to create a holey array, so | 4849 // We might need to create a holey array, so |
4860 // look at the first argument. | 4850 // look at the first argument. |
4861 __ mov(ecx, Operand(esp, kPointerSize)); | 4851 __ mov(ecx, Operand(esp, kPointerSize)); |
4862 __ test(ecx, ecx); | 4852 __ test(ecx, ecx); |
4863 __ j(zero, &normal_sequence); | 4853 __ j(zero, &normal_sequence); |
4864 | 4854 |
4865 InternalArraySingleArgumentConstructorStub | 4855 InternalArraySingleArgumentConstructorStub |
4866 stub1_holey(GetHoleyElementsKind(kind)); | 4856 stub1_holey(isolate(), GetHoleyElementsKind(kind)); |
4867 __ TailCallStub(&stub1_holey); | 4857 __ TailCallStub(&stub1_holey); |
4868 } | 4858 } |
4869 | 4859 |
4870 __ bind(&normal_sequence); | 4860 __ bind(&normal_sequence); |
4871 InternalArraySingleArgumentConstructorStub stub1(kind); | 4861 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); |
4872 __ TailCallStub(&stub1); | 4862 __ TailCallStub(&stub1); |
4873 | 4863 |
4874 __ bind(&not_one_case); | 4864 __ bind(&not_one_case); |
4875 InternalArrayNArgumentsConstructorStub stubN(kind); | 4865 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); |
4876 __ TailCallStub(&stubN); | 4866 __ TailCallStub(&stubN); |
4877 } | 4867 } |
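GenerateCase above emits code for a three-way dispatch on argc. Restated as plain C++ (the helper name, the first_arg parameter, and the commented tail calls are illustrative only): zero arguments and more than one argument go straight to the matching stub, while the single-argument case upgrades a packed kind to its holey counterpart when the requested length is non-zero.

    static void GenerateCaseSketch(Isolate* isolate, int argc, int first_arg,
                                   ElementsKind kind) {
      if (argc == 0) {
        InternalArrayNoArgumentConstructorStub stub0(isolate, kind);
        return;  // the generated code tail-calls stub0 here
      }
      if (argc == 1) {
        if (IsFastPackedElementsKind(kind) && first_arg != 0) {
          // A non-zero requested length forces the holey variant.
          kind = GetHoleyElementsKind(kind);
        }
        InternalArraySingleArgumentConstructorStub stub1(isolate, kind);
        return;  // tail-call stub1
      }
      InternalArrayNArgumentsConstructorStub stubN(isolate, kind);
      // tail-call stubN
    }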
4878 | 4868 |
4879 | 4869 |
4880 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { | 4870 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { |
4881 // ----------- S t a t e ------------- | 4871 // ----------- S t a t e ------------- |
4882 // -- eax : argc | 4872 // -- eax : argc |
4883 // -- edi : constructor | 4873 // -- edi : constructor |
4884 // -- esp[0] : return address | 4874 // -- esp[0] : return address |
4885 // -- esp[4] : last argument | 4875 // -- esp[4] : last argument |
(...skipping 72 matching lines...)
4958 | 4948 |
4959 STATIC_ASSERT(FCA::kContextSaveIndex == 6); | 4949 STATIC_ASSERT(FCA::kContextSaveIndex == 6); |
4960 STATIC_ASSERT(FCA::kCalleeIndex == 5); | 4950 STATIC_ASSERT(FCA::kCalleeIndex == 5); |
4961 STATIC_ASSERT(FCA::kDataIndex == 4); | 4951 STATIC_ASSERT(FCA::kDataIndex == 4); |
4962 STATIC_ASSERT(FCA::kReturnValueOffset == 3); | 4952 STATIC_ASSERT(FCA::kReturnValueOffset == 3); |
4963 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); | 4953 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); |
4964 STATIC_ASSERT(FCA::kIsolateIndex == 1); | 4954 STATIC_ASSERT(FCA::kIsolateIndex == 1); |
4965 STATIC_ASSERT(FCA::kHolderIndex == 0); | 4955 STATIC_ASSERT(FCA::kHolderIndex == 0); |
4966 STATIC_ASSERT(FCA::kArgsLength == 7); | 4956 STATIC_ASSERT(FCA::kArgsLength == 7); |
4967 | 4957 |
4968 Isolate* isolate = masm->isolate(); | |
4969 | |
4970 __ pop(return_address); | 4958 __ pop(return_address); |
4971 | 4959 |
4972 // context save | 4960 // context save |
4973 __ push(context); | 4961 __ push(context); |
4974 // load context from callee | 4962 // load context from callee |
4975 __ mov(context, FieldOperand(callee, JSFunction::kContextOffset)); | 4963 __ mov(context, FieldOperand(callee, JSFunction::kContextOffset)); |
4976 | 4964 |
4977 // callee | 4965 // callee |
4978 __ push(callee); | 4966 __ push(callee); |
4979 | 4967 |
4980 // call data | 4968 // call data |
4981 __ push(call_data); | 4969 __ push(call_data); |
4982 | 4970 |
4983 Register scratch = call_data; | 4971 Register scratch = call_data; |
4984 if (!call_data_undefined) { | 4972 if (!call_data_undefined) { |
4985 // return value | 4973 // return value |
4986 __ push(Immediate(isolate->factory()->undefined_value())); | 4974 __ push(Immediate(isolate()->factory()->undefined_value())); |
4987 // return value default | 4975 // return value default |
4988 __ push(Immediate(isolate->factory()->undefined_value())); | 4976 __ push(Immediate(isolate()->factory()->undefined_value())); |
4989 } else { | 4977 } else { |
4990 // return value | 4978 // return value |
4991 __ push(scratch); | 4979 __ push(scratch); |
4992 // return value default | 4980 // return value default |
4993 __ push(scratch); | 4981 __ push(scratch); |
4994 } | 4982 } |
4995 // isolate | 4983 // isolate |
4996 __ push(Immediate(reinterpret_cast<int>(isolate))); | 4984 __ push(Immediate(reinterpret_cast<int>(isolate()))); |
4997 // holder | 4985 // holder |
4998 __ push(holder); | 4986 __ push(holder); |
4999 | 4987 |
5000 __ mov(scratch, esp); | 4988 __ mov(scratch, esp); |
5001 | 4989 |
5002 // return address | 4990 // return address |
5003 __ push(return_address); | 4991 __ push(return_address); |
5004 | 4992 |
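The pushes above build the FunctionCallbackArguments block bottom-up: each push moves esp down, so the first value pushed (the saved context) ends up at the highest index and the last one (the holder) at index 0, which is exactly what the STATIC_ASSERTs earlier pin down. A self-contained sketch that simulates the push order against assumed index constants mirroring those asserts:

    #include <cassert>

    // Index constants mirroring the FCA STATIC_ASSERTs above (assumed here,
    // not taken from the real FunctionCallbackArguments definition).
    enum { kHolder = 0, kIsolateIdx = 1, kReturnValueDefault = 2,
           kReturnValue = 3, kData = 4, kCallee = 5, kContextSave = 6,
           kFcaLength = 7 };

    static void SimulateFcaPushes() {
      const char* slots[kFcaLength];
      int sp = kFcaLength;  // a push decrements sp, like the hardware stack
      const char* push_order[kFcaLength] = {
          "context save", "callee", "call data", "return value",
          "return value default", "isolate", "holder"};
      for (int i = 0; i < kFcaLength; ++i) slots[--sp] = push_order[i];
      assert(slots[kHolder] == push_order[6]);       // holder lands lowest
      assert(slots[kContextSave] == push_order[0]);  // saved context lands highest
    }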
5005 // The API function gets a reference to the v8::Arguments. If the CPU profiler | 4993 // The API function gets a reference to the v8::Arguments. If the CPU profiler |
5006 // is enabled, the wrapper function will be called and we need to pass | 4994 // is enabled, the wrapper function will be called and we need to pass |
(...skipping 78 matching lines...)
5085 Operand(ebp, 7 * kPointerSize), | 5073 Operand(ebp, 7 * kPointerSize), |
5086 NULL); | 5074 NULL); |
5087 } | 5075 } |
5088 | 5076 |
5089 | 5077 |
5090 #undef __ | 5078 #undef __ |
5091 | 5079 |
5092 } } // namespace v8::internal | 5080 } } // namespace v8::internal |
5093 | 5081 |
5094 #endif // V8_TARGET_ARCH_IA32 | 5082 #endif // V8_TARGET_ARCH_IA32 |