OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 221 matching lines...)
232 // For named function expressions, declare the function name as a | 232 // For named function expressions, declare the function name as a |
233 // constant. | 233 // constant. |
234 if (scope()->is_function_scope() && scope()->function() != NULL) { | 234 if (scope()->is_function_scope() && scope()->function() != NULL) { |
235 EmitDeclaration(scope()->function(), Variable::CONST, NULL); | 235 EmitDeclaration(scope()->function(), Variable::CONST, NULL); |
236 } | 236 } |
237 VisitDeclarations(scope()->declarations()); | 237 VisitDeclarations(scope()->declarations()); |
238 } | 238 } |
239 | 239 |
240 { Comment cmnt(masm_, "[ Stack check"); | 240 { Comment cmnt(masm_, "[ Stack check"); |
241 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); | 241 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); |
242 NearLabel ok; | 242 Label ok; |
243 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 243 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
244 __ j(above_equal, &ok); | 244 __ j(above_equal, &ok, Label::kNear); |
245 StackCheckStub stub; | 245 StackCheckStub stub; |
246 __ CallStub(&stub); | 246 __ CallStub(&stub); |
247 __ bind(&ok); | 247 __ bind(&ok); |
248 } | 248 } |
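
The block above shows the whole of this change, repeated at each site: the dedicated NearLabel type (which presumably forced a short, 8-bit-displacement jump encoding by construction) is replaced by the plain Label type, with the distance instead passed per jump as a Label::kNear hint. A minimal before/after sketch of the pattern, using only calls visible in this diff:

    // Before: the label's type dictates the jump encoding.
    NearLabel ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok);
    __ bind(&ok);

    // After: a plain Label, with the near hint supplied at each jump site.
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ bind(&ok);
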
249 | 249 |
250 { Comment cmnt(masm_, "[ Body"); | 250 { Comment cmnt(masm_, "[ Body"); |
251 ASSERT(loop_depth() == 0); | 251 ASSERT(loop_depth() == 0); |
252 VisitStatements(function()->body()); | 252 VisitStatements(function()->body()); |
253 ASSERT(loop_depth() == 0); | 253 ASSERT(loop_depth() == 0); |
254 } | 254 } |
255 } | 255 } |
256 | 256 |
257 // Always emit a 'return undefined' in case control fell off the end of | 257 // Always emit a 'return undefined' in case control fell off the end of |
258 // the body. | 258 // the body. |
259 { Comment cmnt(masm_, "[ return <undefined>;"); | 259 { Comment cmnt(masm_, "[ return <undefined>;"); |
260 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); | 260 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
261 EmitReturnSequence(); | 261 EmitReturnSequence(); |
262 } | 262 } |
263 } | 263 } |
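
A one-line example of why the unconditional 'return undefined' tail is emitted (plain JavaScript semantics, illustrative):

    // function f() { 42; }   -- control falls off the end of the body
    // f();                   -- must still evaluate to undefined,
    // hence the generator always emits: rax = undefined; return.
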
264 | 264 |
265 | 265 |
266 void FullCodeGenerator::ClearAccumulator() { | 266 void FullCodeGenerator::ClearAccumulator() { |
267 __ Set(rax, 0); | 267 __ Set(rax, 0); |
268 } | 268 } |
269 | 269 |
270 | 270 |
271 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { | 271 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { |
272 Comment cmnt(masm_, "[ Stack check"); | 272 Comment cmnt(masm_, "[ Stack check"); |
273 NearLabel ok; | 273 Label ok; |
274 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 274 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
275 __ j(above_equal, &ok); | 275 __ j(above_equal, &ok, Label::kNear); |
276 StackCheckStub stub; | 276 StackCheckStub stub; |
277 __ CallStub(&stub); | 277 __ CallStub(&stub); |
278 // Record a mapping of this PC offset to the OSR id. This is used to find | 278 // Record a mapping of this PC offset to the OSR id. This is used to find |
279 // the AST id from the unoptimized code in order to use it as a key into | 279 // the AST id from the unoptimized code in order to use it as a key into |
280 // the deoptimization input data found in the optimized code. | 280 // the deoptimization input data found in the optimized code. |
281 RecordStackCheck(stmt->OsrEntryId()); | 281 RecordStackCheck(stmt->OsrEntryId()); |
282 | 282 |
283 // Loop stack checks can be patched to perform on-stack replacement. In | 283 // Loop stack checks can be patched to perform on-stack replacement. In |
284 // order to decide whether or not to perform OSR we embed the loop depth | 284 // order to decide whether or not to perform OSR we embed the loop depth |
285 // in a test instruction after the call so we can extract it from the OSR | 285 // in a test instruction after the call so we can extract it from the OSR |
(...skipping 192 matching lines...)
478 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, | 478 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, |
479 Label* materialize_false) const { | 479 Label* materialize_false) const { |
480 ASSERT(materialize_true == materialize_false); | 480 ASSERT(materialize_true == materialize_false); |
481 __ bind(materialize_true); | 481 __ bind(materialize_true); |
482 } | 482 } |
483 | 483 |
484 | 484 |
485 void FullCodeGenerator::AccumulatorValueContext::Plug( | 485 void FullCodeGenerator::AccumulatorValueContext::Plug( |
486 Label* materialize_true, | 486 Label* materialize_true, |
487 Label* materialize_false) const { | 487 Label* materialize_false) const { |
488 NearLabel done; | 488 Label done; |
489 __ bind(materialize_true); | 489 __ bind(materialize_true); |
490 __ Move(result_register(), isolate()->factory()->true_value()); | 490 __ Move(result_register(), isolate()->factory()->true_value()); |
491 __ jmp(&done); | 491 __ jmp(&done, Label::kNear); |
492 __ bind(materialize_false); | 492 __ bind(materialize_false); |
493 __ Move(result_register(), isolate()->factory()->false_value()); | 493 __ Move(result_register(), isolate()->factory()->false_value()); |
494 __ bind(&done); | 494 __ bind(&done); |
495 } | 495 } |
496 | 496 |
497 | 497 |
498 void FullCodeGenerator::StackValueContext::Plug( | 498 void FullCodeGenerator::StackValueContext::Plug( |
499 Label* materialize_true, | 499 Label* materialize_true, |
500 Label* materialize_false) const { | 500 Label* materialize_false) const { |
501 NearLabel done; | 501 Label done; |
502 __ bind(materialize_true); | 502 __ bind(materialize_true); |
503 __ Push(isolate()->factory()->true_value()); | 503 __ Push(isolate()->factory()->true_value()); |
504 __ jmp(&done); | 504 __ jmp(&done, Label::kNear); |
505 __ bind(materialize_false); | 505 __ bind(materialize_false); |
506 __ Push(isolate()->factory()->false_value()); | 506 __ Push(isolate()->factory()->false_value()); |
507 __ bind(&done); | 507 __ bind(&done); |
508 } | 508 } |
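
The two materializing Plug overloads above share one shape, differing only in where the value lands (the accumulator rax versus a stack push); sketched:

    //   bind(materialize_true);   dest = true_value;   jmp done (near)
    //   bind(materialize_false);  dest = false_value;
    //   bind(done);
    // The near hint is safe because done is only a few bytes ahead.
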
509 | 509 |
510 | 510 |
511 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, | 511 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, |
512 Label* materialize_false) const { | 512 Label* materialize_false) const { |
513 ASSERT(materialize_true == true_label_); | 513 ASSERT(materialize_true == true_label_); |
514 ASSERT(materialize_false == false_label_); | 514 ASSERT(materialize_false == false_label_); |
(...skipping 116 matching lines...)
631 | 631 |
632 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, | 632 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
633 bool should_normalize, | 633 bool should_normalize, |
634 Label* if_true, | 634 Label* if_true, |
635 Label* if_false) { | 635 Label* if_false) { |
636 // Only prepare for bailouts before splits if we're in a test | 636 // Only prepare for bailouts before splits if we're in a test |
637 // context. Otherwise, we let the Visit function deal with the | 637 // context. Otherwise, we let the Visit function deal with the |
638 // preparation to avoid preparing with the same AST id twice. | 638 // preparation to avoid preparing with the same AST id twice. |
639 if (!context()->IsTest() || !info_->IsOptimizable()) return; | 639 if (!context()->IsTest() || !info_->IsOptimizable()) return; |
640 | 640 |
641 NearLabel skip; | 641 Label skip; |
642 if (should_normalize) __ jmp(&skip); | 642 if (should_normalize) __ jmp(&skip, Label::kNear); |
643 | 643 |
644 ForwardBailoutStack* current = forward_bailout_stack_; | 644 ForwardBailoutStack* current = forward_bailout_stack_; |
645 while (current != NULL) { | 645 while (current != NULL) { |
646 PrepareForBailout(current->expr(), state); | 646 PrepareForBailout(current->expr(), state); |
647 current = current->parent(); | 647 current = current->parent(); |
648 } | 648 } |
649 | 649 |
650 if (should_normalize) { | 650 if (should_normalize) { |
651 __ CompareRoot(rax, Heap::kTrueValueRootIndex); | 651 __ CompareRoot(rax, Heap::kTrueValueRootIndex); |
652 Split(equal, if_true, if_false, NULL); | 652 Split(equal, if_true, if_false, NULL); |
(...skipping 258 matching lines...)
911 __ cmpq(rdx, empty_descriptor_array_value); | 911 __ cmpq(rdx, empty_descriptor_array_value); |
912 __ j(equal, &call_runtime); | 912 __ j(equal, &call_runtime); |
913 | 913 |
914 // Check that there is an enum cache in the non-empty instance | 914 // Check that there is an enum cache in the non-empty instance |
915 // descriptors (rdx). This is the case if the next enumeration | 915 // descriptors (rdx). This is the case if the next enumeration |
916 // index field does not contain a smi. | 916 // index field does not contain a smi. |
917 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset)); | 917 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset)); |
918 __ JumpIfSmi(rdx, &call_runtime); | 918 __ JumpIfSmi(rdx, &call_runtime); |
919 | 919 |
920 // For all objects but the receiver, check that the cache is empty. | 920 // For all objects but the receiver, check that the cache is empty. |
921 NearLabel check_prototype; | 921 Label check_prototype; |
922 __ cmpq(rcx, rax); | 922 __ cmpq(rcx, rax); |
923 __ j(equal, &check_prototype); | 923 __ j(equal, &check_prototype, Label::kNear); |
924 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 924 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
925 __ cmpq(rdx, empty_fixed_array_value); | 925 __ cmpq(rdx, empty_fixed_array_value); |
926 __ j(not_equal, &call_runtime); | 926 __ j(not_equal, &call_runtime); |
927 | 927 |
928 // Load the prototype from the map and loop if non-null. | 928 // Load the prototype from the map and loop if non-null. |
929 __ bind(&check_prototype); | 929 __ bind(&check_prototype); |
930 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset)); | 930 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset)); |
931 __ cmpq(rcx, null_value); | 931 __ cmpq(rcx, null_value); |
932 __ j(not_equal, &next); | 932 __ j(not_equal, &next); |
933 | 933 |
934 // The enum cache is valid. Load the map of the object being | 934 // The enum cache is valid. Load the map of the object being |
935 // iterated over and use the cache for the iteration. | 935 // iterated over and use the cache for the iteration. |
936 NearLabel use_cache; | 936 Label use_cache; |
937 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); | 937 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); |
938 __ jmp(&use_cache); | 938 __ jmp(&use_cache, Label::kNear); |
939 | 939 |
940 // Get the set of properties to enumerate. | 940 // Get the set of properties to enumerate. |
941 __ bind(&call_runtime); | 941 __ bind(&call_runtime); |
942 __ push(rax); // Duplicate the enumerable object on the stack. | 942 __ push(rax); // Duplicate the enumerable object on the stack. |
943 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); | 943 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
944 | 944 |
945 // If we got a map from the runtime call, we can do a fast | 945 // If we got a map from the runtime call, we can do a fast |
946 // modification check. Otherwise, we got a fixed array, and we have | 946 // modification check. Otherwise, we got a fixed array, and we have |
947 // to do a slow check. | 947 // to do a slow check. |
948 NearLabel fixed_array; | 948 Label fixed_array; |
949 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 949 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
950 Heap::kMetaMapRootIndex); | 950 Heap::kMetaMapRootIndex); |
951 __ j(not_equal, &fixed_array); | 951 __ j(not_equal, &fixed_array, Label::kNear); |
952 | 952 |
953 // We got a map in register rax. Get the enumeration cache from it. | 953 // We got a map in register rax. Get the enumeration cache from it. |
954 __ bind(&use_cache); | 954 __ bind(&use_cache); |
955 __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset)); | 955 __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset)); |
956 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); | 956 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); |
957 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 957 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
958 | 958 |
959 // Set up the four remaining stack slots. | 959 // Set up the four remaining stack slots. |
960 __ push(rax); // Map. | 960 __ push(rax); // Map. |
961 __ push(rdx); // Enumeration cache. | 961 __ push(rdx); // Enumeration cache. |
(...skipping 23 matching lines...)
985 index.reg, | 985 index.reg, |
986 index.scale, | 986 index.scale, |
987 FixedArray::kHeaderSize)); | 987 FixedArray::kHeaderSize)); |
988 | 988 |
989 // Get the expected map from the stack or a zero map in the | 989 // Get the expected map from the stack or a zero map in the |
990 // permanent slow case into register rdx. | 990 // permanent slow case into register rdx. |
991 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); | 991 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); |
992 | 992 |
993 // Check if the expected map still matches that of the enumerable. | 993 // Check if the expected map still matches that of the enumerable. |
994 // If not, we have to filter the key. | 994 // If not, we have to filter the key. |
995 NearLabel update_each; | 995 Label update_each; |
996 __ movq(rcx, Operand(rsp, 4 * kPointerSize)); | 996 __ movq(rcx, Operand(rsp, 4 * kPointerSize)); |
997 __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset)); | 997 __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset)); |
998 __ j(equal, &update_each); | 998 __ j(equal, &update_each, Label::kNear); |
999 | 999 |
1000 // Convert the entry to a string or null if it isn't a property | 1000 // Convert the entry to a string or null if it isn't a property |
1001 // anymore. If the property has been removed while iterating, we | 1001 // anymore. If the property has been removed while iterating, we |
1002 // just skip it. | 1002 // just skip it. |
1003 __ push(rcx); // Enumerable. | 1003 __ push(rcx); // Enumerable. |
1004 __ push(rbx); // Current entry. | 1004 __ push(rbx); // Current entry. |
1005 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); | 1005 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); |
1006 __ Cmp(rax, Smi::FromInt(0)); | 1006 __ Cmp(rax, Smi::FromInt(0)); |
1007 __ j(equal, loop_statement.continue_target()); | 1007 __ j(equal, loop_statement.continue_target()); |
1008 __ movq(rbx, rax); | 1008 __ movq(rbx, rax); |
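
The filter step above, modeled in plain terms (a sketch; the pseudo-names are illustrative, not V8 API):

    //   name = FILTER_KEY(enumerable, entry);
    //   if (name == smi 0) continue;   // property was deleted mid-iteration
    //   entry = name;                  // possibly the string-converted key
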
(...skipping 87 matching lines...)
1096 // If no outer scope calls eval, we do not need to check more | 1096 // If no outer scope calls eval, we do not need to check more |
1097 // context extensions. If we have reached an eval scope, we check | 1097 // context extensions. If we have reached an eval scope, we check |
1098 // all extensions from this point. | 1098 // all extensions from this point. |
1099 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; | 1099 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; |
1100 s = s->outer_scope(); | 1100 s = s->outer_scope(); |
1101 } | 1101 } |
1102 | 1102 |
1103 if (s != NULL && s->is_eval_scope()) { | 1103 if (s != NULL && s->is_eval_scope()) { |
1104 // Loop up the context chain. There is no frame effect so it is | 1104 // Loop up the context chain. There is no frame effect so it is |
1105 // safe to use raw labels here. | 1105 // safe to use raw labels here. |
1106 NearLabel next, fast; | 1106 Label next, fast; |
1107 if (!context.is(temp)) { | 1107 if (!context.is(temp)) { |
1108 __ movq(temp, context); | 1108 __ movq(temp, context); |
1109 } | 1109 } |
1110 // Load map for comparison into register, outside loop. | 1110 // Load map for comparison into register, outside loop. |
1111 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex); | 1111 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex); |
1112 __ bind(&next); | 1112 __ bind(&next); |
1113 // Terminate at global context. | 1113 // Terminate at global context. |
1114 __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset)); | 1114 __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset)); |
1115 __ j(equal, &fast); | 1115 __ j(equal, &fast, Label::kNear); |
1116 // Check that extension is NULL. | 1116 // Check that extension is NULL. |
1117 __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0)); | 1117 __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0)); |
1118 __ j(not_equal, slow); | 1118 __ j(not_equal, slow); |
1119 // Load next context in chain. | 1119 // Load next context in chain. |
1120 __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX)); | 1120 __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX)); |
1121 __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset)); | 1121 __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset)); |
1122 __ jmp(&next); | 1122 __ jmp(&next); |
1123 __ bind(&fast); | 1123 __ bind(&fast); |
1124 } | 1124 } |
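
The loop above, modeled as plain C++ (the types here are illustrative simplifications, not V8's): walk outward until the global context is reached, bailing to the slow path if any context carries an extension object, since an eval could have introduced bindings there.

    struct Ctx { bool is_global; void* extension; Ctx* next; };
    static bool FastGlobalLoadIsSafe(const Ctx* ctx) {
      for (; !ctx->is_global; ctx = ctx->next) {      // terminate at global
        if (ctx->extension != nullptr) return false;  // -> slow path
      }
      return true;                                    // no extensions found
    }
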
1125 | 1125 |
(...skipping 128 matching lines...)
1254 | 1254 |
1255 context()->Plug(rax); | 1255 context()->Plug(rax); |
1256 | 1256 |
1257 } else if (slot != NULL) { | 1257 } else if (slot != NULL) { |
1258 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) | 1258 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) |
1259 ? "Context slot" | 1259 ? "Context slot" |
1260 : "Stack slot"); | 1260 : "Stack slot"); |
1261 if (var->mode() == Variable::CONST) { | 1261 if (var->mode() == Variable::CONST) { |
1262 // Constants may be the hole value if they have not been initialized. | 1262 // Constants may be the hole value if they have not been initialized. |
1263 // Unhole them. | 1263 // Unhole them. |
1264 NearLabel done; | 1264 Label done; |
1265 MemOperand slot_operand = EmitSlotSearch(slot, rax); | 1265 MemOperand slot_operand = EmitSlotSearch(slot, rax); |
1266 __ movq(rax, slot_operand); | 1266 __ movq(rax, slot_operand); |
1267 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | 1267 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
1268 __ j(not_equal, &done); | 1268 __ j(not_equal, &done, Label::kNear); |
1269 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); | 1269 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
1270 __ bind(&done); | 1270 __ bind(&done); |
1271 context()->Plug(rax); | 1271 context()->Plug(rax); |
1272 } else { | 1272 } else { |
1273 context()->Plug(slot); | 1273 context()->Plug(slot); |
1274 } | 1274 } |
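
The unholing above, sketched: a const binding reads as the hole value until its initializer has run, and such reads must yield undefined:

    //   value = *slot;
    //   if (value == the_hole) value = undefined;
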
1275 | 1275 |
1276 } else { | 1276 } else { |
1277 Comment cmnt(masm_, "Rewritten parameter"); | 1277 Comment cmnt(masm_, "Rewritten parameter"); |
1278 ASSERT_NOT_NULL(property); | 1278 ASSERT_NOT_NULL(property); |
(...skipping 385 matching lines...)
1664 | 1664 |
1665 | 1665 |
1666 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | 1666 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, |
1667 Token::Value op, | 1667 Token::Value op, |
1668 OverwriteMode mode, | 1668 OverwriteMode mode, |
1669 Expression* left, | 1669 Expression* left, |
1670 Expression* right) { | 1670 Expression* right) { |
1671 // Do combined smi check of the operands. Left operand is on the | 1671 // Do combined smi check of the operands. Left operand is on the |
1672 // stack (popped into rdx). Right operand is in rax but moved into | 1672 // stack (popped into rdx). Right operand is in rax but moved into |
1673 // rcx to make the shifts easier. | 1673 // rcx to make the shifts easier. |
1674 NearLabel done, stub_call, smi_case; | 1674 NearLabel stub_call, smi_case; |
| 1675 Label done; |
1675 __ pop(rdx); | 1676 __ pop(rdx); |
1676 __ movq(rcx, rax); | 1677 __ movq(rcx, rax); |
1677 __ or_(rax, rdx); | 1678 __ or_(rax, rdx); |
1678 JumpPatchSite patch_site(masm_); | 1679 JumpPatchSite patch_site(masm_); |
1679 patch_site.EmitJumpIfSmi(rax, &smi_case); | 1680 patch_site.EmitJumpIfSmi(rax, &smi_case); |
1680 | 1681 |
1681 __ bind(&stub_call); | 1682 __ bind(&stub_call); |
1682 __ movq(rax, rcx); | 1683 __ movq(rax, rcx); |
1683 TypeRecordingBinaryOpStub stub(op, mode); | 1684 TypeRecordingBinaryOpStub stub(op, mode); |
1684 EmitCallIC(stub.GetCode(), &patch_site, expr->id()); | 1685 EmitCallIC(stub.GetCode(), &patch_site, expr->id()); |
1685 __ jmp(&done); | 1686 __ jmp(&done, Label::kNear); |
1686 | 1687 |
1687 __ bind(&smi_case); | 1688 __ bind(&smi_case); |
1688 switch (op) { | 1689 switch (op) { |
1689 case Token::SAR: | 1690 case Token::SAR: |
1690 __ SmiShiftArithmeticRight(rax, rdx, rcx); | 1691 __ SmiShiftArithmeticRight(rax, rdx, rcx); |
1691 break; | 1692 break; |
1692 case Token::SHL: | 1693 case Token::SHL: |
1693 __ SmiShiftLeft(rax, rdx, rcx); | 1694 __ SmiShiftLeft(rax, rdx, rcx); |
1694 break; | 1695 break; |
1695 case Token::SHR: | 1696 case Token::SHR: |
(...skipping 494 matching lines...)
2190 __ push(context_register()); | 2191 __ push(context_register()); |
2191 __ Push(var->name()); | 2192 __ Push(var->name()); |
2192 __ CallRuntime(Runtime::kLoadContextSlot, 2); | 2193 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
2193 __ push(rax); // Function. | 2194 __ push(rax); // Function. |
2194 __ push(rdx); // Receiver. | 2195 __ push(rdx); // Receiver. |
2195 | 2196 |
2196 // If fast case code has been generated, emit code to push the | 2197 // If fast case code has been generated, emit code to push the |
2197 // function and receiver and have the slow path jump around this | 2198 // function and receiver and have the slow path jump around this |
2198 // code. | 2199 // code. |
2199 if (done.is_linked()) { | 2200 if (done.is_linked()) { |
2200 NearLabel call; | 2201 Label call; |
2201 __ jmp(&call); | 2202 __ jmp(&call, Label::kNear); |
2202 __ bind(&done); | 2203 __ bind(&done); |
2203 // Push function. | 2204 // Push function. |
2204 __ push(rax); | 2205 __ push(rax); |
2205 // Push global receiver. | 2206 // Push global receiver. |
2206 __ movq(rbx, GlobalObjectOperand()); | 2207 __ movq(rbx, GlobalObjectOperand()); |
2207 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); | 2208 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
2208 __ bind(&call); | 2209 __ bind(&call); |
2209 } | 2210 } |
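
The control flow emitted above, sketched: the slow path has already pushed the function and receiver, so it jumps over the fast path's pushes to the join point:

    //   slow:  push(function); push(receiver); jmp call (near)
    //   done:  push(function); push(global_receiver)   // fast path joins
    //   call:  ...
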
2210 | 2211 |
2211 // The receiver is either the global receiver or a JSObject found by | 2212 // The receiver is either the global receiver or a JSObject found by |
(...skipping 420 matching lines...)
2632 __ Move(rax, Smi::FromInt(scope()->num_parameters())); | 2633 __ Move(rax, Smi::FromInt(scope()->num_parameters())); |
2633 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); | 2634 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
2634 __ CallStub(&stub); | 2635 __ CallStub(&stub); |
2635 context()->Plug(rax); | 2636 context()->Plug(rax); |
2636 } | 2637 } |
2637 | 2638 |
2638 | 2639 |
2639 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) { | 2640 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) { |
2640 ASSERT(args->length() == 0); | 2641 ASSERT(args->length() == 0); |
2641 | 2642 |
2642 NearLabel exit; | 2643 Label exit; |
2643 // Get the number of formal parameters. | 2644 // Get the number of formal parameters. |
2644 __ Move(rax, Smi::FromInt(scope()->num_parameters())); | 2645 __ Move(rax, Smi::FromInt(scope()->num_parameters())); |
2645 | 2646 |
2646 // Check if the calling frame is an arguments adaptor frame. | 2647 // Check if the calling frame is an arguments adaptor frame. |
2647 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2648 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
2648 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), | 2649 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), |
2649 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2650 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
2650 __ j(not_equal, &exit); | 2651 __ j(not_equal, &exit, Label::kNear); |
2651 | 2652 |
2652 // Arguments adaptor case: Read the arguments length from the | 2653 // Arguments adaptor case: Read the arguments length from the |
2653 // adaptor frame. | 2654 // adaptor frame. |
2654 __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2655 __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2655 | 2656 |
2656 __ bind(&exit); | 2657 __ bind(&exit); |
2657 if (FLAG_debug_code) __ AbortIfNotSmi(rax); | 2658 if (FLAG_debug_code) __ AbortIfNotSmi(rax); |
2658 context()->Plug(rax); | 2659 context()->Plug(rax); |
2659 } | 2660 } |
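
The fast arguments length computed above, as a self-contained model (the type and names are illustrative, not V8's):

    struct CallerFrame { bool is_arguments_adaptor; int length; };
    static int ArgumentsLength(int formal_count, const CallerFrame& caller) {
      // An adaptor frame exists only when the actual argument count differed
      // from the formal parameter count, and it records the actual count.
      return caller.is_arguments_adaptor ? caller.length : formal_count;
    }
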
2660 | 2661 |
(...skipping 515 matching lines...)
3176 Register cache = rbx; | 3177 Register cache = rbx; |
3177 Register tmp = rcx; | 3178 Register tmp = rcx; |
3178 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX)); | 3179 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX)); |
3179 __ movq(cache, | 3180 __ movq(cache, |
3180 FieldOperand(cache, GlobalObject::kGlobalContextOffset)); | 3181 FieldOperand(cache, GlobalObject::kGlobalContextOffset)); |
3181 __ movq(cache, | 3182 __ movq(cache, |
3182 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); | 3183 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); |
3183 __ movq(cache, | 3184 __ movq(cache, |
3184 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); | 3185 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); |
3185 | 3186 |
3186 NearLabel done, not_found; | 3187 Label done, not_found; |
3187 // tmp now holds finger offset as a smi. | 3188 // tmp now holds finger offset as a smi. |
3188 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 3189 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
3189 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); | 3190 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); |
3190 SmiIndex index = | 3191 SmiIndex index = |
3191 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2); | 3192 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2); |
3192 __ cmpq(key, FieldOperand(cache, | 3193 __ cmpq(key, FieldOperand(cache, |
3193 index.reg, | 3194 index.reg, |
3194 index.scale, | 3195 index.scale, |
3195 FixedArray::kHeaderSize)); | 3196 FixedArray::kHeaderSize)); |
3196 __ j(not_equal, ¬_found); | 3197 __ j(not_equal, ¬_found, Label::kNear); |
3197 __ movq(rax, FieldOperand(cache, | 3198 __ movq(rax, FieldOperand(cache, |
3198 index.reg, | 3199 index.reg, |
3199 index.scale, | 3200 index.scale, |
3200 FixedArray::kHeaderSize + kPointerSize)); | 3201 FixedArray::kHeaderSize + kPointerSize)); |
3201 __ jmp(&done); | 3202 __ jmp(&done, Label::kNear); |
3202 | 3203 |
3203 __ bind(¬_found); | 3204 __ bind(¬_found); |
3204 // Call runtime to perform the lookup. | 3205 // Call runtime to perform the lookup. |
3205 __ push(cache); | 3206 __ push(cache); |
3206 __ push(key); | 3207 __ push(key); |
3207 __ CallRuntime(Runtime::kGetFromCache, 2); | 3208 __ CallRuntime(Runtime::kGetFromCache, 2); |
3208 | 3209 |
3209 __ bind(&done); | 3210 __ bind(&done); |
3210 context()->Plug(rax); | 3211 context()->Plug(rax); |
3211 } | 3212 } |
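
The fast path above relies on a cache layout of interleaved (key, value) pairs with a finger pointing at the most recently hit key slot, which is why the hit loads the word immediately after the compared key. A simplified, self-contained model (illustrative, not V8's actual class):

    #include <cstdint>
    #include <vector>
    struct ResultCacheModel {
      std::vector<intptr_t> entries;  // entries[i] = key, entries[i+1] = value
      size_t finger = 0;              // most recently hit key slot
      bool LookupAtFinger(intptr_t key, intptr_t* value) const {
        if (entries[finger] != key) return false;  // miss: go to the runtime
        *value = entries[finger + 1];              // value follows its key
        return true;
      }
    };
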
3212 | 3213 |
3213 | 3214 |
3214 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) { | 3215 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) { |
3215 ASSERT_EQ(2, args->length()); | 3216 ASSERT_EQ(2, args->length()); |
3216 | 3217 |
3217 Register right = rax; | 3218 Register right = rax; |
3218 Register left = rbx; | 3219 Register left = rbx; |
3219 Register tmp = rcx; | 3220 Register tmp = rcx; |
3220 | 3221 |
3221 VisitForStackValue(args->at(0)); | 3222 VisitForStackValue(args->at(0)); |
3222 VisitForAccumulatorValue(args->at(1)); | 3223 VisitForAccumulatorValue(args->at(1)); |
3223 __ pop(left); | 3224 __ pop(left); |
3224 | 3225 |
3225 NearLabel done, fail, ok; | 3226 Label done, fail, ok; |
3226 __ cmpq(left, right); | 3227 __ cmpq(left, right); |
3227 __ j(equal, &ok); | 3228 __ j(equal, &ok, Label::kNear); |
3228 // Fail if either is a non-HeapObject. | 3229 // Fail if either is a non-HeapObject. |
3229 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp); | 3230 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp); |
3230 __ j(either_smi, &fail); | 3231 __ j(either_smi, &fail, Label::kNear); |
3231 __ j(zero, &fail); | 3232 __ j(zero, &fail, Label::kNear); |
3232 __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset)); | 3233 __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset)); |
3233 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset), | 3234 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset), |
3234 Immediate(JS_REGEXP_TYPE)); | 3235 Immediate(JS_REGEXP_TYPE)); |
3235 __ j(not_equal, &fail); | 3236 __ j(not_equal, &fail, Label::kNear); |
3236 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset)); | 3237 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset)); |
3237 __ j(not_equal, &fail); | 3238 __ j(not_equal, &fail, Label::kNear); |
3238 __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset)); | 3239 __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset)); |
3239 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset)); | 3240 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset)); |
3240 __ j(equal, &ok); | 3241 __ j(equal, &ok, Label::kNear); |
3241 __ bind(&fail); | 3242 __ bind(&fail); |
3242 __ Move(rax, isolate()->factory()->false_value()); | 3243 __ Move(rax, isolate()->factory()->false_value()); |
3243 __ jmp(&done); | 3244 __ jmp(&done, Label::kNear); |
3244 __ bind(&ok); | 3245 __ bind(&ok); |
3245 __ Move(rax, isolate()->factory()->true_value()); | 3246 __ Move(rax, isolate()->factory()->true_value()); |
3246 __ bind(&done); | 3247 __ bind(&done); |
3247 | 3248 |
3248 context()->Plug(rax); | 3249 context()->Plug(rax); |
3249 } | 3250 } |
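
The decision procedure above, flattened (a sketch; the accessors are shorthand, and matching data presumably means the same pattern and flags):

    //   if (left == right) return true;            // same object
    //   if (is_smi(left) || is_smi(right)) return false;
    //   if (instance_type(left) != JS_REGEXP_TYPE) return false;
    //   if (map(left) != map(right)) return false; // so right is a regexp too
    //   return data(left) == data(right);
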
3250 | 3251 |
3251 | 3252 |
3252 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) { | 3253 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) { |
3253 ASSERT(args->length() == 1); | 3254 ASSERT(args->length() == 1); |
(...skipping 547 matching lines...)
3801 | 3802 |
3802 // We need a second deoptimization point after loading the value | 3803 // We need a second deoptimization point after loading the value |
3803 // in case evaluating the property load may have a side effect. | 3804 // in case evaluating the property load may have a side effect. |
3804 if (assign_type == VARIABLE) { | 3805 if (assign_type == VARIABLE) { |
3805 PrepareForBailout(expr->expression(), TOS_REG); | 3806 PrepareForBailout(expr->expression(), TOS_REG); |
3806 } else { | 3807 } else { |
3807 PrepareForBailoutForId(expr->CountId(), TOS_REG); | 3808 PrepareForBailoutForId(expr->CountId(), TOS_REG); |
3808 } | 3809 } |
3809 | 3810 |
3810 // Call ToNumber only if operand is not a smi. | 3811 // Call ToNumber only if operand is not a smi. |
3811 NearLabel no_conversion; | 3812 Label no_conversion; |
3812 Condition is_smi; | 3813 Condition is_smi; |
3813 is_smi = masm_->CheckSmi(rax); | 3814 is_smi = masm_->CheckSmi(rax); |
3814 __ j(is_smi, &no_conversion); | 3815 __ j(is_smi, &no_conversion, Label::kNear); |
3815 ToNumberStub convert_stub; | 3816 ToNumberStub convert_stub; |
3816 __ CallStub(&convert_stub); | 3817 __ CallStub(&convert_stub); |
3817 __ bind(&no_conversion); | 3818 __ bind(&no_conversion); |
3818 | 3819 |
3819 // Save result for postfix expressions. | 3820 // Save result for postfix expressions. |
3820 if (expr->is_postfix()) { | 3821 if (expr->is_postfix()) { |
3821 if (!context()->IsEffect()) { | 3822 if (!context()->IsEffect()) { |
3822 // Save the result on the stack. If we have a named or keyed property | 3823 // Save the result on the stack. If we have a named or keyed property |
3823 // we store the result under the receiver that is currently on top | 3824 // we store the result under the receiver that is currently on top |
3824 // of the stack. | 3825 // of the stack. |
3825 switch (assign_type) { | 3826 switch (assign_type) { |
3826 case VARIABLE: | 3827 case VARIABLE: |
3827 __ push(rax); | 3828 __ push(rax); |
3828 break; | 3829 break; |
3829 case NAMED_PROPERTY: | 3830 case NAMED_PROPERTY: |
3830 __ movq(Operand(rsp, kPointerSize), rax); | 3831 __ movq(Operand(rsp, kPointerSize), rax); |
3831 break; | 3832 break; |
3832 case KEYED_PROPERTY: | 3833 case KEYED_PROPERTY: |
3833 __ movq(Operand(rsp, 2 * kPointerSize), rax); | 3834 __ movq(Operand(rsp, 2 * kPointerSize), rax); |
3834 break; | 3835 break; |
3835 } | 3836 } |
3836 } | 3837 } |
3837 } | 3838 } |
3838 | 3839 |
3839 // Inline smi case if we are in a loop. | 3840 // Inline smi case if we are in a loop. |
3840 NearLabel stub_call, done; | 3841 NearLabel done; |
| 3842 Label stub_call; |
3841 JumpPatchSite patch_site(masm_); | 3843 JumpPatchSite patch_site(masm_); |
3842 | 3844 |
3843 if (ShouldInlineSmiCase(expr->op())) { | 3845 if (ShouldInlineSmiCase(expr->op())) { |
3844 if (expr->op() == Token::INC) { | 3846 if (expr->op() == Token::INC) { |
3845 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3847 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
3846 } else { | 3848 } else { |
3847 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3849 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
3848 } | 3850 } |
3849 __ j(overflow, &stub_call); | 3851 __ j(overflow, &stub_call, Label::kNear); |
3850 // We could eliminate this smi check if we split the code at | 3852 // We could eliminate this smi check if we split the code at |
3851 // the first smi check before calling ToNumber. | 3853 // the first smi check before calling ToNumber. |
3852 patch_site.EmitJumpIfSmi(rax, &done); | 3854 patch_site.EmitJumpIfSmi(rax, &done); |
3853 | 3855 |
3854 __ bind(&stub_call); | 3856 __ bind(&stub_call); |
3855 // Call stub. Undo operation first. | 3857 // Call stub. Undo operation first. |
3856 if (expr->op() == Token::INC) { | 3858 if (expr->op() == Token::INC) { |
3857 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3859 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
3858 } else { | 3860 } else { |
3859 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3861 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
(...skipping 443 matching lines...)
4303 __ ret(0); | 4305 __ ret(0); |
4304 } | 4306 } |
4305 | 4307 |
4306 | 4308 |
4307 #undef __ | 4309 #undef __ |
4308 | 4310 |
4309 | 4311 |
4310 } } // namespace v8::internal | 4312 } } // namespace v8::internal |
4311 | 4313 |
4312 #endif // V8_TARGET_ARCH_X64 | 4314 #endif // V8_TARGET_ARCH_X64 |