Chromium Code Reviews

Side by Side Diff: src/ia32/full-codegen-ia32.cc

Issue 6928060: Merge Label and NearLabel (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: address comments (created 9 years, 7 months ago)
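
The change is mechanical throughout this file: every NearLabel declaration becomes a plain Label, and each jump that targets it gains an explicit Label::kNear distance hint at the emit site. A minimal before/after sketch of the pattern, abstracted from the stack-check hunk below (macro-assembler context assumed, not compilable on its own):

  // Before: the label type itself carried the distance information.
  NearLabel ok;
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, taken);                 // implicitly a short jump
  __ CallStub(&stub);
  __ bind(&ok);

  // After: only Label remains; the emit site states the expected distance.
  Label ok;
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, taken, Label::kNear);   // hint: target is close by
  __ CallStub(&stub);
  __ bind(&ok);

Merging the two types moves the distance choice from the label declaration to the individual jump, which, among other things, lets a single label be targeted by both near and far jumps.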
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 219 matching lines...)
230 // For named function expressions, declare the function name as a 230 // For named function expressions, declare the function name as a
231 // constant. 231 // constant.
232 if (scope()->is_function_scope() && scope()->function() != NULL) { 232 if (scope()->is_function_scope() && scope()->function() != NULL) {
233 EmitDeclaration(scope()->function(), Variable::CONST, NULL); 233 EmitDeclaration(scope()->function(), Variable::CONST, NULL);
234 } 234 }
235 VisitDeclarations(scope()->declarations()); 235 VisitDeclarations(scope()->declarations());
236 } 236 }
237 237
238 { Comment cmnt(masm_, "[ Stack check"); 238 { Comment cmnt(masm_, "[ Stack check");
239 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); 239 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
240 NearLabel ok; 240 Label ok;
241 ExternalReference stack_limit = 241 ExternalReference stack_limit =
242 ExternalReference::address_of_stack_limit(isolate()); 242 ExternalReference::address_of_stack_limit(isolate());
243 __ cmp(esp, Operand::StaticVariable(stack_limit)); 243 __ cmp(esp, Operand::StaticVariable(stack_limit));
244 __ j(above_equal, &ok, taken); 244 __ j(above_equal, &ok, taken, Label::kNear);
245 StackCheckStub stub; 245 StackCheckStub stub;
246 __ CallStub(&stub); 246 __ CallStub(&stub);
247 __ bind(&ok); 247 __ bind(&ok);
248 } 248 }
249 249
250 { Comment cmnt(masm_, "[ Body"); 250 { Comment cmnt(masm_, "[ Body");
251 ASSERT(loop_depth() == 0); 251 ASSERT(loop_depth() == 0);
252 VisitStatements(function()->body()); 252 VisitStatements(function()->body());
253 ASSERT(loop_depth() == 0); 253 ASSERT(loop_depth() == 0);
254 } 254 }
255 } 255 }
256 256
257 // Always emit a 'return undefined' in case control fell off the end of 257 // Always emit a 'return undefined' in case control fell off the end of
258 // the body. 258 // the body.
259 { Comment cmnt(masm_, "[ return <undefined>;"); 259 { Comment cmnt(masm_, "[ return <undefined>;");
260 __ mov(eax, isolate()->factory()->undefined_value()); 260 __ mov(eax, isolate()->factory()->undefined_value());
261 EmitReturnSequence(); 261 EmitReturnSequence();
262 } 262 }
263 } 263 }
264 264
265 265
266 void FullCodeGenerator::ClearAccumulator() { 266 void FullCodeGenerator::ClearAccumulator() {
267 __ Set(eax, Immediate(Smi::FromInt(0))); 267 __ Set(eax, Immediate(Smi::FromInt(0)));
268 } 268 }
269 269
270 270
271 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { 271 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
272 Comment cmnt(masm_, "[ Stack check"); 272 Comment cmnt(masm_, "[ Stack check");
273 NearLabel ok; 273 Label ok;
274 ExternalReference stack_limit = 274 ExternalReference stack_limit =
275 ExternalReference::address_of_stack_limit(isolate()); 275 ExternalReference::address_of_stack_limit(isolate());
276 __ cmp(esp, Operand::StaticVariable(stack_limit)); 276 __ cmp(esp, Operand::StaticVariable(stack_limit));
277 __ j(above_equal, &ok, taken); 277 __ j(above_equal, &ok, taken, Label::kNear);
278 StackCheckStub stub; 278 StackCheckStub stub;
279 __ CallStub(&stub); 279 __ CallStub(&stub);
280 // Record a mapping of this PC offset to the OSR id. This is used to find 280 // Record a mapping of this PC offset to the OSR id. This is used to find
281 // the AST id from the unoptimized code in order to use it as a key into 281 // the AST id from the unoptimized code in order to use it as a key into
282 // the deoptimization input data found in the optimized code. 282 // the deoptimization input data found in the optimized code.
283 RecordStackCheck(stmt->OsrEntryId()); 283 RecordStackCheck(stmt->OsrEntryId());
284 284
285 // Loop stack checks can be patched to perform on-stack replacement. In 285 // Loop stack checks can be patched to perform on-stack replacement. In
286 // order to decide whether or not to perform OSR we embed the loop depth 286 // order to decide whether or not to perform OSR we embed the loop depth
287 // in a test instruction after the call so we can extract it from the OSR 287 // in a test instruction after the call so we can extract it from the OSR
(...skipping 176 matching lines...)
464 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 464 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
465 Label* materialize_false) const { 465 Label* materialize_false) const {
466 ASSERT(materialize_true == materialize_false); 466 ASSERT(materialize_true == materialize_false);
467 __ bind(materialize_true); 467 __ bind(materialize_true);
468 } 468 }
469 469
470 470
471 void FullCodeGenerator::AccumulatorValueContext::Plug( 471 void FullCodeGenerator::AccumulatorValueContext::Plug(
472 Label* materialize_true, 472 Label* materialize_true,
473 Label* materialize_false) const { 473 Label* materialize_false) const {
474 NearLabel done; 474 Label done;
475 __ bind(materialize_true); 475 __ bind(materialize_true);
476 __ mov(result_register(), isolate()->factory()->true_value()); 476 __ mov(result_register(), isolate()->factory()->true_value());
477 __ jmp(&done); 477 __ jmp(&done, Label::kNear);
478 __ bind(materialize_false); 478 __ bind(materialize_false);
479 __ mov(result_register(), isolate()->factory()->false_value()); 479 __ mov(result_register(), isolate()->factory()->false_value());
480 __ bind(&done); 480 __ bind(&done);
481 } 481 }
482 482
483 483
484 void FullCodeGenerator::StackValueContext::Plug( 484 void FullCodeGenerator::StackValueContext::Plug(
485 Label* materialize_true, 485 Label* materialize_true,
486 Label* materialize_false) const { 486 Label* materialize_false) const {
487 NearLabel done; 487 Label done;
488 __ bind(materialize_true); 488 __ bind(materialize_true);
489 __ push(Immediate(isolate()->factory()->true_value())); 489 __ push(Immediate(isolate()->factory()->true_value()));
490 __ jmp(&done); 490 __ jmp(&done, Label::kNear);
491 __ bind(materialize_false); 491 __ bind(materialize_false);
492 __ push(Immediate(isolate()->factory()->false_value())); 492 __ push(Immediate(isolate()->factory()->false_value()));
493 __ bind(&done); 493 __ bind(&done);
494 } 494 }
495 495
496 496
497 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 497 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
498 Label* materialize_false) const { 498 Label* materialize_false) const {
499 ASSERT(materialize_true == true_label_); 499 ASSERT(materialize_true == true_label_);
500 ASSERT(materialize_false == false_label_); 500 ASSERT(materialize_false == false_label_);
(...skipping 119 matching lines...)
620 620
621 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 621 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
622 bool should_normalize, 622 bool should_normalize,
623 Label* if_true, 623 Label* if_true,
624 Label* if_false) { 624 Label* if_false) {
625 // Only prepare for bailouts before splits if we're in a test 625 // Only prepare for bailouts before splits if we're in a test
626 // context. Otherwise, we let the Visit function deal with the 626 // context. Otherwise, we let the Visit function deal with the
627 // preparation to avoid preparing with the same AST id twice. 627 // preparation to avoid preparing with the same AST id twice.
628 if (!context()->IsTest() || !info_->IsOptimizable()) return; 628 if (!context()->IsTest() || !info_->IsOptimizable()) return;
629 629
630 NearLabel skip; 630 Label skip;
631 if (should_normalize) __ jmp(&skip); 631 if (should_normalize) __ jmp(&skip, Label::kNear);
632 632
633 ForwardBailoutStack* current = forward_bailout_stack_; 633 ForwardBailoutStack* current = forward_bailout_stack_;
634 while (current != NULL) { 634 while (current != NULL) {
635 PrepareForBailout(current->expr(), state); 635 PrepareForBailout(current->expr(), state);
636 current = current->parent(); 636 current = current->parent();
637 } 637 }
638 638
639 if (should_normalize) { 639 if (should_normalize) {
640 __ cmp(eax, isolate()->factory()->true_value()); 640 __ cmp(eax, isolate()->factory()->true_value());
641 Split(equal, if_true, if_false, NULL); 641 Split(equal, if_true, if_false, NULL);
(...skipping 213 matching lines...)
855 // Get the object to enumerate over. Both SpiderMonkey and JSC 855 // Get the object to enumerate over. Both SpiderMonkey and JSC
856 // ignore null and undefined in contrast to the specification; see 856 // ignore null and undefined in contrast to the specification; see
857 // ECMA-262 section 12.6.4. 857 // ECMA-262 section 12.6.4.
858 VisitForAccumulatorValue(stmt->enumerable()); 858 VisitForAccumulatorValue(stmt->enumerable());
859 __ cmp(eax, isolate()->factory()->undefined_value()); 859 __ cmp(eax, isolate()->factory()->undefined_value());
860 __ j(equal, &exit); 860 __ j(equal, &exit);
861 __ cmp(eax, isolate()->factory()->null_value()); 861 __ cmp(eax, isolate()->factory()->null_value());
862 __ j(equal, &exit); 862 __ j(equal, &exit);
863 863
864 // Convert the object to a JS object. 864 // Convert the object to a JS object.
865 NearLabel convert, done_convert; 865 Label convert, done_convert;
866 __ test(eax, Immediate(kSmiTagMask)); 866 __ test(eax, Immediate(kSmiTagMask));
867 __ j(zero, &convert); 867 __ j(zero, &convert, Label::kNear);
868 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx); 868 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
869 __ j(above_equal, &done_convert); 869 __ j(above_equal, &done_convert, Label::kNear);
870 __ bind(&convert); 870 __ bind(&convert);
871 __ push(eax); 871 __ push(eax);
872 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 872 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
873 __ bind(&done_convert); 873 __ bind(&done_convert);
874 __ push(eax); 874 __ push(eax);
875 875
876 // Check cache validity in generated code. This is a fast case for 876 // Check cache validity in generated code. This is a fast case for
877 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 877 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
878 // guarantee cache validity, call the runtime system to check cache 878 // guarantee cache validity, call the runtime system to check cache
879 // validity or get the property names in a fixed array. 879 // validity or get the property names in a fixed array.
(...skipping 16 matching lines...)
896 __ j(equal, &call_runtime); 896 __ j(equal, &call_runtime);
897 897
898 // Check that there is an enum cache in the non-empty instance 898 // Check that there is an enum cache in the non-empty instance
899 // descriptors (edx). This is the case if the next enumeration 899 // descriptors (edx). This is the case if the next enumeration
900 // index field does not contain a smi. 900 // index field does not contain a smi.
901 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset)); 901 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
902 __ test(edx, Immediate(kSmiTagMask)); 902 __ test(edx, Immediate(kSmiTagMask));
903 __ j(zero, &call_runtime); 903 __ j(zero, &call_runtime);
904 904
905 // For all objects but the receiver, check that the cache is empty. 905 // For all objects but the receiver, check that the cache is empty.
906 NearLabel check_prototype; 906 Label check_prototype;
907 __ cmp(ecx, Operand(eax)); 907 __ cmp(ecx, Operand(eax));
908 __ j(equal, &check_prototype); 908 __ j(equal, &check_prototype, Label::kNear);
909 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 909 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
910 __ cmp(edx, isolate()->factory()->empty_fixed_array()); 910 __ cmp(edx, isolate()->factory()->empty_fixed_array());
911 __ j(not_equal, &call_runtime); 911 __ j(not_equal, &call_runtime);
912 912
913 // Load the prototype from the map and loop if non-null. 913 // Load the prototype from the map and loop if non-null.
914 __ bind(&check_prototype); 914 __ bind(&check_prototype);
915 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset)); 915 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
916 __ cmp(ecx, isolate()->factory()->null_value()); 916 __ cmp(ecx, isolate()->factory()->null_value());
917 __ j(not_equal, &next); 917 __ j(not_equal, &next);
918 918
919 // The enum cache is valid. Load the map of the object being 919 // The enum cache is valid. Load the map of the object being
920 // iterated over and use the cache for the iteration. 920 // iterated over and use the cache for the iteration.
921 NearLabel use_cache; 921 Label use_cache;
922 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); 922 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
923 __ jmp(&use_cache); 923 __ jmp(&use_cache, Label::kNear);
924 924
925 // Get the set of properties to enumerate. 925 // Get the set of properties to enumerate.
926 __ bind(&call_runtime); 926 __ bind(&call_runtime);
927 __ push(eax); // Duplicate the enumerable object on the stack. 927 __ push(eax); // Duplicate the enumerable object on the stack.
928 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 928 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
929 929
930 // If we got a map from the runtime call, we can do a fast 930 // If we got a map from the runtime call, we can do a fast
931 // modification check. Otherwise, we got a fixed array, and we have 931 // modification check. Otherwise, we got a fixed array, and we have
932 // to do a slow check. 932 // to do a slow check.
933 NearLabel fixed_array; 933 Label fixed_array;
934 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), 934 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
935 isolate()->factory()->meta_map()); 935 isolate()->factory()->meta_map());
936 __ j(not_equal, &fixed_array); 936 __ j(not_equal, &fixed_array, Label::kNear);
937 937
938 // We got a map in register eax. Get the enumeration cache from it. 938 // We got a map in register eax. Get the enumeration cache from it.
939 __ bind(&use_cache); 939 __ bind(&use_cache);
940 __ mov(ecx, FieldOperand(eax, Map::kInstanceDescriptorsOffset)); 940 __ mov(ecx, FieldOperand(eax, Map::kInstanceDescriptorsOffset));
941 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset)); 941 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
942 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 942 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
943 943
944 // Setup the four remaining stack slots. 944 // Setup the four remaining stack slots.
945 __ push(eax); // Map. 945 __ push(eax); // Map.
946 __ push(edx); // Enumeration cache. 946 __ push(edx); // Enumeration cache.
(...skipping 19 matching lines...)
966 // Get the current entry of the array into register ebx. 966 // Get the current entry of the array into register ebx.
967 __ mov(ebx, Operand(esp, 2 * kPointerSize)); 967 __ mov(ebx, Operand(esp, 2 * kPointerSize));
968 __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize)); 968 __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
969 969
970 // Get the expected map from the stack or a zero map in the 970 // Get the expected map from the stack or a zero map in the
971 // permanent slow case into register edx. 971 // permanent slow case into register edx.
972 __ mov(edx, Operand(esp, 3 * kPointerSize)); 972 __ mov(edx, Operand(esp, 3 * kPointerSize));
973 973
974 // Check if the expected map still matches that of the enumerable. 974 // Check if the expected map still matches that of the enumerable.
975 // If not, we have to filter the key. 975 // If not, we have to filter the key.
976 NearLabel update_each; 976 Label update_each;
977 __ mov(ecx, Operand(esp, 4 * kPointerSize)); 977 __ mov(ecx, Operand(esp, 4 * kPointerSize));
978 __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset)); 978 __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
979 __ j(equal, &update_each); 979 __ j(equal, &update_each, Label::kNear);
980 980
981 // Convert the entry to a string or null if it isn't a property 981 // Convert the entry to a string or null if it isn't a property
982 // anymore. If the property has been removed while iterating, we 982 // anymore. If the property has been removed while iterating, we
983 // just skip it. 983 // just skip it.
984 __ push(ecx); // Enumerable. 984 __ push(ecx); // Enumerable.
985 __ push(ebx); // Current entry. 985 __ push(ebx); // Current entry.
986 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 986 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
987 __ test(eax, Operand(eax)); 987 __ test(eax, Operand(eax));
988 __ j(equal, loop_statement.continue_target()); 988 __ j(equal, loop_statement.continue_target());
989 __ mov(ebx, Operand(eax)); 989 __ mov(ebx, Operand(eax));
(...skipping 87 matching lines...)
1077 // If no outer scope calls eval, we do not need to check more 1077 // If no outer scope calls eval, we do not need to check more
1078 // context extensions. If we have reached an eval scope, we check 1078 // context extensions. If we have reached an eval scope, we check
1079 // all extensions from this point. 1079 // all extensions from this point.
1080 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; 1080 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
1081 s = s->outer_scope(); 1081 s = s->outer_scope();
1082 } 1082 }
1083 1083
1084 if (s != NULL && s->is_eval_scope()) { 1084 if (s != NULL && s->is_eval_scope()) {
1085 // Loop up the context chain. There is no frame effect so it is 1085 // Loop up the context chain. There is no frame effect so it is
1086 // safe to use raw labels here. 1086 // safe to use raw labels here.
1087 NearLabel next, fast; 1087 Label next, fast;
1088 if (!context.is(temp)) { 1088 if (!context.is(temp)) {
1089 __ mov(temp, context); 1089 __ mov(temp, context);
1090 } 1090 }
1091 __ bind(&next); 1091 __ bind(&next);
1092 // Terminate at global context. 1092 // Terminate at global context.
1093 __ cmp(FieldOperand(temp, HeapObject::kMapOffset), 1093 __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1094 Immediate(isolate()->factory()->global_context_map())); 1094 Immediate(isolate()->factory()->global_context_map()));
1095 __ j(equal, &fast); 1095 __ j(equal, &fast, Label::kNear);
1096 // Check that extension is NULL. 1096 // Check that extension is NULL.
1097 __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0)); 1097 __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1098 __ j(not_equal, slow); 1098 __ j(not_equal, slow);
1099 // Load next context in chain. 1099 // Load next context in chain.
1100 __ mov(temp, ContextOperand(temp, Context::CLOSURE_INDEX)); 1100 __ mov(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
1101 __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset)); 1101 __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
1102 __ jmp(&next); 1102 __ jmp(&next);
1103 __ bind(&fast); 1103 __ bind(&fast);
1104 } 1104 }
1105 1105
(...skipping 128 matching lines...)
1234 1234
1235 context()->Plug(eax); 1235 context()->Plug(eax);
1236 1236
1237 } else if (slot != NULL) { 1237 } else if (slot != NULL) {
1238 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) 1238 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1239 ? "Context slot" 1239 ? "Context slot"
1240 : "Stack slot"); 1240 : "Stack slot");
1241 if (var->mode() == Variable::CONST) { 1241 if (var->mode() == Variable::CONST) {
1242 // Constants may be the hole value if they have not been initialized. 1242 // Constants may be the hole value if they have not been initialized.
1243 // Unhole them. 1243 // Unhole them.
1244 NearLabel done; 1244 Label done;
1245 MemOperand slot_operand = EmitSlotSearch(slot, eax); 1245 MemOperand slot_operand = EmitSlotSearch(slot, eax);
1246 __ mov(eax, slot_operand); 1246 __ mov(eax, slot_operand);
1247 __ cmp(eax, isolate()->factory()->the_hole_value()); 1247 __ cmp(eax, isolate()->factory()->the_hole_value());
1248 __ j(not_equal, &done); 1248 __ j(not_equal, &done, Label::kNear);
1249 __ mov(eax, isolate()->factory()->undefined_value()); 1249 __ mov(eax, isolate()->factory()->undefined_value());
1250 __ bind(&done); 1250 __ bind(&done);
1251 context()->Plug(eax); 1251 context()->Plug(eax);
1252 } else { 1252 } else {
1253 context()->Plug(slot); 1253 context()->Plug(slot);
1254 } 1254 }
1255 1255
1256 } else { 1256 } else {
1257 Comment cmnt(masm_, "Rewritten parameter"); 1257 Comment cmnt(masm_, "Rewritten parameter");
1258 ASSERT_NOT_NULL(property); 1258 ASSERT_NOT_NULL(property);
(...skipping 22 matching lines...)
1281 EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property)); 1281 EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1282 1282
1283 // Drop key and object left on the stack by IC. 1283 // Drop key and object left on the stack by IC.
1284 context()->Plug(eax); 1284 context()->Plug(eax);
1285 } 1285 }
1286 } 1286 }
1287 1287
1288 1288
1289 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1289 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1290 Comment cmnt(masm_, "[ RegExpLiteral"); 1290 Comment cmnt(masm_, "[ RegExpLiteral");
1291 NearLabel materialized; 1291 Label materialized;
1292 // Registers will be used as follows: 1292 // Registers will be used as follows:
1293 // edi = JS function. 1293 // edi = JS function.
1294 // ecx = literals array. 1294 // ecx = literals array.
1295 // ebx = regexp literal. 1295 // ebx = regexp literal.
1296 // eax = regexp literal clone. 1296 // eax = regexp literal clone.
1297 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 1297 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1298 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset)); 1298 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
1299 int literal_offset = 1299 int literal_offset =
1300 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1300 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1301 __ mov(ebx, FieldOperand(ecx, literal_offset)); 1301 __ mov(ebx, FieldOperand(ecx, literal_offset));
1302 __ cmp(ebx, isolate()->factory()->undefined_value()); 1302 __ cmp(ebx, isolate()->factory()->undefined_value());
1303 __ j(not_equal, &materialized); 1303 __ j(not_equal, &materialized, Label::kNear);
1304 1304
1305 // Create regexp literal using runtime function 1305 // Create regexp literal using runtime function
1306 // Result will be in eax. 1306 // Result will be in eax.
1307 __ push(ecx); 1307 __ push(ecx);
1308 __ push(Immediate(Smi::FromInt(expr->literal_index()))); 1308 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1309 __ push(Immediate(expr->pattern())); 1309 __ push(Immediate(expr->pattern()));
1310 __ push(Immediate(expr->flags())); 1310 __ push(Immediate(expr->flags()));
1311 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1311 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1312 __ mov(ebx, eax); 1312 __ mov(ebx, eax);
1313 1313
(...skipping 336 matching lines...)
1650 } 1650 }
1651 1651
1652 1652
1653 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1653 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1654 Token::Value op, 1654 Token::Value op,
1655 OverwriteMode mode, 1655 OverwriteMode mode,
1656 Expression* left, 1656 Expression* left,
1657 Expression* right) { 1657 Expression* right) {
1658 // Do combined smi check of the operands. Left operand is on the 1658 // Do combined smi check of the operands. Left operand is on the
1659 // stack. Right operand is in eax. 1659 // stack. Right operand is in eax.
1660 NearLabel done, smi_case, stub_call; 1660 NearLabel smi_case;
1661 Label done, stub_call;
1661 __ pop(edx); 1662 __ pop(edx);
1662 __ mov(ecx, eax); 1663 __ mov(ecx, eax);
1663 __ or_(eax, Operand(edx)); 1664 __ or_(eax, Operand(edx));
1664 JumpPatchSite patch_site(masm_); 1665 JumpPatchSite patch_site(masm_);
1665 patch_site.EmitJumpIfSmi(eax, &smi_case); 1666 patch_site.EmitJumpIfSmi(eax, &smi_case);
1666 1667
1667 __ bind(&stub_call); 1668 __ bind(&stub_call);
1668 __ mov(eax, ecx); 1669 __ mov(eax, ecx);
1669 TypeRecordingBinaryOpStub stub(op, mode); 1670 TypeRecordingBinaryOpStub stub(op, mode);
1670 EmitCallIC(stub.GetCode(), &patch_site, expr->id()); 1671 EmitCallIC(stub.GetCode(), &patch_site, expr->id());
1671 __ jmp(&done); 1672 __ jmp(&done, Label::kNear);
1672 1673
1673 // Smi case. 1674 // Smi case.
1674 __ bind(&smi_case); 1675 __ bind(&smi_case);
1675 __ mov(eax, edx); // Copy left operand in case of a stub call. 1676 __ mov(eax, edx); // Copy left operand in case of a stub call.
1676 1677
1677 switch (op) { 1678 switch (op) {
1678 case Token::SAR: 1679 case Token::SAR:
1679 __ SmiUntag(eax); 1680 __ SmiUntag(eax);
1680 __ SmiUntag(ecx); 1681 __ SmiUntag(ecx);
1681 __ sar_cl(eax); // No checks of result necessary 1682 __ sar_cl(eax); // No checks of result necessary
(...skipping 32 matching lines...)
1714 break; 1715 break;
1715 case Token::SUB: 1716 case Token::SUB:
1716 __ sub(eax, Operand(ecx)); 1717 __ sub(eax, Operand(ecx));
1717 __ j(overflow, &stub_call); 1718 __ j(overflow, &stub_call);
1718 break; 1719 break;
1719 case Token::MUL: { 1720 case Token::MUL: {
1720 __ SmiUntag(eax); 1721 __ SmiUntag(eax);
1721 __ imul(eax, Operand(ecx)); 1722 __ imul(eax, Operand(ecx));
1722 __ j(overflow, &stub_call); 1723 __ j(overflow, &stub_call);
1723 __ test(eax, Operand(eax)); 1724 __ test(eax, Operand(eax));
1724 __ j(not_zero, &done, taken); 1725 __ j(not_zero, &done, taken, Label::kNear);
1725 __ mov(ebx, edx); 1726 __ mov(ebx, edx);
1726 __ or_(ebx, Operand(ecx)); 1727 __ or_(ebx, Operand(ecx));
1727 __ j(negative, &stub_call); 1728 __ j(negative, &stub_call);
1728 break; 1729 break;
1729 } 1730 }
1730 case Token::BIT_OR: 1731 case Token::BIT_OR:
1731 __ or_(eax, Operand(ecx)); 1732 __ or_(eax, Operand(ecx));
1732 break; 1733 break;
1733 case Token::BIT_AND: 1734 case Token::BIT_AND:
1734 __ and_(eax, Operand(ecx)); 1735 __ and_(eax, Operand(ecx));
(...skipping 1101 matching lines...)
2836 __ CallStub(&stub); 2837 __ CallStub(&stub);
2837 context()->Plug(eax); 2838 context()->Plug(eax);
2838 } 2839 }
2839 2840
2840 2841
2841 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) { 2842 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2842 ASSERT(args->length() == 1); 2843 ASSERT(args->length() == 1);
2843 2844
2844 VisitForAccumulatorValue(args->at(0)); // Load the object. 2845 VisitForAccumulatorValue(args->at(0)); // Load the object.
2845 2846
2846 NearLabel done; 2847 Label done;
2847 // If the object is a smi return the object. 2848 // If the object is a smi return the object.
2848 __ test(eax, Immediate(kSmiTagMask)); 2849 __ test(eax, Immediate(kSmiTagMask));
2849 __ j(zero, &done); 2850 __ j(zero, &done, Label::kNear);
2850 // If the object is not a value type, return the object. 2851 // If the object is not a value type, return the object.
2851 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx); 2852 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
2852 __ j(not_equal, &done); 2853 __ j(not_equal, &done, Label::kNear);
2853 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset)); 2854 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
2854 2855
2855 __ bind(&done); 2856 __ bind(&done);
2856 context()->Plug(eax); 2857 context()->Plug(eax);
2857 } 2858 }
2858 2859
2859 2860
2860 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) { 2861 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2861 // Load the arguments on the stack and call the runtime function. 2862 // Load the arguments on the stack and call the runtime function.
2862 ASSERT(args->length() == 2); 2863 ASSERT(args->length() == 2);
(...skipping 10 matching lines...)
2873 } 2874 }
2874 2875
2875 2876
2876 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) { 2877 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2877 ASSERT(args->length() == 2); 2878 ASSERT(args->length() == 2);
2878 2879
2879 VisitForStackValue(args->at(0)); // Load the object. 2880 VisitForStackValue(args->at(0)); // Load the object.
2880 VisitForAccumulatorValue(args->at(1)); // Load the value. 2881 VisitForAccumulatorValue(args->at(1)); // Load the value.
2881 __ pop(ebx); // eax = value. ebx = object. 2882 __ pop(ebx); // eax = value. ebx = object.
2882 2883
2883 NearLabel done; 2884 Label done;
2884 // If the object is a smi, return the value. 2885 // If the object is a smi, return the value.
2885 __ test(ebx, Immediate(kSmiTagMask)); 2886 __ test(ebx, Immediate(kSmiTagMask));
2886 __ j(zero, &done); 2887 __ j(zero, &done, Label::kNear);
2887 2888
2888 // If the object is not a value type, return the value. 2889 // If the object is not a value type, return the value.
2889 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx); 2890 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
2890 __ j(not_equal, &done); 2891 __ j(not_equal, &done, Label::kNear);
2891 2892
2892 // Store the value. 2893 // Store the value.
2893 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax); 2894 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
2894 // Update the write barrier. Save the value as it will be 2895 // Update the write barrier. Save the value as it will be
2895 // overwritten by the write barrier code and is needed afterward. 2896 // overwritten by the write barrier code and is needed afterward.
2896 __ mov(edx, eax); 2897 __ mov(edx, eax);
2897 __ RecordWrite(ebx, JSValue::kValueOffset, edx, ecx); 2898 __ RecordWrite(ebx, JSValue::kValueOffset, edx, ecx);
2898 2899
2899 __ bind(&done); 2900 __ bind(&done);
2900 context()->Plug(eax); 2901 context()->Plug(eax);
(...skipping 921 matching lines...)
3822 3823
3823 // We need a second deoptimization point after loading the value 3824 // We need a second deoptimization point after loading the value
3824 // in case evaluating the property load may have a side effect. 3825 // in case evaluating the property load may have a side effect.
3825 if (assign_type == VARIABLE) { 3826 if (assign_type == VARIABLE) {
3826 PrepareForBailout(expr->expression(), TOS_REG); 3827 PrepareForBailout(expr->expression(), TOS_REG);
3827 } else { 3828 } else {
3828 PrepareForBailoutForId(expr->CountId(), TOS_REG); 3829 PrepareForBailoutForId(expr->CountId(), TOS_REG);
3829 } 3830 }
3830 3831
3831 // Call ToNumber only if operand is not a smi. 3832 // Call ToNumber only if operand is not a smi.
3832 NearLabel no_conversion; 3833 Label no_conversion;
3833 if (ShouldInlineSmiCase(expr->op())) { 3834 if (ShouldInlineSmiCase(expr->op())) {
3834 __ test(eax, Immediate(kSmiTagMask)); 3835 __ test(eax, Immediate(kSmiTagMask));
3835 __ j(zero, &no_conversion); 3836 __ j(zero, &no_conversion, Label::kNear);
3836 } 3837 }
3837 ToNumberStub convert_stub; 3838 ToNumberStub convert_stub;
3838 __ CallStub(&convert_stub); 3839 __ CallStub(&convert_stub);
3839 __ bind(&no_conversion); 3840 __ bind(&no_conversion);
3840 3841
3841 // Save result for postfix expressions. 3842 // Save result for postfix expressions.
3842 if (expr->is_postfix()) { 3843 if (expr->is_postfix()) {
3843 if (!context()->IsEffect()) { 3844 if (!context()->IsEffect()) {
3844 // Save the result on the stack. If we have a named or keyed property 3845 // Save the result on the stack. If we have a named or keyed property
3845 // we store the result under the receiver that is currently on top 3846 // we store the result under the receiver that is currently on top
3846 // of the stack. 3847 // of the stack.
3847 switch (assign_type) { 3848 switch (assign_type) {
3848 case VARIABLE: 3849 case VARIABLE:
3849 __ push(eax); 3850 __ push(eax);
3850 break; 3851 break;
3851 case NAMED_PROPERTY: 3852 case NAMED_PROPERTY:
3852 __ mov(Operand(esp, kPointerSize), eax); 3853 __ mov(Operand(esp, kPointerSize), eax);
3853 break; 3854 break;
3854 case KEYED_PROPERTY: 3855 case KEYED_PROPERTY:
3855 __ mov(Operand(esp, 2 * kPointerSize), eax); 3856 __ mov(Operand(esp, 2 * kPointerSize), eax);
3856 break; 3857 break;
3857 } 3858 }
3858 } 3859 }
3859 } 3860 }
3860 3861
3861 // Inline smi case if we are in a loop. 3862 // Inline smi case if we are in a loop.
3862 NearLabel stub_call, done; 3863 NearLabel done;
3864 Label stub_call;
3863 JumpPatchSite patch_site(masm_); 3865 JumpPatchSite patch_site(masm_);
3864 3866
3865 if (ShouldInlineSmiCase(expr->op())) { 3867 if (ShouldInlineSmiCase(expr->op())) {
3866 if (expr->op() == Token::INC) { 3868 if (expr->op() == Token::INC) {
3867 __ add(Operand(eax), Immediate(Smi::FromInt(1))); 3869 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3868 } else { 3870 } else {
3869 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); 3871 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3870 } 3872 }
3871 __ j(overflow, &stub_call); 3873 __ j(overflow, &stub_call, Label::kNear);
3872 // We could eliminate this smi check if we split the code at 3874 // We could eliminate this smi check if we split the code at
3873 // the first smi check before calling ToNumber. 3875 // the first smi check before calling ToNumber.
3874 patch_site.EmitJumpIfSmi(eax, &done); 3876 patch_site.EmitJumpIfSmi(eax, &done);
3875 3877
3876 __ bind(&stub_call); 3878 __ bind(&stub_call);
3877 // Call stub. Undo operation first. 3879 // Call stub. Undo operation first.
3878 if (expr->op() == Token::INC) { 3880 if (expr->op() == Token::INC) {
3879 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); 3881 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3880 } else { 3882 } else {
3881 __ add(Operand(eax), Immediate(Smi::FromInt(1))); 3883 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
(...skipping 436 matching lines...)
4318 // And return. 4320 // And return.
4319 __ ret(0); 4321 __ ret(0);
4320 } 4322 }
4321 4323
4322 4324
4323 #undef __ 4325 #undef __
4324 4326
4325 } } // namespace v8::internal 4327 } } // namespace v8::internal
4326 4328
4327 #endif // V8_TARGET_ARCH_IA32 4329 #endif // V8_TARGET_ARCH_IA32
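
For context on why the hint matters on ia32: a conditional jump whose target lies within -128..+127 bytes can be encoded in 2 bytes (0x70+cc with a rel8 displacement), while the general form takes 6 bytes (0x0F 0x80+cc with a rel32 displacement), so marking hot, short-range branches as near keeps the generated code smaller. The sketch below is not V8's assembler; it is a hypothetical, self-contained illustration of how an emitter might use a near hint to pick the short encoding when the displacement fits (displacements are assumed to be precomputed relative to the end of the chosen instruction).

  #include <cstdint>
  #include <cstdio>
  #include <vector>

  // Hypothetical emitter: append a conditional jump with condition code cc
  // (0x0..0xF, e.g. 0x3 == above_equal) and signed displacement disp.
  // With a near hint and a displacement that fits in a signed byte, use the
  // 2-byte rel8 form; otherwise fall back to the 6-byte rel32 form.
  static void EmitJcc(std::vector<uint8_t>* code, int cc, int32_t disp,
                      bool near_hint) {
    if (near_hint && disp >= -128 && disp <= 127) {
      code->push_back(static_cast<uint8_t>(0x70 + cc));   // short Jcc, rel8
      code->push_back(static_cast<uint8_t>(disp & 0xFF));
    } else {
      code->push_back(0x0F);                               // long Jcc, rel32
      code->push_back(static_cast<uint8_t>(0x80 + cc));
      for (int i = 0; i < 4; ++i) {
        code->push_back(static_cast<uint8_t>((disp >> (8 * i)) & 0xFF));
      }
    }
  }

  int main() {
    std::vector<uint8_t> code;
    EmitJcc(&code, 0x3, 12, /*near_hint=*/true);     // fits: 2 bytes emitted
    EmitJcc(&code, 0x3, 4096, /*near_hint=*/true);   // too far: 6 bytes emitted
    std::printf("emitted %zu bytes\n", code.size()); // prints: emitted 8 bytes
    return 0;
  }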