Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(9)

Side by Side Diff: src/x64/codegen-x64.cc

Issue 149005: X64: Added for-in (Closed)
Patch Set: Created 11 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 420 matching lines...) Expand 10 before | Expand all | Expand 10 after
431 ExternalReference stack_guard_limit = 431 ExternalReference stack_guard_limit =
432 ExternalReference::address_of_stack_guard_limit(); 432 ExternalReference::address_of_stack_guard_limit();
433 __ movq(kScratchRegister, stack_guard_limit); 433 __ movq(kScratchRegister, stack_guard_limit);
434 __ cmpq(rsp, Operand(kScratchRegister, 0)); 434 __ cmpq(rsp, Operand(kScratchRegister, 0));
435 deferred->Branch(below); 435 deferred->Branch(below);
436 deferred->BindExit(); 436 deferred->BindExit();
437 } 437 }
438 } 438 }
439 439
440 440
// Visit a single statement under the spilled-code discipline: registers
// are flushed back to the virtual frame before control leaves this call.
441 void CodeGenerator::VisitAndSpill(Statement* statement) {
442 // TODO(X64): No architecture specific code. Move to shared location.
443 ASSERT(in_spilled_code());
// Temporarily leave spilled mode so the visitor may allocate registers.
444 set_in_spilled_code(false);
445 Visit(statement);
// The statement may have exited via a jump, leaving no current frame.
446 if (frame_ != NULL) {
447 frame_->SpillAll();
448 }
449 set_in_spilled_code(true);
450 }
451
452
// Generate code for each statement in the list in turn.  The loop stops
// early once a statement ends in an unconditional transfer of control
// (has_valid_frame() becomes false) — the rest is unreachable from here.
441 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { 453 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
442 ASSERT(!in_spilled_code()); 454 ASSERT(!in_spilled_code());
443 for (int i = 0; has_valid_frame() && i < statements->length(); i++) { 455 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
444 Visit(statements->at(i)); 456 Visit(statements->at(i));
445 } 457 }
446 } 458 }
447 459
448 460
449 void CodeGenerator::VisitBlock(Block* node) { 461 void CodeGenerator::VisitBlock(Block* node) {
450 ASSERT(!in_spilled_code()); 462 ASSERT(!in_spilled_code());
(...skipping 563 matching lines...) Expand 10 before | Expand all | Expand 10 after
1014 break; 1026 break;
1015 } 1027 }
1016 } 1028 }
1017 1029
1018 DecrementLoopNesting(); 1030 DecrementLoopNesting();
1019 node->continue_target()->Unuse(); 1031 node->continue_target()->Unuse();
1020 node->break_target()->Unuse(); 1032 node->break_target()->Unuse();
1021 } 1033 }
1022 1034
1023 1035
// Generate code for a for-in statement.  The enumerable is converted to
// a JSObject if needed, its property names are fetched from the runtime
// (either a map with a valid enum cache, or a FixedArray of names in the
// slow case), and the body runs once per name with the current name
// stored into the 'each' expression.
1024 void CodeGenerator::VisitForInStatement(ForInStatement* a) { 1036 void CodeGenerator::VisitForInStatement(ForInStatement* node) {
1025 UNIMPLEMENTED(); 1037 ASSERT(!in_spilled_code());
1038 VirtualFrame::SpilledScope spilled_scope;
1039 Comment cmnt(masm_, "[ ForInStatement");
1040 CodeForStatementPosition(node);
1041
1042 JumpTarget primitive;
1043 JumpTarget jsobject;
1044 JumpTarget fixed_array;
1045 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
1046 JumpTarget end_del_check;
1047 JumpTarget exit;
1048
1049 // Get the object to enumerate over (converted to JSObject).
1050 LoadAndSpill(node->enumerable());
1051
1052 // Both SpiderMonkey and kjs ignore null and undefined in contrast
1053 // to the specification. 12.6.4 mandates a call to ToObject.
1054 frame_->EmitPop(rax);
1055
1056 // rax: value to be iterated over
1057 __ Cmp(rax, Factory::undefined_value());
1058 exit.Branch(equal);
1059 __ Cmp(rax, Factory::null_value());
1060 exit.Branch(equal);
1061
1062 // Stack layout in body:
1063 // [iteration counter (smi)] <- slot 0
1064 // [length of array] <- slot 1
1065 // [FixedArray] <- slot 2
1066 // [Map or 0] <- slot 3
1067 // [Object] <- slot 4
1068
1069 // Check if enumerable is already a JSObject.
1070 // rax: value to be iterated over
1071 __ testl(rax, Immediate(kSmiTagMask));
1072 primitive.Branch(zero);
1073 __ movq(rcx, FieldOperand(rax, HeapObject::kMapOffset));
1074 __ movzxbq(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
1075 __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
William Hesse 2009/06/25 22:39:10 Why not CmpObjectType macro? Or why not movb and
1076 jsobject.Branch(above_equal);
1077
1078 primitive.Bind();
1079 frame_->EmitPush(rax);
1080 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
1081 // The builtin call returns the converted value in rax, which is where
1081 // the jsobject path below expects it.
1082
1083 jsobject.Bind();
1084 // Get the set of properties (as a FixedArray or Map).
1085 // rax: value to be iterated over
1086 frame_->EmitPush(rax); // push the object being iterated over (slot 4)
1087
1088 frame_->EmitPush(rax); // push the Object (slot 4) for the runtime call
1089 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1090
1091 // If we got a Map, we can do a fast modification check.
1092 // Otherwise, we got a FixedArray, and we have to do a slow check.
1093 // rax: map or fixed array (result from call to
1094 // Runtime::kGetPropertyNamesFast)
1095 __ movq(rdx, rax);
1096 __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1097 __ Cmp(rcx, Factory::meta_map());
1098 fixed_array.Branch(not_equal);
1099
1100 // Get enum cache.
1101 // rax: map (result from call to Runtime::kGetPropertyNamesFast)
1102 __ movq(rcx, rax);
1103 __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset));
1104 // Get the bridge array held in the enumeration index field.
1105 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
1106 // Get the cache from the bridge array.
1107 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1108
1109 frame_->EmitPush(rax); // <- slot 3
1110 frame_->EmitPush(rdx); // <- slot 2
1111 __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
1112 __ shl(rax, Immediate(kSmiTagSize)); // tag the length as a smi
1113 frame_->EmitPush(rax); // <- slot 1
1114 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
1115 entry.Jump();
1116
1117 fixed_array.Bind();
1118 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
1119 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3
1120 frame_->EmitPush(rax); // <- slot 2
1121
1122 // Push the length of the array and the initial index onto the stack.
1123 __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1124 __ shl(rax, Immediate(kSmiTagSize)); // tag the length as a smi
1125 frame_->EmitPush(rax); // <- slot 1
1126 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
1127
1128 // Condition.
1129 entry.Bind();
1130 // Grab the current frame's height for the break and continue
1131 // targets only after all the state is pushed on the frame.
1132 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1133 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1134
1135 __ movq(rax, frame_->ElementAt(0)); // load the current count
1136 __ cmpq(rax, frame_->ElementAt(1)); // compare to the array length
1137 node->break_target()->Branch(above_equal);
1138
1139 // Get the i'th entry of the array.
1140 __ movq(rdx, frame_->ElementAt(2));
1141 ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1142 // Multiplier is times_4 since rax is already a Smi.
1143 __ movq(rbx, Operand(rdx, rax, times_4,
1144 FixedArray::kHeaderSize - kHeapObjectTag));
1145
1146 // Get the expected map from the stack, or a zero map in the
1147 // permanent slow case.  rax: current iteration count.
1148 // rbx: i'th entry of the enum cache.
1149 __ movq(rdx, frame_->ElementAt(3));
1150 // Check if the expected map still matches that of the enumerable.
1151 // If not, we have to filter the key.
1152 // rax: current iteration count
1153 // rbx: i'th entry of the enum cache
1154 // rdx: expected map value
1155 __ movq(rcx, frame_->ElementAt(4));
1156 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
1157 __ cmpq(rcx, rdx);
1158 end_del_check.Branch(equal);
1159
1160 // Convert the entry to a string (or null if it isn't a property anymore).
1161 frame_->EmitPush(frame_->ElementAt(4)); // push enumerable
1162 frame_->EmitPush(rbx); // push entry
1163 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
1164 __ movq(rbx, rax);
1165
1166 // If the property has been removed while iterating, we just skip it.
1167 __ Cmp(rbx, Factory::null_value());
1168 node->continue_target()->Branch(equal);
1169
1170 end_del_check.Bind();
1171 // Store the entry in the 'each' expression and take another spin in the
1172 // loop.  rbx: i'th entry of the enum cache (or the string thereof).
1173 frame_->EmitPush(rbx);
1174 { Reference each(this, node->each());
1175 // Loading a reference may leave the frame in an unspilled state.
1176 frame_->SpillAll();
1177 if (!each.is_illegal()) {
1178 if (each.size() > 0) {
1179 frame_->EmitPush(frame_->ElementAt(each.size()));
1180 }
1181 // If the reference was to a slot we rely on the convenient property
1182 // that it doesn't matter whether a value (eg, rbx pushed above) is
1183 // right on top of or right underneath a zero-sized reference.
1184 each.SetValue(NOT_CONST_INIT);
1185 if (each.size() > 0) {
1186 // It's safe to pop the value lying on top of the reference before
1187 // unloading the reference itself (which preserves the top of stack,
1188 // ie, now the topmost value of the non-zero sized reference), since
1189 // we will discard the top of stack after unloading the reference
1190 // anyway.
1191 frame_->Drop();
1192 }
1193 }
1194 }
1195 // Unloading a reference may leave the frame in an unspilled state.
1196 frame_->SpillAll();
1197
1198 // Discard the i'th entry pushed above or else the remainder of the
1199 // reference, whichever is currently on top of the stack.
1200 frame_->Drop();
1201
1202 // Body.
1203 CheckStack(); // TODO(1222600): ignore if body contains calls.
1204 VisitAndSpill(node->body());
1205
1206 // Next. Reestablish a spilled frame in case we are coming here via
1207 // a continue in the body.
1208 node->continue_target()->Bind();
William Hesse 2009/06/25 22:39:10 // TODO(make an issue?) Consider making a SpilledT
1209 frame_->SpillAll();
1210 frame_->EmitPop(rax);
1211 __ addq(rax, Immediate(Smi::FromInt(1)));
1212 frame_->EmitPush(rax);
1213 entry.Jump();
1214
1215 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
1216 // any frame.
1217 node->break_target()->Bind();
1218 frame_->Drop(5);
1219
1220 // Exit.
1221 exit.Bind();
1222
1223 node->continue_target()->Unuse();
1224 node->break_target()->Unuse();
1026 } 1225 }
1027 1226
// TODO(X64): try/catch statements are not yet implemented in the x64
// code generator.
1028 void CodeGenerator::VisitTryCatch(TryCatch* a) { 1227 void CodeGenerator::VisitTryCatch(TryCatch* a) {
1029 UNIMPLEMENTED(); 1228 UNIMPLEMENTED();
1030 } 1229 }
1031 1230
// TODO(X64): try/finally statements are not yet implemented in the x64
// code generator.
1032 void CodeGenerator::VisitTryFinally(TryFinally* a) { 1231 void CodeGenerator::VisitTryFinally(TryFinally* a) {
1033 UNIMPLEMENTED(); 1232 UNIMPLEMENTED();
1034 } 1233 }
1035 1234
(...skipping 1549 matching lines...) Expand 10 before | Expand all | Expand 10 after
2585 } 2784 }
2586 2785
2587 2786
// TODO(X64): the %_ValueOf intrinsic is not yet implemented in the x64
// code generator.
2588 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { 2787 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
2589 UNIMPLEMENTED(); 2788 UNIMPLEMENTED();
2590 } 2789 }
2591 2790
2592 // ----------------------------------------------------------------------------- 2791 // -----------------------------------------------------------------------------
2593 // CodeGenerator implementation of Expressions 2792 // CodeGenerator implementation of Expressions
2594 2793
// Load an expression onto the frame under the spilled-code discipline:
// once the value has been loaded, all registers are flushed back to the
// virtual frame.
2794 void CodeGenerator::LoadAndSpill(Expression* expression,
2795 TypeofState typeof_state) {
2796 // TODO(x64): No architecture specific code. Move to shared location.
2797 ASSERT(in_spilled_code());
// Temporarily leave spilled mode so Load may allocate registers.
2798 set_in_spilled_code(false);
2799 Load(expression, typeof_state);
2800 frame_->SpillAll();
2801 set_in_spilled_code(true);
2802 }
2803
2804
2595 void CodeGenerator::Load(Expression* x, TypeofState typeof_state) { 2805 void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
2596 #ifdef DEBUG 2806 #ifdef DEBUG
2597 int original_height = frame_->height(); 2807 int original_height = frame_->height();
2598 #endif 2808 #endif
2599 ASSERT(!in_spilled_code()); 2809 ASSERT(!in_spilled_code());
2600 JumpTarget true_target; 2810 JumpTarget true_target;
2601 JumpTarget false_target; 2811 JumpTarget false_target;
2602 ControlDestination dest(&true_target, &false_target, true); 2812 ControlDestination dest(&true_target, &false_target, true);
2603 LoadCondition(x, typeof_state, &dest, false); 2813 LoadCondition(x, typeof_state, &dest, false);
2604 2814
(...skipping 3199 matching lines...) Expand 10 before | Expand all | Expand 10 after
5804 break; 6014 break;
5805 default: 6015 default:
5806 UNREACHABLE(); 6016 UNREACHABLE();
5807 } 6017 }
5808 } 6018 }
5809 6019
5810 6020
5811 #undef __ 6021 #undef __
5812 6022
5813 } } // namespace v8::internal 6023 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698