Chromium Code Reviews

Side by Side Diff: src/x64/full-codegen-x64.cc

Issue 199903002: Introduce Push and Pop macro instructions for x64 (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge (created 6 years, 9 months ago)
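Note for readers skimming the diff: the mechanical change throughout this file is that raw assembler emits such as __ push(rdx) / __ pop(rax) become the new Push / Pop macro instructions (__ Push(rdx) / __ Pop(rax)). The toy sketch below is an illustration only, not the actual V8 MacroAssembler; it assumes Push/Pop are thin wrappers that forward to pushq/popq, so every call site goes through one central place whose behavior can be changed later without touching the call sites again.

// Illustrative, self-contained sketch (assumed, simplified stand-ins for V8 types).
#include <cstdio>

struct Register { const char* name; };

// A toy "assembler" that just prints the instruction it would emit.
class Assembler {
 public:
  void pushq(Register r) { std::printf("pushq %%%s\n", r.name); }
  void popq(Register r)  { std::printf("popq %%%s\n", r.name); }
};

// The pattern this CL introduces, sketched: capitalized macro instructions
// that call sites use instead of the raw push/pop mnemonics.
class MacroAssembler : public Assembler {
 public:
  void Push(Register src) { pushq(src); }  // single place to adjust later
  void Pop(Register dst)  { popq(dst); }
};

int main() {
  MacroAssembler masm;
  Register rdx = {"rdx"};
  masm.Push(rdx);  // what "__ Push(rdx)" boils down to in this toy model
  masm.Pop(rdx);
}

As the old column of the diff shows, Push was already used for Smis, handles, and roots; this patch additionally routes plain register and operand pushes and pops through the macro instructions, while the frame teardown in the return sequence switches from pop(rbp) to the explicit popq(rbp).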
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 155 matching lines...)
166 166
167 { Comment cmnt(masm_, "[ Allocate locals"); 167 { Comment cmnt(masm_, "[ Allocate locals");
168 int locals_count = info->scope()->num_stack_slots(); 168 int locals_count = info->scope()->num_stack_slots();
169 // Generators allocate locals, if any, in context slots. 169 // Generators allocate locals, if any, in context slots.
170 ASSERT(!info->function()->is_generator() || locals_count == 0); 170 ASSERT(!info->function()->is_generator() || locals_count == 0);
171 if (locals_count == 1) { 171 if (locals_count == 1) {
172 __ PushRoot(Heap::kUndefinedValueRootIndex); 172 __ PushRoot(Heap::kUndefinedValueRootIndex);
173 } else if (locals_count > 1) { 173 } else if (locals_count > 1) {
174 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 174 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
175 for (int i = 0; i < locals_count; i++) { 175 for (int i = 0; i < locals_count; i++) {
176 __ push(rdx); 176 __ Push(rdx);
177 } 177 }
178 } 178 }
179 } 179 }
180 180
181 bool function_in_register = true; 181 bool function_in_register = true;
182 182
183 // Possibly allocate a local context. 183 // Possibly allocate a local context.
184 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 184 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
185 if (heap_slots > 0) { 185 if (heap_slots > 0) {
186 Comment cmnt(masm_, "[ Allocate context"); 186 Comment cmnt(masm_, "[ Allocate context");
187 // Argument to NewContext is the function, which is still in rdi. 187 // Argument to NewContext is the function, which is still in rdi.
188 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { 188 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
189 __ push(rdi); 189 __ Push(rdi);
190 __ Push(info->scope()->GetScopeInfo()); 190 __ Push(info->scope()->GetScopeInfo());
191 __ CallRuntime(Runtime::kNewGlobalContext, 2); 191 __ CallRuntime(Runtime::kNewGlobalContext, 2);
192 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { 192 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
193 FastNewContextStub stub(heap_slots); 193 FastNewContextStub stub(heap_slots);
194 __ CallStub(&stub); 194 __ CallStub(&stub);
195 } else { 195 } else {
196 __ push(rdi); 196 __ Push(rdi);
197 __ CallRuntime(Runtime::kNewFunctionContext, 1); 197 __ CallRuntime(Runtime::kNewFunctionContext, 1);
198 } 198 }
199 function_in_register = false; 199 function_in_register = false;
200 // Context is returned in rax. It replaces the context passed to us. 200 // Context is returned in rax. It replaces the context passed to us.
201 // It's saved in the stack and kept live in rsi. 201 // It's saved in the stack and kept live in rsi.
202 __ movp(rsi, rax); 202 __ movp(rsi, rax);
203 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax); 203 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
204 204
205 // Copy any necessary parameters into the context. 205 // Copy any necessary parameters into the context.
206 int num_parameters = info->scope()->num_parameters(); 206 int num_parameters = info->scope()->num_parameters();
(...skipping 14 matching lines...)
221 } 221 }
222 } 222 }
223 223
224 // Possibly allocate an arguments object. 224 // Possibly allocate an arguments object.
225 Variable* arguments = scope()->arguments(); 225 Variable* arguments = scope()->arguments();
226 if (arguments != NULL) { 226 if (arguments != NULL) {
227 // Arguments object must be allocated after the context object, in 227 // Arguments object must be allocated after the context object, in
228 // case the "arguments" or ".arguments" variables are in the context. 228 // case the "arguments" or ".arguments" variables are in the context.
229 Comment cmnt(masm_, "[ Allocate arguments object"); 229 Comment cmnt(masm_, "[ Allocate arguments object");
230 if (function_in_register) { 230 if (function_in_register) {
231 __ push(rdi); 231 __ Push(rdi);
232 } else { 232 } else {
233 __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 233 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
234 } 234 }
235 // The receiver is just before the parameters on the caller's stack. 235 // The receiver is just before the parameters on the caller's stack.
236 int num_parameters = info->scope()->num_parameters(); 236 int num_parameters = info->scope()->num_parameters();
237 int offset = num_parameters * kPointerSize; 237 int offset = num_parameters * kPointerSize;
238 __ lea(rdx, 238 __ lea(rdx,
239 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset)); 239 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
240 __ push(rdx); 240 __ Push(rdx);
241 __ Push(Smi::FromInt(num_parameters)); 241 __ Push(Smi::FromInt(num_parameters));
242 // Arguments to ArgumentsAccessStub: 242 // Arguments to ArgumentsAccessStub:
243 // function, receiver address, parameter count. 243 // function, receiver address, parameter count.
244 // The stub will rewrite receiver and parameter count if the previous 244 // The stub will rewrite receiver and parameter count if the previous
245 // stack frame was an arguments adapter frame. 245 // stack frame was an arguments adapter frame.
246 ArgumentsAccessStub::Type type; 246 ArgumentsAccessStub::Type type;
247 if (strict_mode() == STRICT) { 247 if (strict_mode() == STRICT) {
248 type = ArgumentsAccessStub::NEW_STRICT; 248 type = ArgumentsAccessStub::NEW_STRICT;
249 } else if (function()->has_duplicate_parameters()) { 249 } else if (function()->has_duplicate_parameters()) {
250 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; 250 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
(...skipping 105 matching lines...)
356 } 356 }
357 357
358 358
359 void FullCodeGenerator::EmitReturnSequence() { 359 void FullCodeGenerator::EmitReturnSequence() {
360 Comment cmnt(masm_, "[ Return sequence"); 360 Comment cmnt(masm_, "[ Return sequence");
361 if (return_label_.is_bound()) { 361 if (return_label_.is_bound()) {
362 __ jmp(&return_label_); 362 __ jmp(&return_label_);
363 } else { 363 } else {
364 __ bind(&return_label_); 364 __ bind(&return_label_);
365 if (FLAG_trace) { 365 if (FLAG_trace) {
366 __ push(rax); 366 __ Push(rax);
367 __ CallRuntime(Runtime::kTraceExit, 1); 367 __ CallRuntime(Runtime::kTraceExit, 1);
368 } 368 }
369 // Pretend that the exit is a backwards jump to the entry. 369 // Pretend that the exit is a backwards jump to the entry.
370 int weight = 1; 370 int weight = 1;
371 if (info_->ShouldSelfOptimize()) { 371 if (info_->ShouldSelfOptimize()) {
372 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 372 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
373 } else { 373 } else {
374 int distance = masm_->pc_offset(); 374 int distance = masm_->pc_offset();
375 weight = Min(kMaxBackEdgeWeight, 375 weight = Min(kMaxBackEdgeWeight,
376 Max(1, distance / kCodeSizeMultiplier)); 376 Max(1, distance / kCodeSizeMultiplier));
377 } 377 }
378 EmitProfilingCounterDecrement(weight); 378 EmitProfilingCounterDecrement(weight);
379 Label ok; 379 Label ok;
380 __ j(positive, &ok, Label::kNear); 380 __ j(positive, &ok, Label::kNear);
381 __ push(rax); 381 __ Push(rax);
382 __ call(isolate()->builtins()->InterruptCheck(), 382 __ call(isolate()->builtins()->InterruptCheck(),
383 RelocInfo::CODE_TARGET); 383 RelocInfo::CODE_TARGET);
384 __ pop(rax); 384 __ Pop(rax);
385 EmitProfilingCounterReset(); 385 EmitProfilingCounterReset();
386 __ bind(&ok); 386 __ bind(&ok);
387 #ifdef DEBUG 387 #ifdef DEBUG
388 // Add a label for checking the size of the code used for returning. 388 // Add a label for checking the size of the code used for returning.
389 Label check_exit_codesize; 389 Label check_exit_codesize;
390 masm_->bind(&check_exit_codesize); 390 masm_->bind(&check_exit_codesize);
391 #endif 391 #endif
392 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 392 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
393 __ RecordJSReturn(); 393 __ RecordJSReturn();
394 // Do not use the leave instruction here because it is too short to 394 // Do not use the leave instruction here because it is too short to
395 // patch with the code required by the debugger. 395 // patch with the code required by the debugger.
396 __ movp(rsp, rbp); 396 __ movp(rsp, rbp);
397 __ pop(rbp); 397 __ popq(rbp);
398 int no_frame_start = masm_->pc_offset(); 398 int no_frame_start = masm_->pc_offset();
399 399
400 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize; 400 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
401 __ Ret(arguments_bytes, rcx); 401 __ Ret(arguments_bytes, rcx);
402 402
403 #ifdef ENABLE_DEBUGGER_SUPPORT 403 #ifdef ENABLE_DEBUGGER_SUPPORT
404 // Add padding that will be overwritten by a debugger breakpoint. We 404 // Add padding that will be overwritten by a debugger breakpoint. We
405 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" 405 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
406 // (3 + 1 + 3). 406 // (3 + 1 + 3).
407 const int kPadding = Assembler::kJSReturnSequenceLength - 7; 407 const int kPadding = Assembler::kJSReturnSequenceLength - 7;
(...skipping 17 matching lines...)
425 425
426 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { 426 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
427 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 427 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
428 codegen()->GetVar(result_register(), var); 428 codegen()->GetVar(result_register(), var);
429 } 429 }
430 430
431 431
432 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { 432 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
433 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 433 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
434 MemOperand operand = codegen()->VarOperand(var, result_register()); 434 MemOperand operand = codegen()->VarOperand(var, result_register());
435 __ push(operand); 435 __ Push(operand);
436 } 436 }
437 437
438 438
439 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 439 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
440 codegen()->GetVar(result_register(), var); 440 codegen()->GetVar(result_register(), var);
441 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 441 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
442 codegen()->DoTest(this); 442 codegen()->DoTest(this);
443 } 443 }
444 444
445 445
(...skipping 337 matching lines...)
783 EmitDebugCheckDeclarationContext(variable); 783 EmitDebugCheckDeclarationContext(variable);
784 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); 784 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
785 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister); 785 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
786 // No write barrier since the hole value is in old space. 786 // No write barrier since the hole value is in old space.
787 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 787 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
788 } 788 }
789 break; 789 break;
790 790
791 case Variable::LOOKUP: { 791 case Variable::LOOKUP: {
792 Comment cmnt(masm_, "[ VariableDeclaration"); 792 Comment cmnt(masm_, "[ VariableDeclaration");
793 __ push(rsi); 793 __ Push(rsi);
794 __ Push(variable->name()); 794 __ Push(variable->name());
795 // Declaration nodes are always introduced in one of four modes. 795 // Declaration nodes are always introduced in one of four modes.
796 ASSERT(IsDeclaredVariableMode(mode)); 796 ASSERT(IsDeclaredVariableMode(mode));
797 PropertyAttributes attr = 797 PropertyAttributes attr =
798 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 798 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
799 __ Push(Smi::FromInt(attr)); 799 __ Push(Smi::FromInt(attr));
800 // Push initial value, if any. 800 // Push initial value, if any.
801 // Note: For variables we must not push an initial value (such as 801 // Note: For variables we must not push an initial value (such as
802 // 'undefined') because we may have a (legal) redeclaration and we 802 // 'undefined') because we may have a (legal) redeclaration and we
803 // must not destroy the current value. 803 // must not destroy the current value.
(...skipping 45 matching lines...)
849 rcx, 849 rcx,
850 kDontSaveFPRegs, 850 kDontSaveFPRegs,
851 EMIT_REMEMBERED_SET, 851 EMIT_REMEMBERED_SET,
852 OMIT_SMI_CHECK); 852 OMIT_SMI_CHECK);
853 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 853 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
854 break; 854 break;
855 } 855 }
856 856
857 case Variable::LOOKUP: { 857 case Variable::LOOKUP: {
858 Comment cmnt(masm_, "[ FunctionDeclaration"); 858 Comment cmnt(masm_, "[ FunctionDeclaration");
859 __ push(rsi); 859 __ Push(rsi);
860 __ Push(variable->name()); 860 __ Push(variable->name());
861 __ Push(Smi::FromInt(NONE)); 861 __ Push(Smi::FromInt(NONE));
862 VisitForStackValue(declaration->fun()); 862 VisitForStackValue(declaration->fun());
863 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 863 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
864 break; 864 break;
865 } 865 }
866 } 866 }
867 } 867 }
868 868
869 869
(...skipping 50 matching lines...)
920 } 920 }
921 921
922 922
923 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { 923 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
924 // TODO(rossberg) 924 // TODO(rossberg)
925 } 925 }
926 926
927 927
928 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 928 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
929 // Call the runtime to declare the globals. 929 // Call the runtime to declare the globals.
930 __ push(rsi); // The context is the first argument. 930 __ Push(rsi); // The context is the first argument.
931 __ Push(pairs); 931 __ Push(pairs);
932 __ Push(Smi::FromInt(DeclareGlobalsFlags())); 932 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
933 __ CallRuntime(Runtime::kDeclareGlobals, 3); 933 __ CallRuntime(Runtime::kDeclareGlobals, 3);
934 // Return value is ignored. 934 // Return value is ignored.
935 } 935 }
936 936
937 937
938 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { 938 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
939 // Call the runtime to declare the modules. 939 // Call the runtime to declare the modules.
940 __ Push(descriptions); 940 __ Push(descriptions);
(...skipping 115 matching lines...)
1056 __ j(equal, &exit); 1056 __ j(equal, &exit);
1057 1057
1058 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); 1058 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1059 1059
1060 // Convert the object to a JS object. 1060 // Convert the object to a JS object.
1061 Label convert, done_convert; 1061 Label convert, done_convert;
1062 __ JumpIfSmi(rax, &convert); 1062 __ JumpIfSmi(rax, &convert);
1063 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 1063 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1064 __ j(above_equal, &done_convert); 1064 __ j(above_equal, &done_convert);
1065 __ bind(&convert); 1065 __ bind(&convert);
1066 __ push(rax); 1066 __ Push(rax);
1067 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1067 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1068 __ bind(&done_convert); 1068 __ bind(&done_convert);
1069 __ push(rax); 1069 __ Push(rax);
1070 1070
1071 // Check for proxies. 1071 // Check for proxies.
1072 Label call_runtime; 1072 Label call_runtime;
1073 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1073 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1074 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx); 1074 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
1075 __ j(below_equal, &call_runtime); 1075 __ j(below_equal, &call_runtime);
1076 1076
1077 // Check cache validity in generated code. This is a fast case for 1077 // Check cache validity in generated code. This is a fast case for
1078 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 1078 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1079 // guarantee cache validity, call the runtime system to check cache 1079 // guarantee cache validity, call the runtime system to check cache
1080 // validity or get the property names in a fixed array. 1080 // validity or get the property names in a fixed array.
1081 __ CheckEnumCache(null_value, &call_runtime); 1081 __ CheckEnumCache(null_value, &call_runtime);
1082 1082
1083 // The enum cache is valid. Load the map of the object being 1083 // The enum cache is valid. Load the map of the object being
1084 // iterated over and use the cache for the iteration. 1084 // iterated over and use the cache for the iteration.
1085 Label use_cache; 1085 Label use_cache;
1086 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset)); 1086 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
1087 __ jmp(&use_cache, Label::kNear); 1087 __ jmp(&use_cache, Label::kNear);
1088 1088
1089 // Get the set of properties to enumerate. 1089 // Get the set of properties to enumerate.
1090 __ bind(&call_runtime); 1090 __ bind(&call_runtime);
1091 __ push(rax); // Duplicate the enumerable object on the stack. 1091 __ Push(rax); // Duplicate the enumerable object on the stack.
1092 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 1092 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1093 1093
1094 // If we got a map from the runtime call, we can do a fast 1094 // If we got a map from the runtime call, we can do a fast
1095 // modification check. Otherwise, we got a fixed array, and we have 1095 // modification check. Otherwise, we got a fixed array, and we have
1096 // to do a slow check. 1096 // to do a slow check.
1097 Label fixed_array; 1097 Label fixed_array;
1098 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), 1098 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
1099 Heap::kMetaMapRootIndex); 1099 Heap::kMetaMapRootIndex);
1100 __ j(not_equal, &fixed_array); 1100 __ j(not_equal, &fixed_array);
1101 1101
1102 // We got a map in register rax. Get the enumeration cache from it. 1102 // We got a map in register rax. Get the enumeration cache from it.
1103 __ bind(&use_cache); 1103 __ bind(&use_cache);
1104 1104
1105 Label no_descriptors; 1105 Label no_descriptors;
1106 1106
1107 __ EnumLength(rdx, rax); 1107 __ EnumLength(rdx, rax);
1108 __ Cmp(rdx, Smi::FromInt(0)); 1108 __ Cmp(rdx, Smi::FromInt(0));
1109 __ j(equal, &no_descriptors); 1109 __ j(equal, &no_descriptors);
1110 1110
1111 __ LoadInstanceDescriptors(rax, rcx); 1111 __ LoadInstanceDescriptors(rax, rcx);
1112 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset)); 1112 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
1113 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 1113 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1114 1114
1115 // Set up the four remaining stack slots. 1115 // Set up the four remaining stack slots.
1116 __ push(rax); // Map. 1116 __ Push(rax); // Map.
1117 __ push(rcx); // Enumeration cache. 1117 __ Push(rcx); // Enumeration cache.
1118 __ push(rdx); // Number of valid entries for the map in the enum cache. 1118 __ Push(rdx); // Number of valid entries for the map in the enum cache.
1119 __ Push(Smi::FromInt(0)); // Initial index. 1119 __ Push(Smi::FromInt(0)); // Initial index.
1120 __ jmp(&loop); 1120 __ jmp(&loop);
1121 1121
1122 __ bind(&no_descriptors); 1122 __ bind(&no_descriptors);
1123 __ addq(rsp, Immediate(kPointerSize)); 1123 __ addq(rsp, Immediate(kPointerSize));
1124 __ jmp(&exit); 1124 __ jmp(&exit);
1125 1125
1126 // We got a fixed array in register rax. Iterate through that. 1126 // We got a fixed array in register rax. Iterate through that.
1127 Label non_proxy; 1127 Label non_proxy;
1128 __ bind(&fixed_array); 1128 __ bind(&fixed_array);
1129 1129
1130 Handle<Object> feedback = Handle<Object>( 1130 Handle<Object> feedback = Handle<Object>(
1131 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), 1131 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1132 isolate()); 1132 isolate());
1133 StoreFeedbackVectorSlot(slot, feedback); 1133 StoreFeedbackVectorSlot(slot, feedback);
1134 1134
1135 // No need for a write barrier, we are storing a Smi in the feedback vector. 1135 // No need for a write barrier, we are storing a Smi in the feedback vector.
1136 __ Move(rbx, FeedbackVector()); 1136 __ Move(rbx, FeedbackVector());
1137 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)), 1137 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1138 Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)); 1138 Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
1139 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check 1139 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1140 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object 1140 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1141 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1141 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1142 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx); 1142 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1143 __ j(above, &non_proxy); 1143 __ j(above, &non_proxy);
1144 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy 1144 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1145 __ bind(&non_proxy); 1145 __ bind(&non_proxy);
1146 __ push(rbx); // Smi 1146 __ Push(rbx); // Smi
1147 __ push(rax); // Array 1147 __ Push(rax); // Array
1148 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset)); 1148 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1149 __ push(rax); // Fixed array length (as smi). 1149 __ Push(rax); // Fixed array length (as smi).
1150 __ Push(Smi::FromInt(0)); // Initial index. 1150 __ Push(Smi::FromInt(0)); // Initial index.
1151 1151
1152 // Generate code for doing the condition check. 1152 // Generate code for doing the condition check.
1153 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1153 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1154 __ bind(&loop); 1154 __ bind(&loop);
1155 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index. 1155 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
1156 __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length. 1156 __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
1157 __ j(above_equal, loop_statement.break_label()); 1157 __ j(above_equal, loop_statement.break_label());
1158 1158
1159 // Get the current entry of the array into register rbx. 1159 // Get the current entry of the array into register rbx.
(...skipping 16 matching lines...)
1176 __ j(equal, &update_each, Label::kNear); 1176 __ j(equal, &update_each, Label::kNear);
1177 1177
1178 // For proxies, no filtering is done. 1178 // For proxies, no filtering is done.
1179 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. 1179 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1180 __ Cmp(rdx, Smi::FromInt(0)); 1180 __ Cmp(rdx, Smi::FromInt(0));
1181 __ j(equal, &update_each, Label::kNear); 1181 __ j(equal, &update_each, Label::kNear);
1182 1182
1183 // Convert the entry to a string or null if it isn't a property 1183 // Convert the entry to a string or null if it isn't a property
1184 // anymore. If the property has been removed while iterating, we 1184 // anymore. If the property has been removed while iterating, we
1185 // just skip it. 1185 // just skip it.
1186 __ push(rcx); // Enumerable. 1186 __ Push(rcx); // Enumerable.
1187 __ push(rbx); // Current entry. 1187 __ Push(rbx); // Current entry.
1188 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1188 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1189 __ Cmp(rax, Smi::FromInt(0)); 1189 __ Cmp(rax, Smi::FromInt(0));
1190 __ j(equal, loop_statement.continue_label()); 1190 __ j(equal, loop_statement.continue_label());
1191 __ movp(rbx, rax); 1191 __ movp(rbx, rax);
1192 1192
1193 // Update the 'each' property or variable from the possibly filtered 1193 // Update the 'each' property or variable from the possibly filtered
1194 // entry in register rbx. 1194 // entry in register rbx.
1195 __ bind(&update_each); 1195 __ bind(&update_each);
1196 __ movp(result_register(), rbx); 1196 __ movp(result_register(), rbx);
1197 // Perform the assignment as if via '='. 1197 // Perform the assignment as if via '='.
(...skipping 38 matching lines...)
1236 __ j(equal, loop_statement.break_label()); 1236 __ j(equal, loop_statement.break_label());
1237 __ CompareRoot(rax, Heap::kNullValueRootIndex); 1237 __ CompareRoot(rax, Heap::kNullValueRootIndex);
1238 __ j(equal, loop_statement.break_label()); 1238 __ j(equal, loop_statement.break_label());
1239 1239
1240 // Convert the iterator to a JS object. 1240 // Convert the iterator to a JS object.
1241 Label convert, done_convert; 1241 Label convert, done_convert;
1242 __ JumpIfSmi(rax, &convert); 1242 __ JumpIfSmi(rax, &convert);
1243 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 1243 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1244 __ j(above_equal, &done_convert); 1244 __ j(above_equal, &done_convert);
1245 __ bind(&convert); 1245 __ bind(&convert);
1246 __ push(rax); 1246 __ Push(rax);
1247 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1247 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1248 __ bind(&done_convert); 1248 __ bind(&done_convert);
1249 1249
1250 // Loop entry. 1250 // Loop entry.
1251 __ bind(loop_statement.continue_label()); 1251 __ bind(loop_statement.continue_label());
1252 1252
1253 // result = iterator.next() 1253 // result = iterator.next()
1254 VisitForEffect(stmt->next_result()); 1254 VisitForEffect(stmt->next_result());
1255 1255
1256 // if (result.done) break; 1256 // if (result.done) break;
(...skipping 32 matching lines...)
1289 // doesn't just get a copy of the existing unoptimized code. 1289 // doesn't just get a copy of the existing unoptimized code.
1290 if (!FLAG_always_opt && 1290 if (!FLAG_always_opt &&
1291 !FLAG_prepare_always_opt && 1291 !FLAG_prepare_always_opt &&
1292 !pretenure && 1292 !pretenure &&
1293 scope()->is_function_scope() && 1293 scope()->is_function_scope() &&
1294 info->num_literals() == 0) { 1294 info->num_literals() == 0) {
1295 FastNewClosureStub stub(info->strict_mode(), info->is_generator()); 1295 FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1296 __ Move(rbx, info); 1296 __ Move(rbx, info);
1297 __ CallStub(&stub); 1297 __ CallStub(&stub);
1298 } else { 1298 } else {
1299 __ push(rsi); 1299 __ Push(rsi);
1300 __ Push(info); 1300 __ Push(info);
1301 __ Push(pretenure 1301 __ Push(pretenure
1302 ? isolate()->factory()->true_value() 1302 ? isolate()->factory()->true_value()
1303 : isolate()->factory()->false_value()); 1303 : isolate()->factory()->false_value());
1304 __ CallRuntime(Runtime::kNewClosure, 3); 1304 __ CallRuntime(Runtime::kNewClosure, 3);
1305 } 1305 }
1306 context()->Plug(rax); 1306 context()->Plug(rax);
1307 } 1307 }
1308 1308
1309 1309
(...skipping 208 matching lines...)
1518 break; 1518 break;
1519 } 1519 }
1520 1520
1521 case Variable::LOOKUP: { 1521 case Variable::LOOKUP: {
1522 Comment cmnt(masm_, "[ Lookup slot"); 1522 Comment cmnt(masm_, "[ Lookup slot");
1523 Label done, slow; 1523 Label done, slow;
1524 // Generate code for loading from variables potentially shadowed 1524 // Generate code for loading from variables potentially shadowed
1525 // by eval-introduced variables. 1525 // by eval-introduced variables.
1526 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); 1526 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1527 __ bind(&slow); 1527 __ bind(&slow);
1528 __ push(rsi); // Context. 1528 __ Push(rsi); // Context.
1529 __ Push(var->name()); 1529 __ Push(var->name());
1530 __ CallRuntime(Runtime::kLoadContextSlot, 2); 1530 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1531 __ bind(&done); 1531 __ bind(&done);
1532 context()->Plug(rax); 1532 context()->Plug(rax);
1533 break; 1533 break;
1534 } 1534 }
1535 } 1535 }
1536 } 1536 }
1537 1537
1538 1538
1539 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1539 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1540 Comment cmnt(masm_, "[ RegExpLiteral"); 1540 Comment cmnt(masm_, "[ RegExpLiteral");
1541 Label materialized; 1541 Label materialized;
1542 // Registers will be used as follows: 1542 // Registers will be used as follows:
1543 // rdi = JS function. 1543 // rdi = JS function.
1544 // rcx = literals array. 1544 // rcx = literals array.
1545 // rbx = regexp literal. 1545 // rbx = regexp literal.
1546 // rax = regexp literal clone. 1546 // rax = regexp literal clone.
1547 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1547 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1548 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset)); 1548 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1549 int literal_offset = 1549 int literal_offset =
1550 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1550 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1551 __ movp(rbx, FieldOperand(rcx, literal_offset)); 1551 __ movp(rbx, FieldOperand(rcx, literal_offset));
1552 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 1552 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1553 __ j(not_equal, &materialized, Label::kNear); 1553 __ j(not_equal, &materialized, Label::kNear);
1554 1554
1555 // Create regexp literal using runtime function 1555 // Create regexp literal using runtime function
1556 // Result will be in rax. 1556 // Result will be in rax.
1557 __ push(rcx); 1557 __ Push(rcx);
1558 __ Push(Smi::FromInt(expr->literal_index())); 1558 __ Push(Smi::FromInt(expr->literal_index()));
1559 __ Push(expr->pattern()); 1559 __ Push(expr->pattern());
1560 __ Push(expr->flags()); 1560 __ Push(expr->flags());
1561 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1561 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1562 __ movp(rbx, rax); 1562 __ movp(rbx, rax);
1563 1563
1564 __ bind(&materialized); 1564 __ bind(&materialized);
1565 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1565 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1566 Label allocated, runtime_allocate; 1566 Label allocated, runtime_allocate;
1567 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); 1567 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1568 __ jmp(&allocated); 1568 __ jmp(&allocated);
1569 1569
1570 __ bind(&runtime_allocate); 1570 __ bind(&runtime_allocate);
1571 __ push(rbx); 1571 __ Push(rbx);
1572 __ Push(Smi::FromInt(size)); 1572 __ Push(Smi::FromInt(size));
1573 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 1573 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1574 __ pop(rbx); 1574 __ Pop(rbx);
1575 1575
1576 __ bind(&allocated); 1576 __ bind(&allocated);
1577 // Copy the content into the newly allocated memory. 1577 // Copy the content into the newly allocated memory.
1578 // (Unroll copy loop once for better throughput). 1578 // (Unroll copy loop once for better throughput).
1579 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { 1579 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1580 __ movp(rdx, FieldOperand(rbx, i)); 1580 __ movp(rdx, FieldOperand(rbx, i));
1581 __ movp(rcx, FieldOperand(rbx, i + kPointerSize)); 1581 __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
1582 __ movp(FieldOperand(rax, i), rdx); 1582 __ movp(FieldOperand(rax, i), rdx);
1583 __ movp(FieldOperand(rax, i + kPointerSize), rcx); 1583 __ movp(FieldOperand(rax, i + kPointerSize), rcx);
1584 } 1584 }
(...skipping 23 matching lines...)
1608 ? ObjectLiteral::kFastElements 1608 ? ObjectLiteral::kFastElements
1609 : ObjectLiteral::kNoFlags; 1609 : ObjectLiteral::kNoFlags;
1610 flags |= expr->has_function() 1610 flags |= expr->has_function()
1611 ? ObjectLiteral::kHasFunction 1611 ? ObjectLiteral::kHasFunction
1612 : ObjectLiteral::kNoFlags; 1612 : ObjectLiteral::kNoFlags;
1613 int properties_count = constant_properties->length() / 2; 1613 int properties_count = constant_properties->length() / 2;
1614 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || 1614 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1615 flags != ObjectLiteral::kFastElements || 1615 flags != ObjectLiteral::kFastElements ||
1616 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1616 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1617 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1617 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1618 __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset)); 1618 __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1619 __ Push(Smi::FromInt(expr->literal_index())); 1619 __ Push(Smi::FromInt(expr->literal_index()));
1620 __ Push(constant_properties); 1620 __ Push(constant_properties);
1621 __ Push(Smi::FromInt(flags)); 1621 __ Push(Smi::FromInt(flags));
1622 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1622 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1623 } else { 1623 } else {
1624 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1624 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1625 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset)); 1625 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1626 __ Move(rbx, Smi::FromInt(expr->literal_index())); 1626 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1627 __ Move(rcx, constant_properties); 1627 __ Move(rcx, constant_properties);
1628 __ Move(rdx, Smi::FromInt(flags)); 1628 __ Move(rdx, Smi::FromInt(flags));
(...skipping 11 matching lines...)
1640 expr->CalculateEmitStore(zone()); 1640 expr->CalculateEmitStore(zone());
1641 1641
1642 AccessorTable accessor_table(zone()); 1642 AccessorTable accessor_table(zone());
1643 for (int i = 0; i < expr->properties()->length(); i++) { 1643 for (int i = 0; i < expr->properties()->length(); i++) {
1644 ObjectLiteral::Property* property = expr->properties()->at(i); 1644 ObjectLiteral::Property* property = expr->properties()->at(i);
1645 if (property->IsCompileTimeValue()) continue; 1645 if (property->IsCompileTimeValue()) continue;
1646 1646
1647 Literal* key = property->key(); 1647 Literal* key = property->key();
1648 Expression* value = property->value(); 1648 Expression* value = property->value();
1649 if (!result_saved) { 1649 if (!result_saved) {
1650 __ push(rax); // Save result on the stack 1650 __ Push(rax); // Save result on the stack
1651 result_saved = true; 1651 result_saved = true;
1652 } 1652 }
1653 switch (property->kind()) { 1653 switch (property->kind()) {
1654 case ObjectLiteral::Property::CONSTANT: 1654 case ObjectLiteral::Property::CONSTANT:
1655 UNREACHABLE(); 1655 UNREACHABLE();
1656 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1656 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1657 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); 1657 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1658 // Fall through. 1658 // Fall through.
1659 case ObjectLiteral::Property::COMPUTED: 1659 case ObjectLiteral::Property::COMPUTED:
1660 if (key->value()->IsInternalizedString()) { 1660 if (key->value()->IsInternalizedString()) {
1661 if (property->emit_store()) { 1661 if (property->emit_store()) {
1662 VisitForAccumulatorValue(value); 1662 VisitForAccumulatorValue(value);
1663 __ Move(rcx, key->value()); 1663 __ Move(rcx, key->value());
1664 __ movp(rdx, Operand(rsp, 0)); 1664 __ movp(rdx, Operand(rsp, 0));
1665 CallStoreIC(key->LiteralFeedbackId()); 1665 CallStoreIC(key->LiteralFeedbackId());
1666 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1666 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1667 } else { 1667 } else {
1668 VisitForEffect(value); 1668 VisitForEffect(value);
1669 } 1669 }
1670 break; 1670 break;
1671 } 1671 }
1672 __ push(Operand(rsp, 0)); // Duplicate receiver. 1672 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1673 VisitForStackValue(key); 1673 VisitForStackValue(key);
1674 VisitForStackValue(value); 1674 VisitForStackValue(value);
1675 if (property->emit_store()) { 1675 if (property->emit_store()) {
1676 __ Push(Smi::FromInt(NONE)); // PropertyAttributes 1676 __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1677 __ CallRuntime(Runtime::kSetProperty, 4); 1677 __ CallRuntime(Runtime::kSetProperty, 4);
1678 } else { 1678 } else {
1679 __ Drop(3); 1679 __ Drop(3);
1680 } 1680 }
1681 break; 1681 break;
1682 case ObjectLiteral::Property::PROTOTYPE: 1682 case ObjectLiteral::Property::PROTOTYPE:
1683 __ push(Operand(rsp, 0)); // Duplicate receiver. 1683 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1684 VisitForStackValue(value); 1684 VisitForStackValue(value);
1685 if (property->emit_store()) { 1685 if (property->emit_store()) {
1686 __ CallRuntime(Runtime::kSetPrototype, 2); 1686 __ CallRuntime(Runtime::kSetPrototype, 2);
1687 } else { 1687 } else {
1688 __ Drop(2); 1688 __ Drop(2);
1689 } 1689 }
1690 break; 1690 break;
1691 case ObjectLiteral::Property::GETTER: 1691 case ObjectLiteral::Property::GETTER:
1692 accessor_table.lookup(key)->second->getter = value; 1692 accessor_table.lookup(key)->second->getter = value;
1693 break; 1693 break;
1694 case ObjectLiteral::Property::SETTER: 1694 case ObjectLiteral::Property::SETTER:
1695 accessor_table.lookup(key)->second->setter = value; 1695 accessor_table.lookup(key)->second->setter = value;
1696 break; 1696 break;
1697 } 1697 }
1698 } 1698 }
1699 1699
1700 // Emit code to define accessors, using only a single call to the runtime for 1700 // Emit code to define accessors, using only a single call to the runtime for
1701 // each pair of corresponding getters and setters. 1701 // each pair of corresponding getters and setters.
1702 for (AccessorTable::Iterator it = accessor_table.begin(); 1702 for (AccessorTable::Iterator it = accessor_table.begin();
1703 it != accessor_table.end(); 1703 it != accessor_table.end();
1704 ++it) { 1704 ++it) {
1705 __ push(Operand(rsp, 0)); // Duplicate receiver. 1705 __ Push(Operand(rsp, 0)); // Duplicate receiver.
1706 VisitForStackValue(it->first); 1706 VisitForStackValue(it->first);
1707 EmitAccessor(it->second->getter); 1707 EmitAccessor(it->second->getter);
1708 EmitAccessor(it->second->setter); 1708 EmitAccessor(it->second->setter);
1709 __ Push(Smi::FromInt(NONE)); 1709 __ Push(Smi::FromInt(NONE));
1710 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5); 1710 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1711 } 1711 }
1712 1712
1713 if (expr->has_function()) { 1713 if (expr->has_function()) {
1714 ASSERT(result_saved); 1714 ASSERT(result_saved);
1715 __ push(Operand(rsp, 0)); 1715 __ Push(Operand(rsp, 0));
1716 __ CallRuntime(Runtime::kToFastProperties, 1); 1716 __ CallRuntime(Runtime::kToFastProperties, 1);
1717 } 1717 }
1718 1718
1719 if (result_saved) { 1719 if (result_saved) {
1720 context()->PlugTOS(); 1720 context()->PlugTOS();
1721 } else { 1721 } else {
1722 context()->Plug(rax); 1722 context()->Plug(rax);
1723 } 1723 }
1724 } 1724 }
1725 1725
(...skipping 35 matching lines...)
1761 __ Move(rbx, Smi::FromInt(expr->literal_index())); 1761 __ Move(rbx, Smi::FromInt(expr->literal_index()));
1762 __ Move(rcx, constant_elements); 1762 __ Move(rcx, constant_elements);
1763 FastCloneShallowArrayStub stub( 1763 FastCloneShallowArrayStub stub(
1764 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, 1764 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1765 allocation_site_mode, 1765 allocation_site_mode,
1766 length); 1766 length);
1767 __ CallStub(&stub); 1767 __ CallStub(&stub);
1768 } else if (expr->depth() > 1 || Serializer::enabled() || 1768 } else if (expr->depth() > 1 || Serializer::enabled() ||
1769 length > FastCloneShallowArrayStub::kMaximumClonedLength) { 1769 length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1770 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1770 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1771 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); 1771 __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1772 __ Push(Smi::FromInt(expr->literal_index())); 1772 __ Push(Smi::FromInt(expr->literal_index()));
1773 __ Push(constant_elements); 1773 __ Push(constant_elements);
1774 __ Push(Smi::FromInt(flags)); 1774 __ Push(Smi::FromInt(flags));
1775 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); 1775 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1776 } else { 1776 } else {
1777 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || 1777 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1778 FLAG_smi_only_arrays); 1778 FLAG_smi_only_arrays);
1779 FastCloneShallowArrayStub::Mode mode = 1779 FastCloneShallowArrayStub::Mode mode =
1780 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; 1780 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1781 1781
(...skipping 15 matching lines...)
1797 1797
1798 // Emit code to evaluate all the non-constant subexpressions and to store 1798 // Emit code to evaluate all the non-constant subexpressions and to store
1799 // them into the newly cloned array. 1799 // them into the newly cloned array.
1800 for (int i = 0; i < length; i++) { 1800 for (int i = 0; i < length; i++) {
1801 Expression* subexpr = subexprs->at(i); 1801 Expression* subexpr = subexprs->at(i);
1802 // If the subexpression is a literal or a simple materialized literal it 1802 // If the subexpression is a literal or a simple materialized literal it
1803 // is already set in the cloned array. 1803 // is already set in the cloned array.
1804 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1804 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1805 1805
1806 if (!result_saved) { 1806 if (!result_saved) {
1807 __ push(rax); // array literal 1807 __ Push(rax); // array literal
1808 __ Push(Smi::FromInt(expr->literal_index())); 1808 __ Push(Smi::FromInt(expr->literal_index()));
1809 result_saved = true; 1809 result_saved = true;
1810 } 1810 }
1811 VisitForAccumulatorValue(subexpr); 1811 VisitForAccumulatorValue(subexpr);
1812 1812
1813 if (IsFastObjectElementsKind(constant_elements_kind)) { 1813 if (IsFastObjectElementsKind(constant_elements_kind)) {
1814 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they 1814 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1815 // cannot transition and don't need to call the runtime stub. 1815 // cannot transition and don't need to call the runtime stub.
1816 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1816 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1817 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal. 1817 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
(...skipping 42 matching lines...)
1860 1860
1861 // Evaluate LHS expression. 1861 // Evaluate LHS expression.
1862 switch (assign_type) { 1862 switch (assign_type) {
1863 case VARIABLE: 1863 case VARIABLE:
1864 // Nothing to do here. 1864 // Nothing to do here.
1865 break; 1865 break;
1866 case NAMED_PROPERTY: 1866 case NAMED_PROPERTY:
1867 if (expr->is_compound()) { 1867 if (expr->is_compound()) {
1868 // We need the receiver both on the stack and in the accumulator. 1868 // We need the receiver both on the stack and in the accumulator.
1869 VisitForAccumulatorValue(property->obj()); 1869 VisitForAccumulatorValue(property->obj());
1870 __ push(result_register()); 1870 __ Push(result_register());
1871 } else { 1871 } else {
1872 VisitForStackValue(property->obj()); 1872 VisitForStackValue(property->obj());
1873 } 1873 }
1874 break; 1874 break;
1875 case KEYED_PROPERTY: { 1875 case KEYED_PROPERTY: {
1876 if (expr->is_compound()) { 1876 if (expr->is_compound()) {
1877 VisitForStackValue(property->obj()); 1877 VisitForStackValue(property->obj());
1878 VisitForAccumulatorValue(property->key()); 1878 VisitForAccumulatorValue(property->key());
1879 __ movp(rdx, Operand(rsp, 0)); 1879 __ movp(rdx, Operand(rsp, 0));
1880 __ push(rax); 1880 __ Push(rax);
1881 } else { 1881 } else {
1882 VisitForStackValue(property->obj()); 1882 VisitForStackValue(property->obj());
1883 VisitForStackValue(property->key()); 1883 VisitForStackValue(property->key());
1884 } 1884 }
1885 break; 1885 break;
1886 } 1886 }
1887 } 1887 }
1888 1888
1889 // For compound assignments we need another deoptimization point after the 1889 // For compound assignments we need another deoptimization point after the
1890 // variable/property load. 1890 // variable/property load.
1891 if (expr->is_compound()) { 1891 if (expr->is_compound()) {
1892 { AccumulatorValueContext context(this); 1892 { AccumulatorValueContext context(this);
1893 switch (assign_type) { 1893 switch (assign_type) {
1894 case VARIABLE: 1894 case VARIABLE:
1895 EmitVariableLoad(expr->target()->AsVariableProxy()); 1895 EmitVariableLoad(expr->target()->AsVariableProxy());
1896 PrepareForBailout(expr->target(), TOS_REG); 1896 PrepareForBailout(expr->target(), TOS_REG);
1897 break; 1897 break;
1898 case NAMED_PROPERTY: 1898 case NAMED_PROPERTY:
1899 EmitNamedPropertyLoad(property); 1899 EmitNamedPropertyLoad(property);
1900 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1900 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1901 break; 1901 break;
1902 case KEYED_PROPERTY: 1902 case KEYED_PROPERTY:
1903 EmitKeyedPropertyLoad(property); 1903 EmitKeyedPropertyLoad(property);
1904 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1904 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1905 break; 1905 break;
1906 } 1906 }
1907 } 1907 }
1908 1908
1909 Token::Value op = expr->binary_op(); 1909 Token::Value op = expr->binary_op();
1910 __ push(rax); // Left operand goes on the stack. 1910 __ Push(rax); // Left operand goes on the stack.
1911 VisitForAccumulatorValue(expr->value()); 1911 VisitForAccumulatorValue(expr->value());
1912 1912
1913 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1913 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1914 ? OVERWRITE_RIGHT 1914 ? OVERWRITE_RIGHT
1915 : NO_OVERWRITE; 1915 : NO_OVERWRITE;
1916 SetSourcePosition(expr->position() + 1); 1916 SetSourcePosition(expr->position() + 1);
1917 AccumulatorValueContext context(this); 1917 AccumulatorValueContext context(this);
1918 if (ShouldInlineSmiCase(op)) { 1918 if (ShouldInlineSmiCase(op)) {
1919 EmitInlineSmiBinaryOp(expr->binary_operation(), 1919 EmitInlineSmiBinaryOp(expr->binary_operation(),
1920 op, 1920 op,
(...skipping 33 matching lines...)
1954 void FullCodeGenerator::VisitYield(Yield* expr) { 1954 void FullCodeGenerator::VisitYield(Yield* expr) {
1955 Comment cmnt(masm_, "[ Yield"); 1955 Comment cmnt(masm_, "[ Yield");
1956 // Evaluate yielded value first; the initial iterator definition depends on 1956 // Evaluate yielded value first; the initial iterator definition depends on
1957 // this. It stays on the stack while we update the iterator. 1957 // this. It stays on the stack while we update the iterator.
1958 VisitForStackValue(expr->expression()); 1958 VisitForStackValue(expr->expression());
1959 1959
1960 switch (expr->yield_kind()) { 1960 switch (expr->yield_kind()) {
1961 case Yield::SUSPEND: 1961 case Yield::SUSPEND:
1962 // Pop value from top-of-stack slot; box result into result register. 1962 // Pop value from top-of-stack slot; box result into result register.
1963 EmitCreateIteratorResult(false); 1963 EmitCreateIteratorResult(false);
1964 __ push(result_register()); 1964 __ Push(result_register());
1965 // Fall through. 1965 // Fall through.
1966 case Yield::INITIAL: { 1966 case Yield::INITIAL: {
1967 Label suspend, continuation, post_runtime, resume; 1967 Label suspend, continuation, post_runtime, resume;
1968 1968
1969 __ jmp(&suspend); 1969 __ jmp(&suspend);
1970 1970
1971 __ bind(&continuation); 1971 __ bind(&continuation);
1972 __ jmp(&resume); 1972 __ jmp(&resume);
1973 1973
1974 __ bind(&suspend); 1974 __ bind(&suspend);
1975 VisitForAccumulatorValue(expr->generator_object()); 1975 VisitForAccumulatorValue(expr->generator_object());
1976 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1976 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1977 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), 1977 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
1978 Smi::FromInt(continuation.pos())); 1978 Smi::FromInt(continuation.pos()));
1979 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); 1979 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
1980 __ movp(rcx, rsi); 1980 __ movp(rcx, rsi);
1981 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, 1981 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
1982 kDontSaveFPRegs); 1982 kDontSaveFPRegs);
1983 __ lea(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset)); 1983 __ lea(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
1984 __ cmpq(rsp, rbx); 1984 __ cmpq(rsp, rbx);
1985 __ j(equal, &post_runtime); 1985 __ j(equal, &post_runtime);
1986 __ push(rax); // generator object 1986 __ Push(rax); // generator object
1987 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 1987 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1988 __ movp(context_register(), 1988 __ movp(context_register(),
1989 Operand(rbp, StandardFrameConstants::kContextOffset)); 1989 Operand(rbp, StandardFrameConstants::kContextOffset));
1990 __ bind(&post_runtime); 1990 __ bind(&post_runtime);
1991 1991
1992 __ pop(result_register()); 1992 __ Pop(result_register());
1993 EmitReturnSequence(); 1993 EmitReturnSequence();
1994 1994
1995 __ bind(&resume); 1995 __ bind(&resume);
1996 context()->Plug(result_register()); 1996 context()->Plug(result_register());
1997 break; 1997 break;
1998 } 1998 }
1999 1999
2000 case Yield::FINAL: { 2000 case Yield::FINAL: {
2001 VisitForAccumulatorValue(expr->generator_object()); 2001 VisitForAccumulatorValue(expr->generator_object());
2002 __ Move(FieldOperand(result_register(), 2002 __ Move(FieldOperand(result_register(),
(...skipping 16 matching lines...)
2019 Label l_catch, l_try, l_suspend, l_continuation, l_resume; 2019 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2020 Label l_next, l_call, l_loop; 2020 Label l_next, l_call, l_loop;
2021 // Initial send value is undefined. 2021 // Initial send value is undefined.
2022 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 2022 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2023 __ jmp(&l_next); 2023 __ jmp(&l_next);
2024 2024
2025 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } 2025 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2026 __ bind(&l_catch); 2026 __ bind(&l_catch);
2027 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); 2027 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2028 __ LoadRoot(rcx, Heap::kthrow_stringRootIndex); // "throw" 2028 __ LoadRoot(rcx, Heap::kthrow_stringRootIndex); // "throw"
2029 __ push(rcx); 2029 __ Push(rcx);
2030 __ push(Operand(rsp, 2 * kPointerSize)); // iter 2030 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2031 __ push(rax); // exception 2031 __ Push(rax); // exception
2032 __ jmp(&l_call); 2032 __ jmp(&l_call);
2033 2033
2034 // try { received = %yield result } 2034 // try { received = %yield result }
2035 // Shuffle the received result above a try handler and yield it without 2035 // Shuffle the received result above a try handler and yield it without
2036 // re-boxing. 2036 // re-boxing.
2037 __ bind(&l_try); 2037 __ bind(&l_try);
2038 __ pop(rax); // result 2038 __ Pop(rax); // result
2039 __ PushTryHandler(StackHandler::CATCH, expr->index()); 2039 __ PushTryHandler(StackHandler::CATCH, expr->index());
2040 const int handler_size = StackHandlerConstants::kSize; 2040 const int handler_size = StackHandlerConstants::kSize;
2041 __ push(rax); // result 2041 __ Push(rax); // result
2042 __ jmp(&l_suspend); 2042 __ jmp(&l_suspend);
2043 __ bind(&l_continuation); 2043 __ bind(&l_continuation);
2044 __ jmp(&l_resume); 2044 __ jmp(&l_resume);
2045 __ bind(&l_suspend); 2045 __ bind(&l_suspend);
2046 const int generator_object_depth = kPointerSize + handler_size; 2046 const int generator_object_depth = kPointerSize + handler_size;
2047 __ movp(rax, Operand(rsp, generator_object_depth)); 2047 __ movp(rax, Operand(rsp, generator_object_depth));
2048 __ push(rax); // g 2048 __ Push(rax); // g
2049 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2049 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2050 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), 2050 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2051 Smi::FromInt(l_continuation.pos())); 2051 Smi::FromInt(l_continuation.pos()));
2052 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); 2052 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2053 __ movp(rcx, rsi); 2053 __ movp(rcx, rsi);
2054 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, 2054 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2055 kDontSaveFPRegs); 2055 kDontSaveFPRegs);
2056 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2056 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2057 __ movp(context_register(), 2057 __ movp(context_register(),
2058 Operand(rbp, StandardFrameConstants::kContextOffset)); 2058 Operand(rbp, StandardFrameConstants::kContextOffset));
2059 __ pop(rax); // result 2059 __ Pop(rax); // result
2060 EmitReturnSequence(); 2060 EmitReturnSequence();
2061 __ bind(&l_resume); // received in rax 2061 __ bind(&l_resume); // received in rax
2062 __ PopTryHandler(); 2062 __ PopTryHandler();
2063 2063
2064 // receiver = iter; f = 'next'; arg = received; 2064 // receiver = iter; f = 'next'; arg = received;
2065 __ bind(&l_next); 2065 __ bind(&l_next);
2066 __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next" 2066 __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next"
2067 __ push(rcx); 2067 __ Push(rcx);
2068 __ push(Operand(rsp, 2 * kPointerSize)); // iter 2068 __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2069 __ push(rax); // received 2069 __ Push(rax); // received
2070 2070
2071 // result = receiver[f](arg); 2071 // result = receiver[f](arg);
2072 __ bind(&l_call); 2072 __ bind(&l_call);
2073 __ movp(rdx, Operand(rsp, kPointerSize)); 2073 __ movp(rdx, Operand(rsp, kPointerSize));
2074 __ movp(rax, Operand(rsp, 2 * kPointerSize)); 2074 __ movp(rax, Operand(rsp, 2 * kPointerSize));
2075 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2075 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2076 CallIC(ic, TypeFeedbackId::None()); 2076 CallIC(ic, TypeFeedbackId::None());
2077 __ movp(rdi, rax); 2077 __ movp(rdi, rax);
2078 __ movp(Operand(rsp, 2 * kPointerSize), rdi); 2078 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2079 CallFunctionStub stub(1, CALL_AS_METHOD); 2079 CallFunctionStub stub(1, CALL_AS_METHOD);
2080 __ CallStub(&stub); 2080 __ CallStub(&stub);
2081 2081
2082 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2082 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2083 __ Drop(1); // The function is still on the stack; drop it. 2083 __ Drop(1); // The function is still on the stack; drop it.
2084 2084
2085 // if (!result.done) goto l_try; 2085 // if (!result.done) goto l_try;
2086 __ bind(&l_loop); 2086 __ bind(&l_loop);
2087 __ push(rax); // save result 2087 __ Push(rax); // save result
2088 __ LoadRoot(rcx, Heap::kdone_stringRootIndex); // "done" 2088 __ LoadRoot(rcx, Heap::kdone_stringRootIndex); // "done"
2089 CallLoadIC(NOT_CONTEXTUAL); // result.done in rax 2089 CallLoadIC(NOT_CONTEXTUAL); // result.done in rax
2090 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); 2090 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2091 CallIC(bool_ic); 2091 CallIC(bool_ic);
2092 __ testq(result_register(), result_register()); 2092 __ testq(result_register(), result_register());
2093 __ j(zero, &l_try); 2093 __ j(zero, &l_try);
2094 2094
2095 // result.value 2095 // result.value
2096 __ pop(rax); // result 2096 __ Pop(rax); // result
2097 __ LoadRoot(rcx, Heap::kvalue_stringRootIndex); // "value" 2097 __ LoadRoot(rcx, Heap::kvalue_stringRootIndex); // "value"
2098 CallLoadIC(NOT_CONTEXTUAL); // result.value in rax 2098 CallLoadIC(NOT_CONTEXTUAL); // result.value in rax
2099 context()->DropAndPlug(2, rax); // drop iter and g 2099 context()->DropAndPlug(2, rax); // drop iter and g
2100 break; 2100 break;
2101 } 2101 }
2102 } 2102 }
2103 } 2103 }
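For orientation while reading this hunk: the patch mechanically replaces the raw assembler mnemonics push/pop, which always operate on full 64-bit words, with the new Push/Pop macro instructions, and keeps a raw pushq only where a machine-level frame-pointer push is really meant (see the resume trampoline in EmitGeneratorResume below). A minimal sketch of the kind of wrapper being introduced follows; the real definitions live in src/x64/macro-assembler-x64.{h,cc}, are not part of this file, and may well differ (for example, to support pointer sizes smaller than 64 bits), so treat these bodies as assumptions rather than the patch's actual implementation:

// Sketch only; assumed bodies, not taken from the patch.
void MacroAssembler::Push(Register src) { pushq(src); }
void MacroAssembler::Push(const Operand& src) { pushq(src); }
void MacroAssembler::Pop(Register dst) { popq(dst); }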
2104 2104
2105 2105
2106 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2106 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2107 Expression *value, 2107 Expression *value,
2108 JSGeneratorObject::ResumeMode resume_mode) { 2108 JSGeneratorObject::ResumeMode resume_mode) {
2109 // The value stays in rax, and is ultimately read by the resumed generator, as 2109 // The value stays in rax, and is ultimately read by the resumed generator, as
2110 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it 2110 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2111 // is read to throw the value when the resumed generator is already closed. 2111 // is read to throw the value when the resumed generator is already closed.
2112 // rbx will hold the generator object until the activation has been resumed. 2112 // rbx will hold the generator object until the activation has been resumed.
2113 VisitForStackValue(generator); 2113 VisitForStackValue(generator);
2114 VisitForAccumulatorValue(value); 2114 VisitForAccumulatorValue(value);
2115 __ pop(rbx); 2115 __ Pop(rbx);
2116 2116
2117 // Check generator state. 2117 // Check generator state.
2118 Label wrong_state, closed_state, done; 2118 Label wrong_state, closed_state, done;
2119 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); 2119 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2120 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); 2120 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2121 __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), 2121 __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2122 Smi::FromInt(0)); 2122 Smi::FromInt(0));
2123 __ j(equal, &closed_state); 2123 __ j(equal, &closed_state);
2124 __ j(less, &wrong_state); 2124 __ j(less, &wrong_state);
2125 2125
2126 // Load suspended function and context. 2126 // Load suspended function and context.
2127 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset)); 2127 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2128 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); 2128 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2129 2129
2130 // Push receiver. 2130 // Push receiver.
2131 __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset)); 2131 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2132 2132
2133 // Push holes for arguments to generator function. 2133 // Push holes for arguments to generator function.
2134 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2134 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2135 __ movsxlq(rdx, 2135 __ movsxlq(rdx,
2136 FieldOperand(rdx, 2136 FieldOperand(rdx,
2137 SharedFunctionInfo::kFormalParameterCountOffset)); 2137 SharedFunctionInfo::kFormalParameterCountOffset));
2138 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); 2138 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2139 Label push_argument_holes, push_frame; 2139 Label push_argument_holes, push_frame;
2140 __ bind(&push_argument_holes); 2140 __ bind(&push_argument_holes);
2141 __ subq(rdx, Immediate(1)); 2141 __ subq(rdx, Immediate(1));
2142 __ j(carry, &push_frame); 2142 __ j(carry, &push_frame);
2143 __ push(rcx); 2143 __ Push(rcx);
2144 __ jmp(&push_argument_holes); 2144 __ jmp(&push_argument_holes);
2145 2145
2146 // Enter a new JavaScript frame, and initialize its slots as they were when 2146 // Enter a new JavaScript frame, and initialize its slots as they were when
2147 // the generator was suspended. 2147 // the generator was suspended.
2148 Label resume_frame; 2148 Label resume_frame;
2149 __ bind(&push_frame); 2149 __ bind(&push_frame);
2150 __ call(&resume_frame); 2150 __ call(&resume_frame);
2151 __ jmp(&done); 2151 __ jmp(&done);
2152 __ bind(&resume_frame); 2152 __ bind(&resume_frame);
2153 __ push(rbp); // Caller's frame pointer. 2153 __ pushq(rbp); // Caller's frame pointer.
2154 __ movp(rbp, rsp); 2154 __ movp(rbp, rsp);
2155 __ push(rsi); // Callee's context. 2155 __ Push(rsi); // Callee's context.
2156 __ push(rdi); // Callee's JS Function. 2156 __ Push(rdi); // Callee's JS Function.
2157 2157
2158 // Load the operand stack size. 2158 // Load the operand stack size.
2159 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset)); 2159 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2160 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset)); 2160 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2161 __ SmiToInteger32(rdx, rdx); 2161 __ SmiToInteger32(rdx, rdx);
2162 2162
2163 // If we are sending a value and there is no operand stack, we can jump back 2163 // If we are sending a value and there is no operand stack, we can jump back
2164 // in directly. 2164 // in directly.
2165 if (resume_mode == JSGeneratorObject::NEXT) { 2165 if (resume_mode == JSGeneratorObject::NEXT) {
2166 Label slow_resume; 2166 Label slow_resume;
2167 __ cmpq(rdx, Immediate(0)); 2167 __ cmpq(rdx, Immediate(0));
2168 __ j(not_zero, &slow_resume); 2168 __ j(not_zero, &slow_resume);
2169 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 2169 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2170 __ SmiToInteger64(rcx, 2170 __ SmiToInteger64(rcx,
2171 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset)); 2171 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2172 __ addq(rdx, rcx); 2172 __ addq(rdx, rcx);
2173 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), 2173 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2174 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); 2174 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2175 __ jmp(rdx); 2175 __ jmp(rdx);
2176 __ bind(&slow_resume); 2176 __ bind(&slow_resume);
2177 } 2177 }
2178 2178
2179 // Otherwise, we push holes for the operand stack and call the runtime to fix 2179 // Otherwise, we push holes for the operand stack and call the runtime to fix
2180 // up the stack and the handlers. 2180 // up the stack and the handlers.
2181 Label push_operand_holes, call_resume; 2181 Label push_operand_holes, call_resume;
2182 __ bind(&push_operand_holes); 2182 __ bind(&push_operand_holes);
2183 __ subq(rdx, Immediate(1)); 2183 __ subq(rdx, Immediate(1));
2184 __ j(carry, &call_resume); 2184 __ j(carry, &call_resume);
2185 __ push(rcx); 2185 __ Push(rcx);
2186 __ jmp(&push_operand_holes); 2186 __ jmp(&push_operand_holes);
2187 __ bind(&call_resume); 2187 __ bind(&call_resume);
2188 __ push(rbx); 2188 __ Push(rbx);
2189 __ push(result_register()); 2189 __ Push(result_register());
2190 __ Push(Smi::FromInt(resume_mode)); 2190 __ Push(Smi::FromInt(resume_mode));
2191 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); 2191 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2192 // Not reached: the runtime call returns elsewhere. 2192 // Not reached: the runtime call returns elsewhere.
2193 __ Abort(kGeneratorFailedToResume); 2193 __ Abort(kGeneratorFailedToResume);
2194 2194
2195 // Reach here when generator is closed. 2195 // Reach here when generator is closed.
2196 __ bind(&closed_state); 2196 __ bind(&closed_state);
2197 if (resume_mode == JSGeneratorObject::NEXT) { 2197 if (resume_mode == JSGeneratorObject::NEXT) {
2198 // Return completed iterator result when generator is closed. 2198 // Return completed iterator result when generator is closed.
2199 __ PushRoot(Heap::kUndefinedValueRootIndex); 2199 __ PushRoot(Heap::kUndefinedValueRootIndex);
2200 // Pop value from top-of-stack slot; box result into result register. 2200 // Pop value from top-of-stack slot; box result into result register.
2201 EmitCreateIteratorResult(true); 2201 EmitCreateIteratorResult(true);
2202 } else { 2202 } else {
2203 // Throw the provided value. 2203 // Throw the provided value.
2204 __ push(rax); 2204 __ Push(rax);
2205 __ CallRuntime(Runtime::kThrow, 1); 2205 __ CallRuntime(Runtime::kThrow, 1);
2206 } 2206 }
2207 __ jmp(&done); 2207 __ jmp(&done);
2208 2208
2209 // Throw error if we attempt to operate on a running generator. 2209 // Throw error if we attempt to operate on a running generator.
2210 __ bind(&wrong_state); 2210 __ bind(&wrong_state);
2211 __ push(rbx); 2211 __ Push(rbx);
2212 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1); 2212 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2213 2213
2214 __ bind(&done); 2214 __ bind(&done);
2215 context()->Plug(result_register()); 2215 context()->Plug(result_register());
2216 } 2216 }
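A note on the resume path above: call(&resume_frame) pushes the address of the following jmp(&done) as a return address, the standard prologue slots are then rebuilt by hand from the generator object, and the fast path jumps straight to the function's code entry plus the saved continuation offset. A rough picture of the rebuilt frame, listed from higher to lower stack addresses; the slot names are illustrative assumptions, not V8 identifiers:

// Sketch for orientation only.
struct ResumedGeneratorFrameSketch {
  void* return_address;  // pushed by call(&resume_frame); returning lands on jmp(&done)
  void* callers_rbp;     // pushq(rbp)
  void* context;         // Push(rsi)
  void* function;        // Push(rdi)
  // Operand-stack values (or holes, on the slow path) follow; the fast path
  // instead jumps to JSFunction code entry + continuation offset.
};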
2217 2217
2218 2218
2219 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 2219 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2220 Label gc_required; 2220 Label gc_required;
2221 Label allocated; 2221 Label allocated;
2222 2222
2223 Handle<Map> map(isolate()->native_context()->generator_result_map()); 2223 Handle<Map> map(isolate()->native_context()->generator_result_map());
2224 2224
2225 __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT); 2225 __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
2226 __ jmp(&allocated); 2226 __ jmp(&allocated);
2227 2227
2228 __ bind(&gc_required); 2228 __ bind(&gc_required);
2229 __ Push(Smi::FromInt(map->instance_size())); 2229 __ Push(Smi::FromInt(map->instance_size()));
2230 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 2230 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2231 __ movp(context_register(), 2231 __ movp(context_register(),
2232 Operand(rbp, StandardFrameConstants::kContextOffset)); 2232 Operand(rbp, StandardFrameConstants::kContextOffset));
2233 2233
2234 __ bind(&allocated); 2234 __ bind(&allocated);
2235 __ Move(rbx, map); 2235 __ Move(rbx, map);
2236 __ pop(rcx); 2236 __ Pop(rcx);
2237 __ Move(rdx, isolate()->factory()->ToBoolean(done)); 2237 __ Move(rdx, isolate()->factory()->ToBoolean(done));
2238 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); 2238 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2239 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx); 2239 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2240 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset), 2240 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2241 isolate()->factory()->empty_fixed_array()); 2241 isolate()->factory()->empty_fixed_array());
2242 __ Move(FieldOperand(rax, JSObject::kElementsOffset), 2242 __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2243 isolate()->factory()->empty_fixed_array()); 2243 isolate()->factory()->empty_fixed_array());
2244 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset), 2244 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2245 rcx); 2245 rcx);
2246 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset), 2246 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
(...skipping 23 matching lines...)
2270 2270
2271 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 2271 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2272 Token::Value op, 2272 Token::Value op,
2273 OverwriteMode mode, 2273 OverwriteMode mode,
2274 Expression* left, 2274 Expression* left,
2275 Expression* right) { 2275 Expression* right) {
2276 // Do combined smi check of the operands. Left operand is on the 2276 // Do combined smi check of the operands. Left operand is on the
2277 // stack (popped into rdx). Right operand is in rax but moved into 2277 // stack (popped into rdx). Right operand is in rax but moved into
2278 // rcx to make the shifts easier. 2278 // rcx to make the shifts easier.
2279 Label done, stub_call, smi_case; 2279 Label done, stub_call, smi_case;
2280 __ pop(rdx); 2280 __ Pop(rdx);
2281 __ movp(rcx, rax); 2281 __ movp(rcx, rax);
2282 __ or_(rax, rdx); 2282 __ or_(rax, rdx);
2283 JumpPatchSite patch_site(masm_); 2283 JumpPatchSite patch_site(masm_);
2284 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear); 2284 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2285 2285
2286 __ bind(&stub_call); 2286 __ bind(&stub_call);
2287 __ movp(rax, rcx); 2287 __ movp(rax, rcx);
2288 BinaryOpICStub stub(op, mode); 2288 BinaryOpICStub stub(op, mode);
2289 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2289 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2290 patch_site.EmitPatchInfo(); 2290 patch_site.EmitPatchInfo();
(...skipping 34 matching lines...)
2325 } 2325 }
2326 2326
2327 __ bind(&done); 2327 __ bind(&done);
2328 context()->Plug(rax); 2328 context()->Plug(rax);
2329 } 2329 }
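The combined smi check above works because of V8's x64 tagging: a smi keeps the low tag bit clear while a heap object sets it, so OR-ing the two operands and testing the tag bit of the result decides in one branch whether both are smis. A self-contained illustration of that invariant, using hypothetical tagged words chosen only for the example (the mask value is an assumption mirroring V8's kSmiTagMask):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t kSmiTagMask = 1;        // assumed to mirror V8's smi tag mask
  uint64_t smi_a = uint64_t{5} << 32;    // x64 smis keep the low 32 bits clear
  uint64_t smi_b = uint64_t{7} << 32;
  uint64_t heap_obj = 0xdeadbee1;        // heap pointers have the low bit set
  assert(((smi_a | smi_b) & kSmiTagMask) == 0);     // both smis: take smi_case
  assert(((smi_a | heap_obj) & kSmiTagMask) != 0);  // otherwise: fall to stub_call
  return 0;
}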
2330 2330
2331 2331
2332 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2332 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2333 Token::Value op, 2333 Token::Value op,
2334 OverwriteMode mode) { 2334 OverwriteMode mode) {
2335 __ pop(rdx); 2335 __ Pop(rdx);
2336 BinaryOpICStub stub(op, mode); 2336 BinaryOpICStub stub(op, mode);
2337 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2337 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2338 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2338 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2339 patch_site.EmitPatchInfo(); 2339 patch_site.EmitPatchInfo();
2340 context()->Plug(rax); 2340 context()->Plug(rax);
2341 } 2341 }
2342 2342
2343 2343
2344 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2344 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2345 ASSERT(expr->IsValidLeftHandSide()); 2345 ASSERT(expr->IsValidLeftHandSide());
(...skipping 10 matching lines...)
2356 } 2356 }
2357 2357
2358 switch (assign_type) { 2358 switch (assign_type) {
2359 case VARIABLE: { 2359 case VARIABLE: {
2360 Variable* var = expr->AsVariableProxy()->var(); 2360 Variable* var = expr->AsVariableProxy()->var();
2361 EffectContext context(this); 2361 EffectContext context(this);
2362 EmitVariableAssignment(var, Token::ASSIGN); 2362 EmitVariableAssignment(var, Token::ASSIGN);
2363 break; 2363 break;
2364 } 2364 }
2365 case NAMED_PROPERTY: { 2365 case NAMED_PROPERTY: {
2366 __ push(rax); // Preserve value. 2366 __ Push(rax); // Preserve value.
2367 VisitForAccumulatorValue(prop->obj()); 2367 VisitForAccumulatorValue(prop->obj());
2368 __ movp(rdx, rax); 2368 __ movp(rdx, rax);
2369 __ pop(rax); // Restore value. 2369 __ Pop(rax); // Restore value.
2370 __ Move(rcx, prop->key()->AsLiteral()->value()); 2370 __ Move(rcx, prop->key()->AsLiteral()->value());
2371 CallStoreIC(); 2371 CallStoreIC();
2372 break; 2372 break;
2373 } 2373 }
2374 case KEYED_PROPERTY: { 2374 case KEYED_PROPERTY: {
2375 __ push(rax); // Preserve value. 2375 __ Push(rax); // Preserve value.
2376 VisitForStackValue(prop->obj()); 2376 VisitForStackValue(prop->obj());
2377 VisitForAccumulatorValue(prop->key()); 2377 VisitForAccumulatorValue(prop->key());
2378 __ movp(rcx, rax); 2378 __ movp(rcx, rax);
2379 __ pop(rdx); 2379 __ Pop(rdx);
2380 __ pop(rax); // Restore value. 2380 __ Pop(rax); // Restore value.
2381 Handle<Code> ic = strict_mode() == SLOPPY 2381 Handle<Code> ic = strict_mode() == SLOPPY
2382 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2382 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2383 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2383 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2384 CallIC(ic); 2384 CallIC(ic);
2385 break; 2385 break;
2386 } 2386 }
2387 } 2387 }
2388 context()->Plug(rax); 2388 context()->Plug(rax);
2389 } 2389 }
2390 2390
2391 2391
2392 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2392 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2393 Variable* var, MemOperand location) { 2393 Variable* var, MemOperand location) {
2394 __ movp(location, rax); 2394 __ movp(location, rax);
2395 if (var->IsContextSlot()) { 2395 if (var->IsContextSlot()) {
2396 __ movp(rdx, rax); 2396 __ movp(rdx, rax);
2397 __ RecordWriteContextSlot( 2397 __ RecordWriteContextSlot(
2398 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs); 2398 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2399 } 2399 }
2400 } 2400 }
2401 2401
2402 2402
2403 void FullCodeGenerator::EmitCallStoreContextSlot( 2403 void FullCodeGenerator::EmitCallStoreContextSlot(
2404 Handle<String> name, StrictMode strict_mode) { 2404 Handle<String> name, StrictMode strict_mode) {
2405 __ push(rax); // Value. 2405 __ Push(rax); // Value.
2406 __ push(rsi); // Context. 2406 __ Push(rsi); // Context.
2407 __ Push(name); 2407 __ Push(name);
2408 __ Push(Smi::FromInt(strict_mode)); 2408 __ Push(Smi::FromInt(strict_mode));
2409 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2409 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2410 } 2410 }
2411 2411
2412 2412
2413 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2413 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2414 Token::Value op) { 2414 Token::Value op) {
2415 if (var->IsUnallocated()) { 2415 if (var->IsUnallocated()) {
2416 // Global var, const, or let. 2416 // Global var, const, or let.
2417 __ Move(rcx, var->name()); 2417 __ Move(rcx, var->name());
2418 __ movp(rdx, GlobalObjectOperand()); 2418 __ movp(rdx, GlobalObjectOperand());
2419 CallStoreIC(); 2419 CallStoreIC();
2420 2420
2421 } else if (op == Token::INIT_CONST_LEGACY) { 2421 } else if (op == Token::INIT_CONST_LEGACY) {
2422 // Const initializers need a write barrier. 2422 // Const initializers need a write barrier.
2423 ASSERT(!var->IsParameter()); // No const parameters. 2423 ASSERT(!var->IsParameter()); // No const parameters.
2424 if (var->IsLookupSlot()) { 2424 if (var->IsLookupSlot()) {
2425 __ push(rax); 2425 __ Push(rax);
2426 __ push(rsi); 2426 __ Push(rsi);
2427 __ Push(var->name()); 2427 __ Push(var->name());
2428 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 2428 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2429 } else { 2429 } else {
2430 ASSERT(var->IsStackLocal() || var->IsContextSlot()); 2430 ASSERT(var->IsStackLocal() || var->IsContextSlot());
2431 Label skip; 2431 Label skip;
2432 MemOperand location = VarOperand(var, rcx); 2432 MemOperand location = VarOperand(var, rcx);
2433 __ movp(rdx, location); 2433 __ movp(rdx, location);
2434 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2434 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2435 __ j(not_equal, &skip); 2435 __ j(not_equal, &skip);
2436 EmitStoreToStackLocalOrContextSlot(var, location); 2436 EmitStoreToStackLocalOrContextSlot(var, location);
(...skipping 40 matching lines...)
2477 2477
2478 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2478 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2479 // Assignment to a property, using a named store IC. 2479 // Assignment to a property, using a named store IC.
2480 Property* prop = expr->target()->AsProperty(); 2480 Property* prop = expr->target()->AsProperty();
2481 ASSERT(prop != NULL); 2481 ASSERT(prop != NULL);
2482 ASSERT(prop->key()->AsLiteral() != NULL); 2482 ASSERT(prop->key()->AsLiteral() != NULL);
2483 2483
2484 // Record source code position before IC call. 2484 // Record source code position before IC call.
2485 SetSourcePosition(expr->position()); 2485 SetSourcePosition(expr->position());
2486 __ Move(rcx, prop->key()->AsLiteral()->value()); 2486 __ Move(rcx, prop->key()->AsLiteral()->value());
2487 __ pop(rdx); 2487 __ Pop(rdx);
2488 CallStoreIC(expr->AssignmentFeedbackId()); 2488 CallStoreIC(expr->AssignmentFeedbackId());
2489 2489
2490 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2490 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2491 context()->Plug(rax); 2491 context()->Plug(rax);
2492 } 2492 }
2493 2493
2494 2494
2495 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2495 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2496 // Assignment to a property, using a keyed store IC. 2496 // Assignment to a property, using a keyed store IC.
2497 2497
2498 __ pop(rcx); 2498 __ Pop(rcx);
2499 __ pop(rdx); 2499 __ Pop(rdx);
2500 // Record source code position before IC call. 2500 // Record source code position before IC call.
2501 SetSourcePosition(expr->position()); 2501 SetSourcePosition(expr->position());
2502 Handle<Code> ic = strict_mode() == SLOPPY 2502 Handle<Code> ic = strict_mode() == SLOPPY
2503 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2503 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2504 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2504 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2505 CallIC(ic, expr->AssignmentFeedbackId()); 2505 CallIC(ic, expr->AssignmentFeedbackId());
2506 2506
2507 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2507 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2508 context()->Plug(rax); 2508 context()->Plug(rax);
2509 } 2509 }
2510 2510
2511 2511
2512 void FullCodeGenerator::VisitProperty(Property* expr) { 2512 void FullCodeGenerator::VisitProperty(Property* expr) {
2513 Comment cmnt(masm_, "[ Property"); 2513 Comment cmnt(masm_, "[ Property");
2514 Expression* key = expr->key(); 2514 Expression* key = expr->key();
2515 2515
2516 if (key->IsPropertyName()) { 2516 if (key->IsPropertyName()) {
2517 VisitForAccumulatorValue(expr->obj()); 2517 VisitForAccumulatorValue(expr->obj());
2518 EmitNamedPropertyLoad(expr); 2518 EmitNamedPropertyLoad(expr);
2519 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2519 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2520 context()->Plug(rax); 2520 context()->Plug(rax);
2521 } else { 2521 } else {
2522 VisitForStackValue(expr->obj()); 2522 VisitForStackValue(expr->obj());
2523 VisitForAccumulatorValue(expr->key()); 2523 VisitForAccumulatorValue(expr->key());
2524 __ pop(rdx); 2524 __ Pop(rdx);
2525 EmitKeyedPropertyLoad(expr); 2525 EmitKeyedPropertyLoad(expr);
2526 context()->Plug(rax); 2526 context()->Plug(rax);
2527 } 2527 }
2528 } 2528 }
2529 2529
2530 2530
2531 void FullCodeGenerator::CallIC(Handle<Code> code, 2531 void FullCodeGenerator::CallIC(Handle<Code> code,
2532 TypeFeedbackId ast_id) { 2532 TypeFeedbackId ast_id) {
2533 ic_total_count_++; 2533 ic_total_count_++;
2534 __ call(code, RelocInfo::CODE_TARGET, ast_id); 2534 __ call(code, RelocInfo::CODE_TARGET, ast_id);
(...skipping 17 matching lines...)
2552 // is a sloppy mode method. 2552 // is a sloppy mode method.
2553 __ Push(isolate()->factory()->undefined_value()); 2553 __ Push(isolate()->factory()->undefined_value());
2554 flags = NO_CALL_FUNCTION_FLAGS; 2554 flags = NO_CALL_FUNCTION_FLAGS;
2555 } else { 2555 } else {
2556 // Load the function from the receiver. 2556 // Load the function from the receiver.
2557 ASSERT(callee->IsProperty()); 2557 ASSERT(callee->IsProperty());
2558 __ movp(rax, Operand(rsp, 0)); 2558 __ movp(rax, Operand(rsp, 0));
2559 EmitNamedPropertyLoad(callee->AsProperty()); 2559 EmitNamedPropertyLoad(callee->AsProperty());
2560 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2560 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2561 // Push the target function under the receiver. 2561 // Push the target function under the receiver.
2562 __ push(Operand(rsp, 0)); 2562 __ Push(Operand(rsp, 0));
2563 __ movp(Operand(rsp, kPointerSize), rax); 2563 __ movp(Operand(rsp, kPointerSize), rax);
2564 flags = CALL_AS_METHOD; 2564 flags = CALL_AS_METHOD;
2565 } 2565 }
2566 2566
2567 // Load the arguments. 2567 // Load the arguments.
2568 { PreservePositionScope scope(masm()->positions_recorder()); 2568 { PreservePositionScope scope(masm()->positions_recorder());
2569 for (int i = 0; i < arg_count; i++) { 2569 for (int i = 0; i < arg_count; i++) {
2570 VisitForStackValue(args->at(i)); 2570 VisitForStackValue(args->at(i));
2571 } 2571 }
2572 } 2572 }
(...skipping 23 matching lines...)
2596 ZoneList<Expression*>* args = expr->arguments(); 2596 ZoneList<Expression*>* args = expr->arguments();
2597 int arg_count = args->length(); 2597 int arg_count = args->length();
2598 2598
2599 // Load the function from the receiver. 2599 // Load the function from the receiver.
2600 ASSERT(callee->IsProperty()); 2600 ASSERT(callee->IsProperty());
2601 __ movp(rdx, Operand(rsp, 0)); 2601 __ movp(rdx, Operand(rsp, 0));
2602 EmitKeyedPropertyLoad(callee->AsProperty()); 2602 EmitKeyedPropertyLoad(callee->AsProperty());
2603 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2603 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2604 2604
2605 // Push the target function under the receiver. 2605 // Push the target function under the receiver.
2606 __ push(Operand(rsp, 0)); 2606 __ Push(Operand(rsp, 0));
2607 __ movp(Operand(rsp, kPointerSize), rax); 2607 __ movp(Operand(rsp, kPointerSize), rax);
2608 2608
2609 // Load the arguments. 2609 // Load the arguments.
2610 { PreservePositionScope scope(masm()->positions_recorder()); 2610 { PreservePositionScope scope(masm()->positions_recorder());
2611 for (int i = 0; i < arg_count; i++) { 2611 for (int i = 0; i < arg_count; i++) {
2612 VisitForStackValue(args->at(i)); 2612 VisitForStackValue(args->at(i));
2613 } 2613 }
2614 } 2614 }
2615 2615
2616 // Record source position for debugger. 2616 // Record source position for debugger.
(...skipping 36 matching lines...)
2653 // Restore context register. 2653 // Restore context register.
2654 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2654 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2655 // Discard the function left on TOS. 2655 // Discard the function left on TOS.
2656 context()->DropAndPlug(1, rax); 2656 context()->DropAndPlug(1, rax);
2657 } 2657 }
2658 2658
2659 2659
2660 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2660 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2661 // Push copy of the first argument or undefined if it doesn't exist. 2661 // Push copy of the first argument or undefined if it doesn't exist.
2662 if (arg_count > 0) { 2662 if (arg_count > 0) {
2663 __ push(Operand(rsp, arg_count * kPointerSize)); 2663 __ Push(Operand(rsp, arg_count * kPointerSize));
2664 } else { 2664 } else {
2665 __ PushRoot(Heap::kUndefinedValueRootIndex); 2665 __ PushRoot(Heap::kUndefinedValueRootIndex);
2666 } 2666 }
2667 2667
2668 // Push the receiver of the enclosing function and do runtime call. 2668 // Push the receiver of the enclosing function and do runtime call.
2669 StackArgumentsAccessor args(rbp, info_->scope()->num_parameters()); 2669 StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2670 __ push(args.GetReceiverOperand()); 2670 __ Push(args.GetReceiverOperand());
2671 2671
2672 // Push the language mode. 2672 // Push the language mode.
2673 __ Push(Smi::FromInt(strict_mode())); 2673 __ Push(Smi::FromInt(strict_mode()));
2674 2674
2675 // Push the start position of the scope the call resides in. 2675 // Push the start position of the scope the call resides in.
2676 __ Push(Smi::FromInt(scope()->start_position())); 2676 __ Push(Smi::FromInt(scope()->start_position()));
2677 2677
2678 // Do the runtime call. 2678 // Do the runtime call.
2679 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); 2679 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2680 } 2680 }
(...skipping 20 matching lines...)
2701 VisitForStackValue(callee); 2701 VisitForStackValue(callee);
2702 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot. 2702 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2703 2703
2704 // Push the arguments. 2704 // Push the arguments.
2705 for (int i = 0; i < arg_count; i++) { 2705 for (int i = 0; i < arg_count; i++) {
2706 VisitForStackValue(args->at(i)); 2706 VisitForStackValue(args->at(i));
2707 } 2707 }
2708 2708
2709 // Push a copy of the function (found below the arguments) and resolve 2709 // Push a copy of the function (found below the arguments) and resolve
2710 // eval. 2710 // eval.
2711 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); 2711 __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2712 EmitResolvePossiblyDirectEval(arg_count); 2712 EmitResolvePossiblyDirectEval(arg_count);
2713 2713
2714 // The runtime call returns a pair of values in rax (function) and 2714 // The runtime call returns a pair of values in rax (function) and
2715 // rdx (receiver). Touch up the stack with the right values. 2715 // rdx (receiver). Touch up the stack with the right values.
2716 __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); 2716 __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2717 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax); 2717 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2718 } 2718 }
2719 // Record source position for debugger. 2719 // Record source position for debugger.
2720 SetSourcePosition(expr->position()); 2720 SetSourcePosition(expr->position());
2721 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); 2721 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
(...skipping 12 matching lines...)
2734 Label slow, done; 2734 Label slow, done;
2735 2735
2736 { PreservePositionScope scope(masm()->positions_recorder()); 2736 { PreservePositionScope scope(masm()->positions_recorder());
2737 // Generate code for loading from variables potentially shadowed by 2737 // Generate code for loading from variables potentially shadowed by
2738 // eval-introduced variables. 2738 // eval-introduced variables.
2739 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); 2739 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2740 } 2740 }
2741 __ bind(&slow); 2741 __ bind(&slow);
2742 // Call the runtime to find the function to call (returned in rax) and 2742 // Call the runtime to find the function to call (returned in rax) and
2743 // the object holding it (returned in rdx). 2743 // the object holding it (returned in rdx).
2744 __ push(context_register()); 2744 __ Push(context_register());
2745 __ Push(proxy->name()); 2745 __ Push(proxy->name());
2746 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2746 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2747 __ push(rax); // Function. 2747 __ Push(rax); // Function.
2748 __ push(rdx); // Receiver. 2748 __ Push(rdx); // Receiver.
2749 2749
2750 // If fast case code has been generated, emit code to push the function 2750 // If fast case code has been generated, emit code to push the function
2751 // and receiver and have the slow path jump around this code. 2751 // and receiver and have the slow path jump around this code.
2752 if (done.is_linked()) { 2752 if (done.is_linked()) {
2753 Label call; 2753 Label call;
2754 __ jmp(&call, Label::kNear); 2754 __ jmp(&call, Label::kNear);
2755 __ bind(&done); 2755 __ bind(&done);
2756 // Push function. 2756 // Push function.
2757 __ push(rax); 2757 __ Push(rax);
2758 // The receiver is implicitly the global receiver. Indicate this by 2758 // The receiver is implicitly the global receiver. Indicate this by
2759 // passing the hole to the call function stub. 2759 // passing the hole to the call function stub.
2760 __ PushRoot(Heap::kUndefinedValueRootIndex); 2760 __ PushRoot(Heap::kUndefinedValueRootIndex);
2761 __ bind(&call); 2761 __ bind(&call);
2762 } 2762 }
2763 2763
2764 // The receiver is either the global receiver or an object found by 2764 // The receiver is either the global receiver or an object found by
2765 // LoadContextSlot. 2765 // LoadContextSlot.
2766 EmitCallWithStub(expr); 2766 EmitCallWithStub(expr);
2767 } else if (call_type == Call::PROPERTY_CALL) { 2767 } else if (call_type == Call::PROPERTY_CALL) {
(...skipping 404 matching lines...)
3172 VisitForStackValue(args->at(0)); 3172 VisitForStackValue(args->at(0));
3173 VisitForAccumulatorValue(args->at(1)); 3173 VisitForAccumulatorValue(args->at(1));
3174 3174
3175 Label materialize_true, materialize_false; 3175 Label materialize_true, materialize_false;
3176 Label* if_true = NULL; 3176 Label* if_true = NULL;
3177 Label* if_false = NULL; 3177 Label* if_false = NULL;
3178 Label* fall_through = NULL; 3178 Label* fall_through = NULL;
3179 context()->PrepareTest(&materialize_true, &materialize_false, 3179 context()->PrepareTest(&materialize_true, &materialize_false,
3180 &if_true, &if_false, &fall_through); 3180 &if_true, &if_false, &fall_through);
3181 3181
3182 __ pop(rbx); 3182 __ Pop(rbx);
3183 __ cmpq(rax, rbx); 3183 __ cmpq(rax, rbx);
3184 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3184 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3185 Split(equal, if_true, if_false, fall_through); 3185 Split(equal, if_true, if_false, fall_through);
3186 3186
3187 context()->Plug(if_true, if_false); 3187 context()->Plug(if_true, if_false);
3188 } 3188 }
3189 3189
3190 3190
3191 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3191 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3192 ZoneList<Expression*>* args = expr->arguments(); 3192 ZoneList<Expression*>* args = expr->arguments();
(...skipping 212 matching lines...)
3405 ZoneList<Expression*>* args = expr->arguments(); 3405 ZoneList<Expression*>* args = expr->arguments();
3406 ASSERT_EQ(3, args->length()); 3406 ASSERT_EQ(3, args->length());
3407 3407
3408 Register string = rax; 3408 Register string = rax;
3409 Register index = rbx; 3409 Register index = rbx;
3410 Register value = rcx; 3410 Register value = rcx;
3411 3411
3412 VisitForStackValue(args->at(1)); // index 3412 VisitForStackValue(args->at(1)); // index
3413 VisitForStackValue(args->at(2)); // value 3413 VisitForStackValue(args->at(2)); // value
3414 VisitForAccumulatorValue(args->at(0)); // string 3414 VisitForAccumulatorValue(args->at(0)); // string
3415 __ pop(value); 3415 __ Pop(value);
3416 __ pop(index); 3416 __ Pop(index);
3417 3417
3418 if (FLAG_debug_code) { 3418 if (FLAG_debug_code) {
3419 __ Check(__ CheckSmi(value), kNonSmiValue); 3419 __ Check(__ CheckSmi(value), kNonSmiValue);
3420 __ Check(__ CheckSmi(index), kNonSmiValue); 3420 __ Check(__ CheckSmi(index), kNonSmiValue);
3421 } 3421 }
3422 3422
3423 __ SmiToInteger32(value, value); 3423 __ SmiToInteger32(value, value);
3424 __ SmiToInteger32(index, index); 3424 __ SmiToInteger32(index, index);
3425 3425
3426 if (FLAG_debug_code) { 3426 if (FLAG_debug_code) {
(...skipping 11 matching lines...)
3438 ZoneList<Expression*>* args = expr->arguments(); 3438 ZoneList<Expression*>* args = expr->arguments();
3439 ASSERT_EQ(3, args->length()); 3439 ASSERT_EQ(3, args->length());
3440 3440
3441 Register string = rax; 3441 Register string = rax;
3442 Register index = rbx; 3442 Register index = rbx;
3443 Register value = rcx; 3443 Register value = rcx;
3444 3444
3445 VisitForStackValue(args->at(1)); // index 3445 VisitForStackValue(args->at(1)); // index
3446 VisitForStackValue(args->at(2)); // value 3446 VisitForStackValue(args->at(2)); // value
3447 VisitForAccumulatorValue(args->at(0)); // string 3447 VisitForAccumulatorValue(args->at(0)); // string
3448 __ pop(value); 3448 __ Pop(value);
3449 __ pop(index); 3449 __ Pop(index);
3450 3450
3451 if (FLAG_debug_code) { 3451 if (FLAG_debug_code) {
3452 __ Check(__ CheckSmi(value), kNonSmiValue); 3452 __ Check(__ CheckSmi(value), kNonSmiValue);
3453 __ Check(__ CheckSmi(index), kNonSmiValue); 3453 __ Check(__ CheckSmi(index), kNonSmiValue);
3454 } 3454 }
3455 3455
3456 __ SmiToInteger32(value, value); 3456 __ SmiToInteger32(value, value);
3457 __ SmiToInteger32(index, index); 3457 __ SmiToInteger32(index, index);
3458 3458
3459 if (FLAG_debug_code) { 3459 if (FLAG_debug_code) {
(...skipping 18 matching lines...)
3478 context()->Plug(rax); 3478 context()->Plug(rax);
3479 } 3479 }
3480 3480
3481 3481
3482 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3482 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3483 ZoneList<Expression*>* args = expr->arguments(); 3483 ZoneList<Expression*>* args = expr->arguments();
3484 ASSERT(args->length() == 2); 3484 ASSERT(args->length() == 2);
3485 3485
3486 VisitForStackValue(args->at(0)); // Load the object. 3486 VisitForStackValue(args->at(0)); // Load the object.
3487 VisitForAccumulatorValue(args->at(1)); // Load the value. 3487 VisitForAccumulatorValue(args->at(1)); // Load the value.
3488 __ pop(rbx); // rax = value. rbx = object. 3488 __ Pop(rbx); // rax = value. rbx = object.
3489 3489
3490 Label done; 3490 Label done;
3491 // If the object is a smi, return the value. 3491 // If the object is a smi, return the value.
3492 __ JumpIfSmi(rbx, &done); 3492 __ JumpIfSmi(rbx, &done);
3493 3493
3494 // If the object is not a value type, return the value. 3494 // If the object is not a value type, return the value.
3495 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx); 3495 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3496 __ j(not_equal, &done); 3496 __ j(not_equal, &done);
3497 3497
3498 // Store the value. 3498 // Store the value.
(...skipping 44 matching lines...)
3543 ZoneList<Expression*>* args = expr->arguments(); 3543 ZoneList<Expression*>* args = expr->arguments();
3544 ASSERT(args->length() == 2); 3544 ASSERT(args->length() == 2);
3545 3545
3546 VisitForStackValue(args->at(0)); 3546 VisitForStackValue(args->at(0));
3547 VisitForAccumulatorValue(args->at(1)); 3547 VisitForAccumulatorValue(args->at(1));
3548 3548
3549 Register object = rbx; 3549 Register object = rbx;
3550 Register index = rax; 3550 Register index = rax;
3551 Register result = rdx; 3551 Register result = rdx;
3552 3552
3553 __ pop(object); 3553 __ Pop(object);
3554 3554
3555 Label need_conversion; 3555 Label need_conversion;
3556 Label index_out_of_range; 3556 Label index_out_of_range;
3557 Label done; 3557 Label done;
3558 StringCharCodeAtGenerator generator(object, 3558 StringCharCodeAtGenerator generator(object,
3559 index, 3559 index,
3560 result, 3560 result,
3561 &need_conversion, 3561 &need_conversion,
3562 &need_conversion, 3562 &need_conversion,
3563 &index_out_of_range, 3563 &index_out_of_range,
(...skipping 26 matching lines...)
3590 ASSERT(args->length() == 2); 3590 ASSERT(args->length() == 2);
3591 3591
3592 VisitForStackValue(args->at(0)); 3592 VisitForStackValue(args->at(0));
3593 VisitForAccumulatorValue(args->at(1)); 3593 VisitForAccumulatorValue(args->at(1));
3594 3594
3595 Register object = rbx; 3595 Register object = rbx;
3596 Register index = rax; 3596 Register index = rax;
3597 Register scratch = rdx; 3597 Register scratch = rdx;
3598 Register result = rax; 3598 Register result = rax;
3599 3599
3600 __ pop(object); 3600 __ Pop(object);
3601 3601
3602 Label need_conversion; 3602 Label need_conversion;
3603 Label index_out_of_range; 3603 Label index_out_of_range;
3604 Label done; 3604 Label done;
3605 StringCharAtGenerator generator(object, 3605 StringCharAtGenerator generator(object,
3606 index, 3606 index,
3607 scratch, 3607 scratch,
3608 result, 3608 result,
3609 &need_conversion, 3609 &need_conversion,
3610 &need_conversion, 3610 &need_conversion,
(...skipping 21 matching lines...)
3632 context()->Plug(result); 3632 context()->Plug(result);
3633 } 3633 }
3634 3634
3635 3635
3636 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3636 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3637 ZoneList<Expression*>* args = expr->arguments(); 3637 ZoneList<Expression*>* args = expr->arguments();
3638 ASSERT_EQ(2, args->length()); 3638 ASSERT_EQ(2, args->length());
3639 VisitForStackValue(args->at(0)); 3639 VisitForStackValue(args->at(0));
3640 VisitForAccumulatorValue(args->at(1)); 3640 VisitForAccumulatorValue(args->at(1));
3641 3641
3642 __ pop(rdx); 3642 __ Pop(rdx);
3643 StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); 3643 StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3644 __ CallStub(&stub); 3644 __ CallStub(&stub);
3645 context()->Plug(rax); 3645 context()->Plug(rax);
3646 } 3646 }
3647 3647
3648 3648
3649 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3649 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3650 ZoneList<Expression*>* args = expr->arguments(); 3650 ZoneList<Expression*>* args = expr->arguments();
3651 ASSERT_EQ(2, args->length()); 3651 ASSERT_EQ(2, args->length());
3652 3652
(...skipping 43 matching lines...)
3696 __ j(not_equal, &runtime); 3696 __ j(not_equal, &runtime);
3697 3697
3698 // InvokeFunction requires the function in rdi. Move it in there. 3698 // InvokeFunction requires the function in rdi. Move it in there.
3699 __ movp(rdi, result_register()); 3699 __ movp(rdi, result_register());
3700 ParameterCount count(arg_count); 3700 ParameterCount count(arg_count);
3701 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper()); 3701 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
3702 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 3702 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3703 __ jmp(&done); 3703 __ jmp(&done);
3704 3704
3705 __ bind(&runtime); 3705 __ bind(&runtime);
3706 __ push(rax); 3706 __ Push(rax);
3707 __ CallRuntime(Runtime::kCall, args->length()); 3707 __ CallRuntime(Runtime::kCall, args->length());
3708 __ bind(&done); 3708 __ bind(&done);
3709 3709
3710 context()->Plug(rax); 3710 context()->Plug(rax);
3711 } 3711 }
3712 3712
3713 3713
3714 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3714 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3715 RegExpConstructResultStub stub; 3715 RegExpConstructResultStub stub;
3716 ZoneList<Expression*>* args = expr->arguments(); 3716 ZoneList<Expression*>* args = expr->arguments();
3717 ASSERT(args->length() == 3); 3717 ASSERT(args->length() == 3);
3718 VisitForStackValue(args->at(0)); 3718 VisitForStackValue(args->at(0));
3719 VisitForStackValue(args->at(1)); 3719 VisitForStackValue(args->at(1));
3720 VisitForAccumulatorValue(args->at(2)); 3720 VisitForAccumulatorValue(args->at(2));
3721 __ pop(rbx); 3721 __ Pop(rbx);
3722 __ pop(rcx); 3722 __ Pop(rcx);
3723 __ CallStub(&stub); 3723 __ CallStub(&stub);
3724 context()->Plug(rax); 3724 context()->Plug(rax);
3725 } 3725 }
3726 3726
3727 3727
3728 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3728 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3729 ZoneList<Expression*>* args = expr->arguments(); 3729 ZoneList<Expression*>* args = expr->arguments();
3730 ASSERT_EQ(2, args->length()); 3730 ASSERT_EQ(2, args->length());
3731 3731
3732 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 3732 ASSERT_NE(NULL, args->at(0)->AsLiteral());
(...skipping 33 matching lines...)
3766 FixedArray::kHeaderSize)); 3766 FixedArray::kHeaderSize));
3767 __ j(not_equal, &not_found, Label::kNear); 3767 __ j(not_equal, &not_found, Label::kNear);
3768 __ movp(rax, FieldOperand(cache, 3768 __ movp(rax, FieldOperand(cache,
3769 index.reg, 3769 index.reg,
3770 index.scale, 3770 index.scale,
3771 FixedArray::kHeaderSize + kPointerSize)); 3771 FixedArray::kHeaderSize + kPointerSize));
3772 __ jmp(&done, Label::kNear); 3772 __ jmp(&done, Label::kNear);
3773 3773
3774 __ bind(&not_found); 3774 __ bind(&not_found);
3775 // Call runtime to perform the lookup. 3775 // Call runtime to perform the lookup.
3776 __ push(cache); 3776 __ Push(cache);
3777 __ push(key); 3777 __ Push(key);
3778 __ CallRuntime(Runtime::kGetFromCache, 2); 3778 __ CallRuntime(Runtime::kGetFromCache, 2);
3779 3779
3780 __ bind(&done); 3780 __ bind(&done);
3781 context()->Plug(rax); 3781 context()->Plug(rax);
3782 } 3782 }
3783 3783
3784 3784
3785 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3785 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3786 ZoneList<Expression*>* args = expr->arguments(); 3786 ZoneList<Expression*>* args = expr->arguments();
3787 ASSERT(args->length() == 1); 3787 ASSERT(args->length() == 1);
(...skipping 325 matching lines...)
4113 return; 4113 return;
4114 } 4114 }
4115 4115
4116 Comment cmnt(masm_, "[ CallRuntime"); 4116 Comment cmnt(masm_, "[ CallRuntime");
4117 ZoneList<Expression*>* args = expr->arguments(); 4117 ZoneList<Expression*>* args = expr->arguments();
4118 int arg_count = args->length(); 4118 int arg_count = args->length();
4119 4119
4120 if (expr->is_jsruntime()) { 4120 if (expr->is_jsruntime()) {
4121 // Push the builtins object as receiver. 4121 // Push the builtins object as receiver.
4122 __ movp(rax, GlobalObjectOperand()); 4122 __ movp(rax, GlobalObjectOperand());
4123 __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset)); 4123 __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4124 4124
4125 // Load the function from the receiver. 4125 // Load the function from the receiver.
4126 __ movp(rax, Operand(rsp, 0)); 4126 __ movp(rax, Operand(rsp, 0));
4127 __ Move(rcx, expr->name()); 4127 __ Move(rcx, expr->name());
4128 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); 4128 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4129 4129
4130 // Push the target function under the receiver. 4130 // Push the target function under the receiver.
4131 __ push(Operand(rsp, 0)); 4131 __ Push(Operand(rsp, 0));
4132 __ movp(Operand(rsp, kPointerSize), rax); 4132 __ movp(Operand(rsp, kPointerSize), rax);
4133 4133
4134 // Push the arguments ("left-to-right"). 4134 // Push the arguments ("left-to-right").
4135 for (int i = 0; i < arg_count; i++) { 4135 for (int i = 0; i < arg_count; i++) {
4136 VisitForStackValue(args->at(i)); 4136 VisitForStackValue(args->at(i));
4137 } 4137 }
4138 4138
4139 // Record source position of the IC call. 4139 // Record source position of the IC call.
4140 SetSourcePosition(expr->position()); 4140 SetSourcePosition(expr->position());
4141 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); 4141 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
(...skipping 29 matching lines...)
4171 VisitForStackValue(property->key()); 4171 VisitForStackValue(property->key());
4172 __ Push(Smi::FromInt(strict_mode())); 4172 __ Push(Smi::FromInt(strict_mode()));
4173 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4173 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4174 context()->Plug(rax); 4174 context()->Plug(rax);
4175 } else if (proxy != NULL) { 4175 } else if (proxy != NULL) {
4176 Variable* var = proxy->var(); 4176 Variable* var = proxy->var();
4177 // Delete of an unqualified identifier is disallowed in strict mode 4177 // Delete of an unqualified identifier is disallowed in strict mode
4178 // but "delete this" is allowed. 4178 // but "delete this" is allowed.
4179 ASSERT(strict_mode() == SLOPPY || var->is_this()); 4179 ASSERT(strict_mode() == SLOPPY || var->is_this());
4180 if (var->IsUnallocated()) { 4180 if (var->IsUnallocated()) {
4181 __ push(GlobalObjectOperand()); 4181 __ Push(GlobalObjectOperand());
4182 __ Push(var->name()); 4182 __ Push(var->name());
4183 __ Push(Smi::FromInt(SLOPPY)); 4183 __ Push(Smi::FromInt(SLOPPY));
4184 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4184 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4185 context()->Plug(rax); 4185 context()->Plug(rax);
4186 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4186 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4187 // Result of deleting non-global variables is false. 'this' is 4187 // Result of deleting non-global variables is false. 'this' is
4188 // not really a variable, though we implement it as one. The 4188 // not really a variable, though we implement it as one. The
4189 // subexpression does not have side effects. 4189 // subexpression does not have side effects.
4190 context()->Plug(var->is_this()); 4190 context()->Plug(var->is_this());
4191 } else { 4191 } else {
4192 // Non-global variable. Call the runtime to try to delete from the 4192 // Non-global variable. Call the runtime to try to delete from the
4193 // context where the variable was introduced. 4193 // context where the variable was introduced.
4194 __ push(context_register()); 4194 __ Push(context_register());
4195 __ Push(var->name()); 4195 __ Push(var->name());
4196 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 4196 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
4197 context()->Plug(rax); 4197 context()->Plug(rax);
4198 } 4198 }
4199 } else { 4199 } else {
4200 // Result of deleting non-property, non-variable reference is true. 4200 // Result of deleting non-property, non-variable reference is true.
4201 // The subexpression may have side effects. 4201 // The subexpression may have side effects.
4202 VisitForEffect(expr->expression()); 4202 VisitForEffect(expr->expression());
4203 context()->Plug(true); 4203 context()->Plug(true);
4204 } 4204 }
(...skipping 91 matching lines...)
4296 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); 4296 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4297 AccumulatorValueContext context(this); 4297 AccumulatorValueContext context(this);
4298 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4298 EmitVariableLoad(expr->expression()->AsVariableProxy());
4299 } else { 4299 } else {
4300 // Reserve space for result of postfix operation. 4300 // Reserve space for result of postfix operation.
4301 if (expr->is_postfix() && !context()->IsEffect()) { 4301 if (expr->is_postfix() && !context()->IsEffect()) {
4302 __ Push(Smi::FromInt(0)); 4302 __ Push(Smi::FromInt(0));
4303 } 4303 }
4304 if (assign_type == NAMED_PROPERTY) { 4304 if (assign_type == NAMED_PROPERTY) {
4305 VisitForAccumulatorValue(prop->obj()); 4305 VisitForAccumulatorValue(prop->obj());
4306 __ push(rax); // Copy of receiver, needed for later store. 4306 __ Push(rax); // Copy of receiver, needed for later store.
4307 EmitNamedPropertyLoad(prop); 4307 EmitNamedPropertyLoad(prop);
4308 } else { 4308 } else {
4309 VisitForStackValue(prop->obj()); 4309 VisitForStackValue(prop->obj());
4310 VisitForAccumulatorValue(prop->key()); 4310 VisitForAccumulatorValue(prop->key());
4311 __ movp(rdx, Operand(rsp, 0)); // Leave receiver on stack 4311 __ movp(rdx, Operand(rsp, 0)); // Leave receiver on stack
4312 __ push(rax); // Copy of key, needed for later store. 4312 __ Push(rax); // Copy of key, needed for later store.
4313 EmitKeyedPropertyLoad(prop); 4313 EmitKeyedPropertyLoad(prop);
4314 } 4314 }
4315 } 4315 }
4316 4316
4317 // We need a second deoptimization point after loading the value 4317 // We need a second deoptimization point after loading the value
4318 // in case evaluating the property load may have a side effect. 4318 // in case evaluating the property load may have a side effect.
4319 if (assign_type == VARIABLE) { 4319 if (assign_type == VARIABLE) {
4320 PrepareForBailout(expr->expression(), TOS_REG); 4320 PrepareForBailout(expr->expression(), TOS_REG);
4321 } else { 4321 } else {
4322 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 4322 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4323 } 4323 }
4324 4324
4325 // Inline smi case if we are in a loop. 4325 // Inline smi case if we are in a loop.
4326 Label done, stub_call; 4326 Label done, stub_call;
4327 JumpPatchSite patch_site(masm_); 4327 JumpPatchSite patch_site(masm_);
4328 if (ShouldInlineSmiCase(expr->op())) { 4328 if (ShouldInlineSmiCase(expr->op())) {
4329 Label slow; 4329 Label slow;
4330 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear); 4330 patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4331 4331
4332 // Save result for postfix expressions. 4332 // Save result for postfix expressions.
4333 if (expr->is_postfix()) { 4333 if (expr->is_postfix()) {
4334 if (!context()->IsEffect()) { 4334 if (!context()->IsEffect()) {
4335 // Save the result on the stack. If we have a named or keyed property 4335 // Save the result on the stack. If we have a named or keyed property
4336 // we store the result under the receiver that is currently on top 4336 // we store the result under the receiver that is currently on top
4337 // of the stack. 4337 // of the stack.
4338 switch (assign_type) { 4338 switch (assign_type) {
4339 case VARIABLE: 4339 case VARIABLE:
4340 __ push(rax); 4340 __ Push(rax);
4341 break; 4341 break;
4342 case NAMED_PROPERTY: 4342 case NAMED_PROPERTY:
4343 __ movp(Operand(rsp, kPointerSize), rax); 4343 __ movp(Operand(rsp, kPointerSize), rax);
4344 break; 4344 break;
4345 case KEYED_PROPERTY: 4345 case KEYED_PROPERTY:
4346 __ movp(Operand(rsp, 2 * kPointerSize), rax); 4346 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4347 break; 4347 break;
4348 } 4348 }
4349 } 4349 }
4350 } 4350 }
(...skipping 14 matching lines...)
4365 __ CallStub(&convert_stub); 4365 __ CallStub(&convert_stub);
4366 4366
4367 // Save result for postfix expressions. 4367 // Save result for postfix expressions.
4368 if (expr->is_postfix()) { 4368 if (expr->is_postfix()) {
4369 if (!context()->IsEffect()) { 4369 if (!context()->IsEffect()) {
4370 // Save the result on the stack. If we have a named or keyed property 4370 // Save the result on the stack. If we have a named or keyed property
4371 // we store the result under the receiver that is currently on top 4371 // we store the result under the receiver that is currently on top
4372 // of the stack. 4372 // of the stack.
4373 switch (assign_type) { 4373 switch (assign_type) {
4374 case VARIABLE: 4374 case VARIABLE:
4375 __ push(rax); 4375 __ Push(rax);
4376 break; 4376 break;
4377 case NAMED_PROPERTY: 4377 case NAMED_PROPERTY:
4378 __ movp(Operand(rsp, kPointerSize), rax); 4378 __ movp(Operand(rsp, kPointerSize), rax);
4379 break; 4379 break;
4380 case KEYED_PROPERTY: 4380 case KEYED_PROPERTY:
4381 __ movp(Operand(rsp, 2 * kPointerSize), rax); 4381 __ movp(Operand(rsp, 2 * kPointerSize), rax);
4382 break; 4382 break;
4383 } 4383 }
4384 } 4384 }
4385 } 4385 }
(...skipping 29 matching lines...)
4415 } else { 4415 } else {
4416 // Perform the assignment as if via '='. 4416 // Perform the assignment as if via '='.
4417 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4417 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4418 Token::ASSIGN); 4418 Token::ASSIGN);
4419 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4419 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4420 context()->Plug(rax); 4420 context()->Plug(rax);
4421 } 4421 }
4422 break; 4422 break;
4423 case NAMED_PROPERTY: { 4423 case NAMED_PROPERTY: {
4424 __ Move(rcx, prop->key()->AsLiteral()->value()); 4424 __ Move(rcx, prop->key()->AsLiteral()->value());
4425 __ pop(rdx); 4425 __ Pop(rdx);
4426 CallStoreIC(expr->CountStoreFeedbackId()); 4426 CallStoreIC(expr->CountStoreFeedbackId());
4427 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4427 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4428 if (expr->is_postfix()) { 4428 if (expr->is_postfix()) {
4429 if (!context()->IsEffect()) { 4429 if (!context()->IsEffect()) {
4430 context()->PlugTOS(); 4430 context()->PlugTOS();
4431 } 4431 }
4432 } else { 4432 } else {
4433 context()->Plug(rax); 4433 context()->Plug(rax);
4434 } 4434 }
4435 break; 4435 break;
4436 } 4436 }
4437 case KEYED_PROPERTY: { 4437 case KEYED_PROPERTY: {
4438 __ pop(rcx); 4438 __ Pop(rcx);
4439 __ pop(rdx); 4439 __ Pop(rdx);
4440 Handle<Code> ic = strict_mode() == SLOPPY 4440 Handle<Code> ic = strict_mode() == SLOPPY
4441 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4441 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4442 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4442 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4443 CallIC(ic, expr->CountStoreFeedbackId()); 4443 CallIC(ic, expr->CountStoreFeedbackId());
4444 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4444 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4445 if (expr->is_postfix()) { 4445 if (expr->is_postfix()) {
4446 if (!context()->IsEffect()) { 4446 if (!context()->IsEffect()) {
4447 context()->PlugTOS(); 4447 context()->PlugTOS();
4448 } 4448 }
4449 } else { 4449 } else {
(...skipping 21 matching lines...)
4471 context()->Plug(rax); 4471 context()->Plug(rax);
4472 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4472 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4473 Comment cmnt(masm_, "[ Lookup slot"); 4473 Comment cmnt(masm_, "[ Lookup slot");
4474 Label done, slow; 4474 Label done, slow;
4475 4475
4476 // Generate code for loading from variables potentially shadowed 4476 // Generate code for loading from variables potentially shadowed
4477 // by eval-introduced variables. 4477 // by eval-introduced variables.
4478 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4478 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4479 4479
4480 __ bind(&slow); 4480 __ bind(&slow);
4481 __ push(rsi); 4481 __ Push(rsi);
4482 __ Push(proxy->name()); 4482 __ Push(proxy->name());
4483 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4483 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4484 PrepareForBailout(expr, TOS_REG); 4484 PrepareForBailout(expr, TOS_REG);
4485 __ bind(&done); 4485 __ bind(&done);
4486 4486
4487 context()->Plug(rax); 4487 context()->Plug(rax);
4488 } else { 4488 } else {
4489 // This expression cannot throw a reference error at the top level. 4489 // This expression cannot throw a reference error at the top level.
4490 VisitInDuplicateContext(expr); 4490 VisitInDuplicateContext(expr);
4491 } 4491 }
(...skipping 113 matching lines...)
4605 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4605 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4606 __ testq(rax, rax); 4606 __ testq(rax, rax);
4607 // The stub returns 0 for true. 4607 // The stub returns 0 for true.
4608 Split(zero, if_true, if_false, fall_through); 4608 Split(zero, if_true, if_false, fall_through);
4609 break; 4609 break;
4610 } 4610 }
4611 4611
4612 default: { 4612 default: {
4613 VisitForAccumulatorValue(expr->right()); 4613 VisitForAccumulatorValue(expr->right());
4614 Condition cc = CompareIC::ComputeCondition(op); 4614 Condition cc = CompareIC::ComputeCondition(op);
4615 __ pop(rdx); 4615 __ Pop(rdx);
4616 4616
4617 bool inline_smi_code = ShouldInlineSmiCase(op); 4617 bool inline_smi_code = ShouldInlineSmiCase(op);
4618 JumpPatchSite patch_site(masm_); 4618 JumpPatchSite patch_site(masm_);
4619 if (inline_smi_code) { 4619 if (inline_smi_code) {
4620 Label slow_case; 4620 Label slow_case;
4621 __ movp(rcx, rdx); 4621 __ movp(rcx, rdx);
4622 __ or_(rcx, rax); 4622 __ or_(rcx, rax);
4623 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); 4623 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4624 __ cmpq(rdx, rax); 4624 __ cmpq(rdx, rax);
4625 Split(cc, if_true, if_false, NULL); 4625 Split(cc, if_true, if_false, NULL);
(...skipping 79 matching lines...)
4705 declaration_scope->is_module_scope()) { 4705 declaration_scope->is_module_scope()) {
4706 // Contexts nested in the native context have a canonical empty function 4706 // Contexts nested in the native context have a canonical empty function
4707 // as their closure, not the anonymous closure containing the global 4707 // as their closure, not the anonymous closure containing the global
4708 // code. Pass a smi sentinel and let the runtime look up the empty 4708 // code. Pass a smi sentinel and let the runtime look up the empty
4709 // function. 4709 // function.
4710 __ Push(Smi::FromInt(0)); 4710 __ Push(Smi::FromInt(0));
4711 } else if (declaration_scope->is_eval_scope()) { 4711 } else if (declaration_scope->is_eval_scope()) {
4712 // Contexts created by a call to eval have the same closure as the 4712 // Contexts created by a call to eval have the same closure as the
4713 // context calling eval, not the anonymous closure containing the eval 4713 // context calling eval, not the anonymous closure containing the eval
4714 // code. Fetch it from the context. 4714 // code. Fetch it from the context.
4715 __ push(ContextOperand(rsi, Context::CLOSURE_INDEX)); 4715 __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4716 } else { 4716 } else {
4717 ASSERT(declaration_scope->is_function_scope()); 4717 ASSERT(declaration_scope->is_function_scope());
4718 __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 4718 __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4719 } 4719 }
4720 } 4720 }
4721 4721
4722 4722
4723 // ---------------------------------------------------------------------------- 4723 // ----------------------------------------------------------------------------
4724 // Non-local control flow support. 4724 // Non-local control flow support.
4725 4725
4726 4726
4727 void FullCodeGenerator::EnterFinallyBlock() { 4727 void FullCodeGenerator::EnterFinallyBlock() {
4728 ASSERT(!result_register().is(rdx)); 4728 ASSERT(!result_register().is(rdx));
4729 ASSERT(!result_register().is(rcx)); 4729 ASSERT(!result_register().is(rcx));
4730 // Cook return address on top of stack (smi encoded Code* delta) 4730 // Cook return address on top of stack (smi encoded Code* delta)
4731 __ PopReturnAddressTo(rdx); 4731 __ PopReturnAddressTo(rdx);
4732 __ Move(rcx, masm_->CodeObject()); 4732 __ Move(rcx, masm_->CodeObject());
4733 __ subq(rdx, rcx); 4733 __ subq(rdx, rcx);
4734 __ Integer32ToSmi(rdx, rdx); 4734 __ Integer32ToSmi(rdx, rdx);
4735 __ push(rdx); 4735 __ Push(rdx);
4736 4736
4737 // Store result register while executing finally block. 4737 // Store result register while executing finally block.
4738 __ push(result_register()); 4738 __ Push(result_register());
4739 4739
4740 // Store pending message while executing finally block. 4740 // Store pending message while executing finally block.
4741 ExternalReference pending_message_obj = 4741 ExternalReference pending_message_obj =
4742 ExternalReference::address_of_pending_message_obj(isolate()); 4742 ExternalReference::address_of_pending_message_obj(isolate());
4743 __ Load(rdx, pending_message_obj); 4743 __ Load(rdx, pending_message_obj);
4744 __ push(rdx); 4744 __ Push(rdx);
4745 4745
4746 ExternalReference has_pending_message = 4746 ExternalReference has_pending_message =
4747 ExternalReference::address_of_has_pending_message(isolate()); 4747 ExternalReference::address_of_has_pending_message(isolate());
4748 __ Load(rdx, has_pending_message); 4748 __ Load(rdx, has_pending_message);
4749 __ Integer32ToSmi(rdx, rdx); 4749 __ Integer32ToSmi(rdx, rdx);
4750 __ push(rdx); 4750 __ Push(rdx);
4751 4751
4752 ExternalReference pending_message_script = 4752 ExternalReference pending_message_script =
4753 ExternalReference::address_of_pending_message_script(isolate()); 4753 ExternalReference::address_of_pending_message_script(isolate());
4754 __ Load(rdx, pending_message_script); 4754 __ Load(rdx, pending_message_script);
4755 __ push(rdx); 4755 __ Push(rdx);
4756 } 4756 }
4757 4757
4758 4758
4759 void FullCodeGenerator::ExitFinallyBlock() { 4759 void FullCodeGenerator::ExitFinallyBlock() {
4760 ASSERT(!result_register().is(rdx)); 4760 ASSERT(!result_register().is(rdx));
4761 ASSERT(!result_register().is(rcx)); 4761 ASSERT(!result_register().is(rcx));
4762 // Restore pending message from stack. 4762 // Restore pending message from stack.
4763 __ pop(rdx); 4763 __ Pop(rdx);
4764 ExternalReference pending_message_script = 4764 ExternalReference pending_message_script =
4765 ExternalReference::address_of_pending_message_script(isolate()); 4765 ExternalReference::address_of_pending_message_script(isolate());
4766 __ Store(pending_message_script, rdx); 4766 __ Store(pending_message_script, rdx);
4767 4767
4768 __ pop(rdx); 4768 __ Pop(rdx);
4769 __ SmiToInteger32(rdx, rdx); 4769 __ SmiToInteger32(rdx, rdx);
4770 ExternalReference has_pending_message = 4770 ExternalReference has_pending_message =
4771 ExternalReference::address_of_has_pending_message(isolate()); 4771 ExternalReference::address_of_has_pending_message(isolate());
4772 __ Store(has_pending_message, rdx); 4772 __ Store(has_pending_message, rdx);
4773 4773
4774 __ pop(rdx); 4774 __ Pop(rdx);
4775 ExternalReference pending_message_obj = 4775 ExternalReference pending_message_obj =
4776 ExternalReference::address_of_pending_message_obj(isolate()); 4776 ExternalReference::address_of_pending_message_obj(isolate());
4777 __ Store(pending_message_obj, rdx); 4777 __ Store(pending_message_obj, rdx);
4778 4778
4779 // Restore result register from stack. 4779 // Restore result register from stack.
4780 __ pop(result_register()); 4780 __ Pop(result_register());
4781 4781
4782 // Uncook return address. 4782 // Uncook return address.
4783 __ pop(rdx); 4783 __ Pop(rdx);
4784 __ SmiToInteger32(rdx, rdx); 4784 __ SmiToInteger32(rdx, rdx);
4785 __ Move(rcx, masm_->CodeObject()); 4785 __ Move(rcx, masm_->CodeObject());
4786 __ addq(rdx, rcx); 4786 __ addq(rdx, rcx);
4787 __ jmp(rdx); 4787 __ jmp(rdx);
4788 } 4788 }
4789 4789
4790 4790
4791 #undef __ 4791 #undef __
4792 4792
4793 #define __ ACCESS_MASM(masm()) 4793 #define __ ACCESS_MASM(masm())
(...skipping 99 matching lines...)
4893 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), 4893 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4894 Assembler::target_address_at(call_target_address, 4894 Assembler::target_address_at(call_target_address,
4895 unoptimized_code)); 4895 unoptimized_code));
4896 return OSR_AFTER_STACK_CHECK; 4896 return OSR_AFTER_STACK_CHECK;
4897 } 4897 }
4898 4898
4899 4899
4900 } } // namespace v8::internal 4900 } } // namespace v8::internal
4901 4901
4902 #endif // V8_TARGET_ARCH_X64 4902 #endif // V8_TARGET_ARCH_X64
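
Note on the pattern in this patch: call sites switch from the raw push/pop emitters to the capitalized Push/Pop macro instructions, so the choice of instruction sequence is made in one place rather than at every call site. The sketch below is a minimal standalone toy, not V8 source; the class, register strings, and the assumption that the macro exists to cover configurations where the pointer size differs from the native push width (e.g. an x32-style build) are illustrative only.

// Toy model (not V8 code) of a Push macro instruction.
#include <iostream>
#include <string>

constexpr int kRegisterSize = 8;  // pushq always moves 8 bytes on x64.
constexpr int kPointerSize = 8;   // Assumed 4 in an x32-style configuration.

struct ToyMacroAssembler {
  // Raw emitter: always a full-width pushq.
  void pushq(const std::string& reg) { std::cout << "  pushq " << reg << '\n'; }
  void subl(const std::string& reg, int imm) {
    std::cout << "  subl " << reg << ", " << imm << '\n';
  }
  void movl(const std::string& dst, const std::string& src) {
    std::cout << "  movl " << dst << ", " << src << '\n';
  }

  // Macro instruction: lowers to whatever sequence matches the pointer size.
  void Push(const std::string& reg) {
    if (kPointerSize == kRegisterSize) {
      pushq(reg);                 // Pointers fill a whole native stack slot.
    } else {
      subl("rsp", kPointerSize);  // Smaller slot: adjust rsp explicitly
      movl("[rsp]", reg);         // and store only the pointer-sized bits.
    }
  }
};

int main() {
  ToyMacroAssembler masm;
  masm.Push("rdx");  // Call sites now write Push, never raw pushq.
  return 0;
}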