OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 101 matching lines...) |
112 int length = builder.position(); | 112 int length = builder.position(); |
113 Vector<char> copy = Vector<char>::New(length + 1); | 113 Vector<char> copy = Vector<char>::New(length + 1); |
114 memcpy(copy.start(), builder.Finalize(), copy.length()); | 114 memcpy(copy.start(), builder.Finalize(), copy.length()); |
115 masm()->RecordComment(copy.start()); | 115 masm()->RecordComment(copy.start()); |
116 } | 116 } |
117 | 117 |
118 | 118 |
119 bool LCodeGen::GeneratePrologue() { | 119 bool LCodeGen::GeneratePrologue() { |
120 ASSERT(is_generating()); | 120 ASSERT(is_generating()); |
121 | 121 |
122 if (info()->IsOptimizing()) { | 122 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
123 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | |
124 | 123 |
125 #ifdef DEBUG | 124 #ifdef DEBUG |
126 if (strlen(FLAG_stop_at) > 0 && | 125 if (strlen(FLAG_stop_at) > 0 && |
127 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { | 126 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
128 __ int3(); | 127 __ int3(); |
129 } | 128 } |
130 #endif | 129 #endif |
131 | 130 |
132 // Strict mode functions need to replace the receiver with undefined | 131 // Strict mode functions need to replace the receiver with undefined |
133 // when called as functions (without an explicit receiver | 132 // when called as functions (without an explicit receiver |
134 // object). rcx is zero for method calls and non-zero for function | 133 // object). rcx is zero for method calls and non-zero for function |
135 // calls. | 134 // calls. |
136 if (!info_->is_classic_mode() || info_->is_native()) { | 135 if (!info_->is_classic_mode() || info_->is_native()) { |
137 Label ok; | 136 Label ok; |
138 __ testq(rcx, rcx); | 137 __ testq(rcx, rcx); |
139 __ j(zero, &ok, Label::kNear); | 138 __ j(zero, &ok, Label::kNear); |
140 // +1 for return address. | 139 // +1 for return address. |
141 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize; | 140 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize; |
142 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); | 141 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); |
143 __ movq(Operand(rsp, receiver_offset), kScratchRegister); | 142 __ movq(Operand(rsp, receiver_offset), kScratchRegister); |
144 __ bind(&ok); | 143 __ bind(&ok); |
145 } | |
146 } | 144 } |
147 | 145 |
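Note: the receiver fixup above is pure stack-slot arithmetic — when rcx is non-zero the slot holding the receiver is overwritten with undefined. A minimal stand-alone C++ sketch of the same offset calculation, using a simulated stack and a hypothetical kUndefined sentinel (illustrative names, not V8 API):

#include <cstdint>
#include <vector>

// Stack modelled as a vector indexed upward from rsp:
// index 0 = return address, indexes 1..N = parameters, index N+1 = receiver.
constexpr int kPointerSize = 8;                 // x64 pointer width, as in the listing
const uintptr_t kUndefined = 0xdeadbeef;        // hypothetical sentinel for undefined

// Mirrors the movq above: the caller must size `stack` to num_parameters + 2 slots.
void PatchReceiver(std::vector<uintptr_t>& stack, int num_parameters) {
  int receiver_offset = (num_parameters + 1) * kPointerSize;  // +1 for return address
  stack[receiver_offset / kPointerSize] = kUndefined;
}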
148 info()->set_prologue_offset(masm_->pc_offset()); | 146 info()->set_prologue_offset(masm_->pc_offset()); |
149 if (NeedsEagerFrame()) { | 147 __ push(rbp); // Caller's frame pointer. |
150 ASSERT(!frame_is_built_); | 148 __ movq(rbp, rsp); |
151 frame_is_built_ = true; | 149 __ push(rsi); // Callee's context. |
152 __ push(rbp); // Caller's frame pointer. | 150 __ push(rdi); // Callee's JS function. |
153 __ movq(rbp, rsp); | |
154 __ push(rsi); // Callee's context. | |
155 if (info()->IsStub()) { | |
156 __ Push(Smi::FromInt(StackFrame::STUB)); | |
157 } else { | |
158 __ push(rdi); // Callee's JS function. | |
159 } | |
160 } | |
161 | 151 |
162 // Reserve space for the stack slots needed by the code. | 152 // Reserve space for the stack slots needed by the code. |
163 int slots = GetStackSlotCount(); | 153 int slots = GetStackSlotCount(); |
164 if (slots > 0) { | 154 if (slots > 0) { |
165 if (FLAG_debug_code) { | 155 if (FLAG_debug_code) { |
166 __ Set(rax, slots); | 156 __ Set(rax, slots); |
167 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE); | 157 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE); |
168 Label loop; | 158 Label loop; |
169 __ bind(&loop); | 159 __ bind(&loop); |
170 __ push(kScratchRegister); | 160 __ push(kScratchRegister); |
171 __ decl(rax); | 161 __ decl(rax); |
172 __ j(not_zero, &loop); | 162 __ j(not_zero, &loop); |
173 } else { | 163 } else { |
174 __ subq(rsp, Immediate(slots * kPointerSize)); | 164 __ subq(rsp, Immediate(slots * kPointerSize)); |
175 #ifdef _MSC_VER | 165 #ifdef _MSC_VER |
176 // On windows, you may not access the stack more than one page below | 166 // On windows, you may not access the stack more than one page below |
177 // the most recently mapped page. To make the allocated area randomly | 167 // the most recently mapped page. To make the allocated area randomly |
178 // accessible, we write to each page in turn (the value is irrelevant). | 168 // accessible, we write to each page in turn (the value is irrelevant). |
179 const int kPageSize = 4 * KB; | 169 const int kPageSize = 4 * KB; |
180 for (int offset = slots * kPointerSize - kPageSize; | 170 for (int offset = slots * kPointerSize - kPageSize; |
181 offset > 0; | 171 offset > 0; |
182 offset -= kPageSize) { | 172 offset -= kPageSize) { |
183 __ movq(Operand(rsp, offset), rax); | 173 __ movq(Operand(rsp, offset), rax); |
184 } | 174 } |
185 #endif | 175 #endif |
186 } | 176 } |
187 } | 177 } |
188 | 178 |
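Note: the _MSC_VER loop above exists only to touch one word per page of the newly reserved stack area, since Windows grows the guard page a single page at a time. A rough stand-alone sketch of the offsets it visits, with illustrative constants (not V8 code):

#include <cstdio>

int main() {
  const int kPointerSize = 8;
  const int kPageSize = 4 * 1024;   // 4 KB, as in the listing
  const int slots = 3000;           // example: ~23 KB of spill slots

  // After "subq rsp, slots * kPointerSize", write one word into each page of
  // the reserved area so every page is committed before it is used.
  for (int offset = slots * kPointerSize - kPageSize; offset > 0; offset -= kPageSize) {
    std::printf("touch [rsp + %d]\n", offset);  // movq(Operand(rsp, offset), rax)
  }
  return 0;
}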
189 // Possibly allocate a local context. | 179 // Possibly allocate a local context. |
190 int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 180 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
191 if (heap_slots > 0) { | 181 if (heap_slots > 0) { |
192 Comment(";;; Allocate local context"); | 182 Comment(";;; Allocate local context"); |
193 // Argument to NewContext is the function, which is still in rdi. | 183 // Argument to NewContext is the function, which is still in rdi. |
194 __ push(rdi); | 184 __ push(rdi); |
195 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 185 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
196 FastNewContextStub stub(heap_slots); | 186 FastNewContextStub stub(heap_slots); |
197 __ CallStub(&stub); | 187 __ CallStub(&stub); |
198 } else { | 188 } else { |
199 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 189 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
200 } | 190 } |
(...skipping 15 matching lines...) |

216 int context_offset = Context::SlotOffset(var->index()); | 206 int context_offset = Context::SlotOffset(var->index()); |
217 __ movq(Operand(rsi, context_offset), rax); | 207 __ movq(Operand(rsi, context_offset), rax); |
218 // Update the write barrier. This clobbers rax and rbx. | 208 // Update the write barrier. This clobbers rax and rbx. |
219 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs); | 209 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs); |
220 } | 210 } |
221 } | 211 } |
222 Comment(";;; End allocate local context"); | 212 Comment(";;; End allocate local context"); |
223 } | 213 } |
224 | 214 |
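Note: RecordWriteContextSlot above is the generational write barrier for the store of a parameter into the heap-allocated context. As a rough illustration of the idea only — a hypothetical remembered set, not the real V8 barrier, which also filters by generation and page:

#include <cstdint>
#include <set>

struct Context { uintptr_t slots[16]; };

std::set<Context*> remembered_set;  // contexts with recorded pointer stores

// Store a value into a context slot and remember the hosting object so the
// garbage collector can re-scan it later.
void StoreContextSlot(Context* ctx, int index, uintptr_t value) {
  ctx->slots[index] = value;
  remembered_set.insert(ctx);
}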
225 // Trace the call. | 215 // Trace the call. |
226 if (FLAG_trace && info()->IsOptimizing()) { | 216 if (FLAG_trace) { |
227 __ CallRuntime(Runtime::kTraceEnter, 0); | 217 __ CallRuntime(Runtime::kTraceEnter, 0); |
228 } | 218 } |
229 return !is_aborted(); | 219 return !is_aborted(); |
230 } | 220 } |
231 | 221 |
232 | 222 |
233 bool LCodeGen::GenerateBody() { | 223 bool LCodeGen::GenerateBody() { |
234 ASSERT(is_generating()); | 224 ASSERT(is_generating()); |
235 bool emit_instructions = true; | 225 bool emit_instructions = true; |
236 for (current_instruction_ = 0; | 226 for (current_instruction_ = 0; |
(...skipping 32 matching lines...) |
269 } | 259 } |
270 instr->CompileToNative(this); | 260 instr->CompileToNative(this); |
271 } | 261 } |
272 } | 262 } |
273 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 263 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
274 return !is_aborted(); | 264 return !is_aborted(); |
275 } | 265 } |
276 | 266 |
277 | 267 |
278 bool LCodeGen::GenerateJumpTable() { | 268 bool LCodeGen::GenerateJumpTable() { |
279 Label needs_frame_not_call; | |
280 Label needs_frame_is_call; | |
281 for (int i = 0; i < jump_table_.length(); i++) { | 269 for (int i = 0; i < jump_table_.length(); i++) { |
282 __ bind(&jump_table_[i].label); | 270 __ bind(&jump_table_[i].label); |
283 Address entry = jump_table_[i].address; | 271 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); |
284 if (jump_table_[i].needs_frame) { | |
285 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); | |
286 if (jump_table_[i].is_lazy_deopt) { | |
287 if (needs_frame_is_call.is_bound()) { | |
288 __ jmp(&needs_frame_is_call); | |
289 } else { | |
290 __ bind(&needs_frame_is_call); | |
291 __ push(rbp); | |
292 __ movq(rbp, rsp); | |
293 __ push(rsi); | |
294 // This variant of deopt can only be used with stubs. Since we don't | |
295 // have a function pointer to install in the stack frame that we're | |
296 // building, install a special marker there instead. | |
297 ASSERT(info()->IsStub()); | |
298 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | |
299 __ push(rsi); | |
300 __ movq(rsi, MemOperand(rsp, kPointerSize)); | |
301 __ call(kScratchRegister); | |
302 } | |
303 } else { | |
304 if (needs_frame_not_call.is_bound()) { | |
305 __ jmp(&needs_frame_not_call); | |
306 } else { | |
307 __ bind(&needs_frame_not_call); | |
308 __ push(rbp); | |
309 __ movq(rbp, rsp); | |
310 __ push(r8); | |
311 // This variant of deopt can only be used with stubs. Since we don't | |
312 // have a function pointer to install in the stack frame that we're | |
313 // building, install a special marker there instead. | |
314 ASSERT(info()->IsStub()); | |
315 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | |
316 __ push(rsi); | |
317 __ movq(rsi, MemOperand(rsp, kPointerSize)); | |
318 __ jmp(kScratchRegister); | |
319 } | |
320 } | |
321 } else { | |
322 if (jump_table_[i].is_lazy_deopt) { | |
323 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | |
324 } else { | |
325 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); | |
326 } | |
327 } | |
328 } | 272 } |
329 return !is_aborted(); | 273 return !is_aborted(); |
330 } | 274 } |
331 | 275 |
332 | 276 |
333 bool LCodeGen::GenerateDeferredCode() { | 277 bool LCodeGen::GenerateDeferredCode() { |
334 ASSERT(is_generating()); | 278 ASSERT(is_generating()); |
335 if (deferred_.length() > 0) { | 279 if (deferred_.length() > 0) { |
336 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 280 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
337 LDeferredCode* code = deferred_[i]; | 281 LDeferredCode* code = deferred_[i]; |
338 __ bind(code->entry()); | 282 __ bind(code->entry()); |
339 if (NeedsDeferredFrame()) { | |
340 Comment(";;; Deferred build frame", | |
341 code->instruction_index(), | |
342 code->instr()->Mnemonic()); | |
343 ASSERT(!frame_is_built_); | |
344 ASSERT(info()->IsStub()); | |
345 frame_is_built_ = true; | |
346 // Build the frame in such a way that esi isn't trashed. | |
347 __ push(rbp); // Caller's frame pointer. | |
348 __ push(Operand(rbp, StandardFrameConstants::kContextOffset)); | |
349 __ Push(Smi::FromInt(StackFrame::STUB)); | |
350 __ lea(rbp, Operand(rsp, 2 * kPointerSize)); | |
351 } | |
352 Comment(";;; Deferred code @%d: %s.", | 283 Comment(";;; Deferred code @%d: %s.", |
353 code->instruction_index(), | 284 code->instruction_index(), |
354 code->instr()->Mnemonic()); | 285 code->instr()->Mnemonic()); |
355 code->Generate(); | 286 code->Generate(); |
356 if (NeedsDeferredFrame()) { | |
357 Comment(";;; Deferred destroy frame", | |
358 code->instruction_index(), | |
359 code->instr()->Mnemonic()); | |
360 ASSERT(frame_is_built_); | |
361 frame_is_built_ = false; | |
362 __ movq(rsp, rbp); | |
363 __ pop(rbp); | |
364 } | |
365 __ jmp(code->exit()); | 287 __ jmp(code->exit()); |
366 } | 288 } |
367 } | 289 } |
368 | 290 |
369 // Deferred code is the last part of the instruction sequence. Mark | 291 // Deferred code is the last part of the instruction sequence. Mark |
370 // the generated code as done unless we bailed out. | 292 // the generated code as done unless we bailed out. |
371 if (!is_aborted()) status_ = DONE; | 293 if (!is_aborted()) status_ = DONE; |
372 return !is_aborted(); | 294 return !is_aborted(); |
373 } | 295 } |
374 | 296 |
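Note: deferred code is the out-of-line slow path — the fast path jumps to code->entry(), and the deferred block jumps back to code->exit() once it is done. A very rough sketch of the emission order, using plain function objects in place of labels (illustrative only):

#include <cstdio>
#include <functional>
#include <vector>

int main() {
  std::vector<std::function<void()>> deferred;

  // Main body: emit fast paths first, queueing each slow path for later.
  for (int i = 0; i < 3; ++i) {
    std::printf("fast path for instruction %d\n", i);
    deferred.push_back([i] { std::printf("deferred (slow) code for instruction %d\n", i); });
  }

  // Deferred code is the last part of the instruction sequence.
  for (auto& code : deferred) code();
  return 0;
}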
(...skipping 92 matching lines...) |
467 // arguments index points to the first element of a sequence of tagged | 389 // arguments index points to the first element of a sequence of tagged |
468 // values on the stack that represent the arguments. This needs to be | 390 // values on the stack that represent the arguments. This needs to be |
469 // kept in sync with the LArgumentsElements implementation. | 391 // kept in sync with the LArgumentsElements implementation. |
470 *arguments_index = -environment->parameter_count(); | 392 *arguments_index = -environment->parameter_count(); |
471 *arguments_count = environment->parameter_count(); | 393 *arguments_count = environment->parameter_count(); |
472 | 394 |
473 WriteTranslation(environment->outer(), | 395 WriteTranslation(environment->outer(), |
474 translation, | 396 translation, |
475 arguments_index, | 397 arguments_index, |
476 arguments_count); | 398 arguments_count); |
477 bool has_closure_id = !info()->closure().is_null() && | 399 int closure_id = *info()->closure() != *environment->closure() |
478 *info()->closure() != *environment->closure(); | |
479 int closure_id = has_closure_id | |
480 ? DefineDeoptimizationLiteral(environment->closure()) | 400 ? DefineDeoptimizationLiteral(environment->closure()) |
481 : Translation::kSelfLiteralId; | 401 : Translation::kSelfLiteralId; |
482 | 402 |
483 switch (environment->frame_type()) { | 403 switch (environment->frame_type()) { |
484 case JS_FUNCTION: | 404 case JS_FUNCTION: |
485 translation->BeginJSFrame(environment->ast_id(), closure_id, height); | 405 translation->BeginJSFrame(environment->ast_id(), closure_id, height); |
486 break; | 406 break; |
487 case JS_CONSTRUCT: | 407 case JS_CONSTRUCT: |
488 translation->BeginConstructStubFrame(closure_id, translation_size); | 408 translation->BeginConstructStubFrame(closure_id, translation_size); |
489 break; | 409 break; |
490 case JS_GETTER: | 410 case JS_GETTER: |
491 ASSERT(translation_size == 1); | 411 ASSERT(translation_size == 1); |
492 ASSERT(height == 0); | 412 ASSERT(height == 0); |
493 translation->BeginGetterStubFrame(closure_id); | 413 translation->BeginGetterStubFrame(closure_id); |
494 break; | 414 break; |
495 case JS_SETTER: | 415 case JS_SETTER: |
496 ASSERT(translation_size == 2); | 416 ASSERT(translation_size == 2); |
497 ASSERT(height == 0); | 417 ASSERT(height == 0); |
498 translation->BeginSetterStubFrame(closure_id); | 418 translation->BeginSetterStubFrame(closure_id); |
499 break; | 419 break; |
500 case ARGUMENTS_ADAPTOR: | 420 case ARGUMENTS_ADAPTOR: |
501 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); | 421 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); |
502 break; | 422 break; |
503 case STUB: | |
504 translation->BeginCompiledStubFrame(); | |
505 break; | |
506 } | 423 } |
507 | 424 |
508 // Inlined frames which push their arguments cause the index to be | 425 // Inlined frames which push their arguments cause the index to be |
509 // bumped and a new stack area to be used for materialization. | 426 // bumped and a new stack area to be used for materialization. |
510 if (environment->entry() != NULL && | 427 if (environment->entry() != NULL && |
511 environment->entry()->arguments_pushed()) { | 428 environment->entry()->arguments_pushed()) { |
512 *arguments_index = *arguments_index < 0 | 429 *arguments_index = *arguments_index < 0 |
513 ? GetStackSlotCount() | 430 ? GetStackSlotCount() |
514 : *arguments_index + *arguments_count; | 431 : *arguments_index + *arguments_count; |
515 *arguments_count = environment->entry()->arguments_count() + 1; | 432 *arguments_count = environment->entry()->arguments_count() + 1; |
(...skipping 170 matching lines...) |
686 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 603 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
687 deoptimizations_.Add(environment, environment->zone()); | 604 deoptimizations_.Add(environment, environment->zone()); |
688 } | 605 } |
689 } | 606 } |
690 | 607 |
691 | 608 |
692 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 609 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
693 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 610 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
694 ASSERT(environment->HasBeenRegistered()); | 611 ASSERT(environment->HasBeenRegistered()); |
695 int id = environment->deoptimization_index(); | 612 int id = environment->deoptimization_index(); |
696 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 613 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); |
697 Deoptimizer::BailoutType bailout_type = info()->IsStub() | |
698 ? Deoptimizer::LAZY | |
699 : Deoptimizer::EAGER; | |
700 Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type); | |
701 if (entry == NULL) { | 614 if (entry == NULL) { |
702 Abort("bailout was not prepared"); | 615 Abort("bailout was not prepared"); |
703 return; | 616 return; |
704 } | 617 } |
705 | 618 |
706 ASSERT(info()->IsStub() || frame_is_built_); | |
707 bool lazy_deopt = info()->IsStub(); | |
708 if (cc == no_condition) { | 619 if (cc == no_condition) { |
709 if (lazy_deopt) { | 620 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); |
710 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | |
711 } else { | |
712 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); | |
713 } | |
714 } else { | 621 } else { |
715 // We often have several deopts to the same entry, reuse the last | 622 // We often have several deopts to the same entry, reuse the last |
716 // jump entry if this is the case. | 623 // jump entry if this is the case. |
717 if (jump_table_.is_empty() || | 624 if (jump_table_.is_empty() || |
718 jump_table_.last().address != entry || | 625 jump_table_.last().address != entry) { |
719 jump_table_.last().needs_frame != !frame_is_built_ || | 626 jump_table_.Add(JumpTableEntry(entry), zone()); |
720 jump_table_.last().is_lazy_deopt != lazy_deopt) { | |
721 JumpTableEntry table_entry(entry, !frame_is_built_, lazy_deopt); | |
722 jump_table_.Add(table_entry, zone()); | |
723 } | 627 } |
724 __ j(cc, &jump_table_.last().label); | 628 __ j(cc, &jump_table_.last().label); |
725 } | 629 } |
726 } | 630 } |
727 | 631 |
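Note on the jump-table reuse above: consecutive deopts that target the same entry share one label, so only one out-of-line jump is emitted per run of identical targets. A small stand-alone model of that bookkeeping (illustrative names, not the V8 types):

#include <cstdio>
#include <vector>

struct JumpTableEntry {
  const void* address;  // deopt entry this label will jump to
  int label_id;         // stands in for the Label bound in GenerateJumpTable
};

std::vector<JumpTableEntry> jump_table;

// Mirrors DeoptimizeIf: reuse the last entry when the target repeats.
int JumpTargetFor(const void* entry) {
  if (jump_table.empty() || jump_table.back().address != entry) {
    jump_table.push_back({entry, static_cast<int>(jump_table.size())});
  }
  return jump_table.back().label_id;  // __ j(cc, &jump_table_.last().label)
}

int main() {
  int a = 0, b = 0;
  std::printf("%d %d %d\n", JumpTargetFor(&a), JumpTargetFor(&a), JumpTargetFor(&b));
  // Prints "0 0 1": two deopts to the same entry share one jump-table slot.
  return 0;
}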
728 | 632 |
729 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 633 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
730 int length = deoptimizations_.length(); | 634 int length = deoptimizations_.length(); |
731 if (length == 0) return; | 635 if (length == 0) return; |
732 Handle<DeoptimizationInputData> data = | 636 Handle<DeoptimizationInputData> data = |
(...skipping 1644 matching lines...) |
2377 __ j(condition, &true_value, Label::kNear); | 2281 __ j(condition, &true_value, Label::kNear); |
2378 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 2282 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
2379 __ jmp(&done, Label::kNear); | 2283 __ jmp(&done, Label::kNear); |
2380 __ bind(&true_value); | 2284 __ bind(&true_value); |
2381 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); | 2285 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); |
2382 __ bind(&done); | 2286 __ bind(&done); |
2383 } | 2287 } |
2384 | 2288 |
2385 | 2289 |
2386 void LCodeGen::DoReturn(LReturn* instr) { | 2290 void LCodeGen::DoReturn(LReturn* instr) { |
2387 if (FLAG_trace && info()->IsOptimizing()) { | 2291 if (FLAG_trace) { |
2388 // Preserve the return value on the stack and rely on the runtime | 2292 // Preserve the return value on the stack and rely on the runtime |
2389 // call to return the value in the same register. | 2293 // call to return the value in the same register. |
2390 __ push(rax); | 2294 __ push(rax); |
2391 __ CallRuntime(Runtime::kTraceExit, 1); | 2295 __ CallRuntime(Runtime::kTraceExit, 1); |
2392 } | 2296 } |
2393 if (NeedsEagerFrame()) { | 2297 __ movq(rsp, rbp); |
2394 __ movq(rsp, rbp); | 2298 __ pop(rbp); |
2395 __ pop(rbp); | 2299 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx); |
2396 } | |
2397 if (info()->IsStub()) { | |
2398 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
2399 __ Ret(0, r10); | |
2400 } else { | |
2401 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx); | |
2402 } | |
2403 } | 2300 } |
2404 | 2301 |
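Note: the Ret immediate above drops the incoming arguments after the return address has been popped. A quick check of the byte count, assuming the x64 pointer size and taking the +1 slot to be the receiver (an assumption about the calling convention, not spelled out in the listing):

#include <cstdio>

int main() {
  const int kPointerSize = 8;
  int parameter_count = 2;                                  // example function f(a, b)
  int bytes_dropped = (parameter_count + 1) * kPointerSize; // +1 presumably the receiver
  std::printf("ret drops %d bytes of arguments\n", bytes_dropped);  // 24
  return 0;
}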
2405 | 2302 |
2406 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { | 2303 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { |
2407 Register result = ToRegister(instr->result()); | 2304 Register result = ToRegister(instr->result()); |
2408 __ LoadGlobalCell(result, instr->hydrogen()->cell()); | 2305 __ LoadGlobalCell(result, instr->hydrogen()->cell()); |
2409 if (instr->hydrogen()->RequiresHoleCheck()) { | 2306 if (instr->hydrogen()->RequiresHoleCheck()) { |
2410 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2307 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
2411 DeoptimizeIf(equal, instr->environment()); | 2308 DeoptimizeIf(equal, instr->environment()); |
2412 } | 2309 } |
(...skipping 2210 matching lines...) |
4623 } else { | 4520 } else { |
4624 __ Cmp(reg, target); | 4521 __ Cmp(reg, target); |
4625 } | 4522 } |
4626 DeoptimizeIf(not_equal, instr->environment()); | 4523 DeoptimizeIf(not_equal, instr->environment()); |
4627 } | 4524 } |
4628 | 4525 |
4629 | 4526 |
4630 void LCodeGen::DoCheckMapCommon(Register reg, | 4527 void LCodeGen::DoCheckMapCommon(Register reg, |
4631 Handle<Map> map, | 4528 Handle<Map> map, |
4632 CompareMapMode mode, | 4529 CompareMapMode mode, |
4633 LInstruction* instr) { | 4530 LEnvironment* env) { |
4634 Label success; | 4531 Label success; |
4635 __ CompareMap(reg, map, &success, mode); | 4532 __ CompareMap(reg, map, &success, mode); |
4636 DeoptimizeIf(not_equal, instr->environment()); | 4533 DeoptimizeIf(not_equal, env); |
4637 __ bind(&success); | 4534 __ bind(&success); |
4638 } | 4535 } |
4639 | 4536 |
4640 | 4537 |
4641 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 4538 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
4642 LOperand* input = instr->value(); | 4539 LOperand* input = instr->value(); |
4643 ASSERT(input->IsRegister()); | 4540 ASSERT(input->IsRegister()); |
4644 Register reg = ToRegister(input); | 4541 Register reg = ToRegister(input); |
4645 | 4542 |
4646 Label success; | 4543 Label success; |
4647 SmallMapList* map_set = instr->hydrogen()->map_set(); | 4544 SmallMapList* map_set = instr->hydrogen()->map_set(); |
4648 for (int i = 0; i < map_set->length() - 1; i++) { | 4545 for (int i = 0; i < map_set->length() - 1; i++) { |
4649 Handle<Map> map = map_set->at(i); | 4546 Handle<Map> map = map_set->at(i); |
4650 __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP); | 4547 __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP); |
4651 __ j(equal, &success); | 4548 __ j(equal, &success); |
4652 } | 4549 } |
4653 Handle<Map> map = map_set->last(); | 4550 Handle<Map> map = map_set->last(); |
4654 DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr); | 4551 DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment()); |
4655 __ bind(&success); | 4552 __ bind(&success); |
4656 } | 4553 } |
4657 | 4554 |
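Note: DoCheckMaps is a membership test of the object's map against the allowed set, with the final comparison doubling as the deoptimizing check. A compact stand-alone model (illustrative types, not V8's):

#include <vector>

struct Map {};
struct HeapObject { const Map* map; };

// Returns true if the object's map is in the set. The last mismatch is the one
// that would trigger DeoptimizeIf(not_equal, ...) in the generated code.
bool CheckMaps(const HeapObject& obj, const std::vector<const Map*>& map_set) {
  for (size_t i = 0; i + 1 < map_set.size(); ++i) {
    if (obj.map == map_set[i]) return true;  // __ j(equal, &success)
  }
  return obj.map == map_set.back();          // DoCheckMapCommon on the last map
}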
4658 | 4555 |
4659 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 4556 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
4660 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); | 4557 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); |
4661 Register result_reg = ToRegister(instr->result()); | 4558 Register result_reg = ToRegister(instr->result()); |
4662 __ ClampDoubleToUint8(value_reg, xmm0, result_reg); | 4559 __ ClampDoubleToUint8(value_reg, xmm0, result_reg); |
4663 } | 4560 } |
4664 | 4561 |
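Note: ClampDoubleToUint8 squeezes an arbitrary double into [0, 255]. A plain C++ sketch of the intended result, assuming NaN clamps to 0 and values round to the nearest integer (the exact rounding mode of the macro-assembler version is not shown in this listing):

#include <cmath>
#include <cstdint>

uint8_t ClampDoubleToUint8(double value) {
  if (!(value > 0.0)) return 0;    // NaN and non-positive values clamp to 0
  if (value >= 255.0) return 255;  // upper clamp
  return static_cast<uint8_t>(std::lround(value));  // round to nearest
}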
(...skipping 46 matching lines...) |
4711 | 4608 |
4712 Handle<JSObject> holder = instr->holder(); | 4609 Handle<JSObject> holder = instr->holder(); |
4713 Handle<JSObject> current_prototype = instr->prototype(); | 4610 Handle<JSObject> current_prototype = instr->prototype(); |
4714 | 4611 |
4715 // Load prototype object. | 4612 // Load prototype object. |
4716 __ LoadHeapObject(reg, current_prototype); | 4613 __ LoadHeapObject(reg, current_prototype); |
4717 | 4614 |
4718 // Check prototype maps up to the holder. | 4615 // Check prototype maps up to the holder. |
4719 while (!current_prototype.is_identical_to(holder)) { | 4616 while (!current_prototype.is_identical_to(holder)) { |
4720 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), | 4617 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), |
4721 ALLOW_ELEMENT_TRANSITION_MAPS, instr); | 4618 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment()); |
4722 current_prototype = | 4619 current_prototype = |
4723 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); | 4620 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); |
4724 // Load next prototype object. | 4621 // Load next prototype object. |
4725 __ LoadHeapObject(reg, current_prototype); | 4622 __ LoadHeapObject(reg, current_prototype); |
4726 } | 4623 } |
4727 | 4624 |
4728 // Check the holder map. | 4625 // Check the holder map. |
4729 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), | 4626 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), |
4730 ALLOW_ELEMENT_TRANSITION_MAPS, instr); | 4627 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment()); |
4731 } | 4628 } |
4732 | 4629 |
4733 | 4630 |
4734 void LCodeGen::DoAllocateObject(LAllocateObject* instr) { | 4631 void LCodeGen::DoAllocateObject(LAllocateObject* instr) { |
4735 class DeferredAllocateObject: public LDeferredCode { | 4632 class DeferredAllocateObject: public LDeferredCode { |
4736 public: | 4633 public: |
4737 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr) | 4634 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr) |
4738 : LDeferredCode(codegen), instr_(instr) { } | 4635 : LDeferredCode(codegen), instr_(instr) { } |
4739 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); } | 4636 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); } |
4740 virtual LInstruction* instr() { return instr_; } | 4637 virtual LInstruction* instr() { return instr_; } |
(...skipping 515 matching lines...) |
5256 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); | 5153 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); |
5257 | 5154 |
5258 // Check the marker in the calling frame. | 5155 // Check the marker in the calling frame. |
5259 __ bind(&check_frame_marker); | 5156 __ bind(&check_frame_marker); |
5260 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 5157 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
5261 Smi::FromInt(StackFrame::CONSTRUCT)); | 5158 Smi::FromInt(StackFrame::CONSTRUCT)); |
5262 } | 5159 } |
5263 | 5160 |
5264 | 5161 |
5265 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 5162 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
5266 if (info()->IsStub()) return; | |
5267 // Ensure that we have enough space after the previous lazy-bailout | 5163 // Ensure that we have enough space after the previous lazy-bailout |
5268 // instruction for patching the code here. | 5164 // instruction for patching the code here. |
5269 int current_pc = masm()->pc_offset(); | 5165 int current_pc = masm()->pc_offset(); |
5270 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 5166 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
5271 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5167 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
5272 __ Nop(padding_size); | 5168 __ Nop(padding_size); |
5273 } | 5169 } |
5274 } | 5170 } |
5275 | 5171 |
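Note: EnsureSpaceForLazyDeopt only pads with NOPs when the gap since the previous lazy bailout is smaller than the patch size, so the site can later be overwritten with a call. A tiny stand-alone version of the arithmetic (illustrative values):

#include <cstdio>

// Returns how many NOP bytes must be emitted so that the code starting at
// current_pc leaves room for a patch of patch_size bytes after the last
// lazy-deopt position.
int LazyDeoptPadding(int last_lazy_deopt_pc, int patch_size, int current_pc) {
  int wanted = last_lazy_deopt_pc + patch_size;
  return current_pc < wanted ? wanted - current_pc : 0;
}

int main() {
  std::printf("%d\n", LazyDeoptPadding(100, 13, 105));  // prints 8 bytes of padding
  return 0;
}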
5276 | 5172 |
(...skipping 205 matching lines...) |
5482 FixedArray::kHeaderSize - kPointerSize)); | 5378 FixedArray::kHeaderSize - kPointerSize)); |
5483 __ bind(&done); | 5379 __ bind(&done); |
5484 } | 5380 } |
5485 | 5381 |
5486 | 5382 |
5487 #undef __ | 5383 #undef __ |
5488 | 5384 |
5489 } } // namespace v8::internal | 5385 } } // namespace v8::internal |
5490 | 5386 |
5491 #endif // V8_TARGET_ARCH_X64 | 5387 #endif // V8_TARGET_ARCH_X64 |