Chromium Code Reviews

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 10701054: Enable stub generation using Hydrogen/Lithium (again) (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Review feedback (created 8 years ago)
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 101 matching lines...)
112 int length = builder.position(); 112 int length = builder.position();
113 Vector<char> copy = Vector<char>::New(length + 1); 113 Vector<char> copy = Vector<char>::New(length + 1);
114 memcpy(copy.start(), builder.Finalize(), copy.length()); 114 memcpy(copy.start(), builder.Finalize(), copy.length());
115 masm()->RecordComment(copy.start()); 115 masm()->RecordComment(copy.start());
116 } 116 }
117 117
118 118
119 bool LCodeGen::GeneratePrologue() { 119 bool LCodeGen::GeneratePrologue() {
120 ASSERT(is_generating()); 120 ASSERT(is_generating());
121 121
122 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 122 if (info()->IsOptimizing()) {
123 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
123 124
124 #ifdef DEBUG 125 #ifdef DEBUG
125 if (strlen(FLAG_stop_at) > 0 && 126 if (strlen(FLAG_stop_at) > 0 &&
126 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 127 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
127 __ int3(); 128 __ int3();
128 } 129 }
129 #endif 130 #endif
130 131
131 // Strict mode functions need to replace the receiver with undefined 132 // Strict mode functions need to replace the receiver with undefined
132 // when called as functions (without an explicit receiver 133 // when called as functions (without an explicit receiver
133 // object). rcx is zero for method calls and non-zero for function 134 // object). rcx is zero for method calls and non-zero for function
134 // calls. 135 // calls.
135 if (!info_->is_classic_mode() || info_->is_native()) { 136 if (!info_->is_classic_mode() || info_->is_native()) {
136 Label begin; 137 Label begin;
137 __ bind(&begin); 138 __ bind(&begin);
138 Label ok; 139 Label ok;
139 __ testq(rcx, rcx); 140 __ testq(rcx, rcx);
140 __ j(zero, &ok, Label::kNear); 141 __ j(zero, &ok, Label::kNear);
141 // +1 for return address. 142 // +1 for return address.
142 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize; 143 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
143 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 144 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
144 __ movq(Operand(rsp, receiver_offset), kScratchRegister); 145 __ movq(Operand(rsp, receiver_offset), kScratchRegister);
145 __ bind(&ok); 146 __ bind(&ok);
146 ASSERT(!FLAG_age_code || 147 ASSERT(!FLAG_age_code ||
147 (kSizeOfOptimizedStrictModePrologue == ok.pos() - begin.pos())); 148 (kSizeOfOptimizedStrictModePrologue == ok.pos() - begin.pos()));
149 }
148 } 150 }
149 151
150 __ push(rbp); // Caller's frame pointer. 152 if (NeedsEagerFrame()) {
151 __ movq(rbp, rsp); 153 ASSERT(!frame_is_built_);
152 __ push(rsi); // Callee's context. 154 frame_is_built_ = true;
153 __ push(rdi); // Callee's JS function. 155 __ push(rbp); // Caller's frame pointer.
156 __ movq(rbp, rsp);
157 if (info()->IsStub()) {
158 __ push(rsi); // Callee's context.
Jakob Kummerow 2012/11/28 16:28:22 nit: could hoist this out of the if/else blocks.
danno 2012/11/30 16:23:24 Done.
159 __ Push(Smi::FromInt(StackFrame::STUB));
160 } else {
161 __ push(rsi); // Callee's context.
162 __ push(rdi); // Callee's JS function.
163 }
164 }
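
The prologue above builds a frame only when NeedsEagerFrame() is true; a stub frame pushes a Smi-tagged StackFrame::STUB marker where an optimized function frame pushes the JSFunction. A minimal standalone sketch of which slots each variant pushes (FrameKind and PrologueSlots are invented names for illustration, not V8 API):

```cpp
#include <string>
#include <vector>

// Simplified model of the slots pushed by the NeedsEagerFrame() block above.
enum class FrameKind { kNone, kStub, kOptimizedFunction };

std::vector<std::string> PrologueSlots(FrameKind kind) {
  std::vector<std::string> slots;
  if (kind == FrameKind::kNone) return slots;   // NeedsEagerFrame() is false
  slots.push_back("caller rbp");                // __ push(rbp)
  slots.push_back("context (rsi)");             // __ push(rsi)
  if (kind == FrameKind::kStub) {
    slots.push_back("Smi(StackFrame::STUB)");   // marker instead of a function
  } else {
    slots.push_back("JS function (rdi)");       // __ push(rdi)
  }
  return slots;
}
```

The STUB marker is what lets a stack walk classify such a frame even though no JSFunction is stored in it.
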
154 165
155 // Reserve space for the stack slots needed by the code. 166 // Reserve space for the stack slots needed by the code.
156 int slots = GetStackSlotCount(); 167 int slots = GetStackSlotCount();
157 if (slots > 0) { 168 if (slots > 0) {
158 if (FLAG_debug_code) { 169 if (FLAG_debug_code) {
159 __ Set(rax, slots); 170 __ Set(rax, slots);
160 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE); 171 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
161 Label loop; 172 Label loop;
162 __ bind(&loop); 173 __ bind(&loop);
163 __ push(kScratchRegister); 174 __ push(kScratchRegister);
164 __ decl(rax); 175 __ decl(rax);
165 __ j(not_zero, &loop); 176 __ j(not_zero, &loop);
166 } else { 177 } else {
167 __ subq(rsp, Immediate(slots * kPointerSize)); 178 __ subq(rsp, Immediate(slots * kPointerSize));
168 #ifdef _MSC_VER 179 #ifdef _MSC_VER
169 // On windows, you may not access the stack more than one page below 180 // On windows, you may not access the stack more than one page below
170 // the most recently mapped page. To make the allocated area randomly 181 // the most recently mapped page. To make the allocated area randomly
171 // accessible, we write to each page in turn (the value is irrelevant). 182 // accessible, we write to each page in turn (the value is irrelevant).
172 const int kPageSize = 4 * KB; 183 const int kPageSize = 4 * KB;
173 for (int offset = slots * kPointerSize - kPageSize; 184 for (int offset = slots * kPointerSize - kPageSize;
174 offset > 0; 185 offset > 0;
175 offset -= kPageSize) { 186 offset -= kPageSize) {
176 __ movq(Operand(rsp, offset), rax); 187 __ movq(Operand(rsp, offset), rax);
177 } 188 }
178 #endif 189 #endif
179 } 190 }
180 } 191 }
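
The _MSC_VER block above exists because Windows grows the stack one guard page at a time, so a single large subtraction from rsp must be followed by a write somewhere in every page of the new area. A standalone sketch that just prints the offsets that loop would touch, with illustrative slot and page counts:

```cpp
#include <cstdio>

// Mirrors the #ifdef _MSC_VER loop above: after reserving slots * 8 bytes in
// one subtraction, touch one word per 4 KB page so the OS commits the stack
// page by page. The slot count here is purely illustrative.
int main() {
  const int kPointerSize = 8;
  const int kPageSize = 4 * 1024;
  const int slots = 2000;  // about 16 KB of spill slots
  for (int offset = slots * kPointerSize - kPageSize;
       offset > 0;
       offset -= kPageSize) {
    std::printf("movq [rsp + %d], rax\n", offset);
  }
  return 0;
}
```
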
181 192
182 // Possibly allocate a local context. 193 // Possibly allocate a local context.
183 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 194 int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
184 if (heap_slots > 0) { 195 if (heap_slots > 0) {
185 Comment(";;; Allocate local context"); 196 Comment(";;; Allocate local context");
186 // Argument to NewContext is the function, which is still in rdi. 197 // Argument to NewContext is the function, which is still in rdi.
187 __ push(rdi); 198 __ push(rdi);
188 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 199 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
189 FastNewContextStub stub(heap_slots); 200 FastNewContextStub stub(heap_slots);
190 __ CallStub(&stub); 201 __ CallStub(&stub);
191 } else { 202 } else {
192 __ CallRuntime(Runtime::kNewFunctionContext, 1); 203 __ CallRuntime(Runtime::kNewFunctionContext, 1);
193 } 204 }
(...skipping 15 matching lines...)
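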
209 int context_offset = Context::SlotOffset(var->index()); 220 int context_offset = Context::SlotOffset(var->index());
210 __ movq(Operand(rsi, context_offset), rax); 221 __ movq(Operand(rsi, context_offset), rax);
211 // Update the write barrier. This clobbers rax and rbx. 222 // Update the write barrier. This clobbers rax and rbx.
212 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs); 223 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
213 } 224 }
214 } 225 }
215 Comment(";;; End allocate local context"); 226 Comment(";;; End allocate local context");
216 } 227 }
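
Local context allocation above picks the fast stub for small contexts and falls back to the runtime otherwise; note that the slot count now comes from info_ rather than the scope. A tiny illustrative helper mirroring that choice (the maximum stub slot count is passed in rather than assumed, since its exact value is defined by FastNewContextStub::kMaximumSlots):

```cpp
// Illustrative only; returns which allocation path the prologue would take.
const char* ContextAllocationPath(int heap_slots, int max_stub_slots) {
  if (heap_slots <= 0) return "no local context allocated";
  if (heap_slots <= max_stub_slots) return "FastNewContextStub";
  return "Runtime::kNewFunctionContext";
}
```
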
217 228
218 // Trace the call. 229 // Trace the call.
219 if (FLAG_trace) { 230 if (FLAG_trace && info()->IsOptimizing()) {
220 __ CallRuntime(Runtime::kTraceEnter, 0); 231 __ CallRuntime(Runtime::kTraceEnter, 0);
221 } 232 }
222 return !is_aborted(); 233 return !is_aborted();
223 } 234 }
224 235
225 236
226 bool LCodeGen::GenerateBody() { 237 bool LCodeGen::GenerateBody() {
227 ASSERT(is_generating()); 238 ASSERT(is_generating());
228 bool emit_instructions = true; 239 bool emit_instructions = true;
229 for (current_instruction_ = 0; 240 for (current_instruction_ = 0;
(...skipping 32 matching lines...)
262 } 273 }
263 instr->CompileToNative(this); 274 instr->CompileToNative(this);
264 } 275 }
265 } 276 }
266 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); 277 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
267 return !is_aborted(); 278 return !is_aborted();
268 } 279 }
269 280
270 281
271 bool LCodeGen::GenerateJumpTable() { 282 bool LCodeGen::GenerateJumpTable() {
283 Label needs_frame_not_call;
284 bool has_generated_needs_frame_not_call = false;
Jakob Kummerow 2012/11/28 16:28:22 see comments on ia32 version. In short: 1) if you
danno 2012/11/30 16:23:24 Done.
285 Label needs_frame_is_call;
286 bool has_generated_needs_frame_is_call = false;
272 for (int i = 0; i < jump_table_.length(); i++) { 287 for (int i = 0; i < jump_table_.length(); i++) {
273 __ bind(&jump_table_[i].label); 288 __ bind(&jump_table_[i].label);
274 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); 289 Address entry = jump_table_[i].address;
290 if (jump_table_[i].needs_frame) {
291 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
292 if (jump_table_[i].is_lazy_deopt) {
293 if (!has_generated_needs_frame_is_call) {
294 has_generated_needs_frame_is_call = true;
295 __ bind(&needs_frame_is_call);
296 __ push(rbp);
297 __ movq(rbp, rsp);
298 __ push(rsi);
299 // If there is no frame, we don't have access to the JSFunction that
300 // needs to be put into the frame.
301 ASSERT(info()->IsStub());
302 __ Move(rsi, Smi::FromInt(StackFrame::STUB));
303 __ push(rsi);
304 __ movq(rsi, MemOperand(rsp, kPointerSize));
305 __ call(kScratchRegister);
306 } else {
307 __ jmp(&needs_frame_is_call);
308 }
309 } else {
310 if (!has_generated_needs_frame_not_call) {
311 has_generated_needs_frame_not_call = true;
312 __ bind(&needs_frame_not_call);
313 __ push(rbp);
314 __ movq(rbp, rsp);
315 __ push(r8);
316 // If there is no frame, we don't have access to the JSFunction that
317 // needs to be put into the frame.
318 ASSERT(info()->IsStub());
319 __ Move(rsi, Smi::FromInt(StackFrame::STUB));
320 __ push(rsi);
321 __ movq(rsi, MemOperand(rsp, kPointerSize));
322 __ jmp(kScratchRegister);
323 } else {
324 __ jmp(&needs_frame_not_call);
325 }
326 }
327 } else {
328 if (jump_table_[i].is_lazy_deopt) {
329 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
330 } else {
331 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
332 }
333 }
275 } 334 }
276 return !is_aborted(); 335 return !is_aborted();
277 } 336 }
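
GenerateJumpTable now distinguishes deopt entries that need a frame built on the fly, emitting one shared frame-building stub for lazy-deopt (call) entries and one for eager (jump) entries; the inline comments above indicate this structure was reworked further after review. A simplified standalone sketch of the dedup logic as written in this patch set (the struct and function names are stand-ins, not V8 types):

```cpp
#include <cstdio>
#include <vector>

// Simplified stand-in for the jump table entries handled above.
struct JumpTableEntry {
  const void* address;
  bool needs_frame;
  bool is_lazy_deopt;
};

// Returns how many shared frame-building stubs get emitted: at most one for
// lazy (call) entries and one for eager (jump) entries, mirroring the
// has_generated_needs_frame_{is,not}_call flags above.
int EmitJumpTable(const std::vector<JumpTableEntry>& table) {
  bool emitted_call_stub = false;
  bool emitted_jump_stub = false;
  int stubs = 0;
  for (const JumpTableEntry& e : table) {
    if (!e.needs_frame) continue;          // plain Call/Jump straight to entry
    bool& emitted = e.is_lazy_deopt ? emitted_call_stub : emitted_jump_stub;
    if (!emitted) {
      emitted = true;
      ++stubs;                             // bind the shared stub here, once
      std::puts(e.is_lazy_deopt ? "emit needs_frame_is_call stub"
                                : "emit needs_frame_not_call stub");
    }                                      // later entries just jmp to it
  }
  return stubs;
}
```
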
278 337
279 338
280 bool LCodeGen::GenerateDeferredCode() { 339 bool LCodeGen::GenerateDeferredCode() {
281 ASSERT(is_generating()); 340 ASSERT(is_generating());
282 if (deferred_.length() > 0) { 341 if (deferred_.length() > 0) {
283 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 342 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
284 LDeferredCode* code = deferred_[i]; 343 LDeferredCode* code = deferred_[i];
285 __ bind(code->entry()); 344 __ bind(code->entry());
345 if (NeedsDeferredFrame()) {
346 Comment(";;; Deferred build frame",
347 code->instruction_index(),
348 code->instr()->Mnemonic());
349 ASSERT(!frame_is_built_);
350 ASSERT(info()->IsStub());
351 frame_is_built_ = true;
352 // Build the frame in such a way that rsi isn't trashed.
353 __ push(rbp); // Caller's frame pointer.
354 __ push(Operand(rbp, StandardFrameConstants::kContextOffset));
355 __ Push(Smi::FromInt(StackFrame::STUB));
356 __ lea(rbp, Operand(rsp, 2 * kPointerSize));
357 }
286 Comment(";;; Deferred code @%d: %s.", 358 Comment(";;; Deferred code @%d: %s.",
287 code->instruction_index(), 359 code->instruction_index(),
288 code->instr()->Mnemonic()); 360 code->instr()->Mnemonic());
289 code->Generate(); 361 code->Generate();
362 if (NeedsDeferredFrame()) {
363 Comment(";;; Deferred destory frame",
Jakob Kummerow 2012/11/28 16:28:22 nit: "destroy"
danno 2012/11/30 16:23:24 Done.
364 code->instruction_index(),
365 code->instr()->Mnemonic());
366 ASSERT(frame_is_built_);
367 frame_is_built_ = false;
368 __ movq(rsp, rbp);
369 __ pop(rbp);
370 }
290 __ jmp(code->exit()); 371 __ jmp(code->exit());
291 } 372 }
292 } 373 }
293 374
294 // Deferred code is the last part of the instruction sequence. Mark 375 // Deferred code is the last part of the instruction sequence. Mark
295 // the generated code as done unless we bailed out. 376 // the generated code as done unless we bailed out.
296 if (!is_aborted()) status_ = DONE; 377 if (!is_aborted()) status_ = DONE;
297 return !is_aborted(); 378 return !is_aborted();
298 } 379 }
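
For stubs compiled without an eager frame, each deferred code section is bracketed by a temporary STUB frame: built before code->Generate() and torn down afterwards. A purely illustrative RAII view of that pairing (DeferredFrameScope is not a V8 class; the real code emits the pushes and pops inline as above):

```cpp
#include <cstdio>

// Models the build/destroy pair emitted around each deferred section above.
class DeferredFrameScope {
 public:
  explicit DeferredFrameScope(bool needs_deferred_frame)
      : active_(needs_deferred_frame) {
    if (active_) {
      // push rbp; push caller context; Push(Smi(StackFrame::STUB));
      // lea rbp, [rsp + 2 * kPointerSize]
      std::puts(";;; Deferred build frame");
    }
  }
  ~DeferredFrameScope() {
    if (active_) {
      // movq rsp, rbp; pop rbp
      std::puts(";;; Deferred destroy frame");
    }
  }
 private:
  bool active_;
};
```

Modeling it as a scope just makes the invariant visible: the frame exists exactly for the duration of the deferred section.
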
299 380
(...skipping 92 matching lines...)
392 // arguments index points to the first element of a sequence of tagged 473 // arguments index points to the first element of a sequence of tagged
393 // values on the stack that represent the arguments. This needs to be 474 // values on the stack that represent the arguments. This needs to be
394 // kept in sync with the LArgumentsElements implementation. 475 // kept in sync with the LArgumentsElements implementation.
395 *arguments_index = -environment->parameter_count(); 476 *arguments_index = -environment->parameter_count();
396 *arguments_count = environment->parameter_count(); 477 *arguments_count = environment->parameter_count();
397 478
398 WriteTranslation(environment->outer(), 479 WriteTranslation(environment->outer(),
399 translation, 480 translation,
400 arguments_index, 481 arguments_index,
401 arguments_count); 482 arguments_count);
402 int closure_id = *info()->closure() != *environment->closure() 483 bool has_closure_id = !info()->closure().is_null() &&
484 *info()->closure() != *environment->closure();
485 int closure_id = has_closure_id
403 ? DefineDeoptimizationLiteral(environment->closure()) 486 ? DefineDeoptimizationLiteral(environment->closure())
404 : Translation::kSelfLiteralId; 487 : Translation::kSelfLiteralId;
405 488
406 switch (environment->frame_type()) { 489 switch (environment->frame_type()) {
407 case JS_FUNCTION: 490 case JS_FUNCTION:
408 translation->BeginJSFrame(environment->ast_id(), closure_id, height); 491 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
409 break; 492 break;
410 case JS_CONSTRUCT: 493 case JS_CONSTRUCT:
411 translation->BeginConstructStubFrame(closure_id, translation_size); 494 translation->BeginConstructStubFrame(closure_id, translation_size);
412 break; 495 break;
413 case JS_GETTER: 496 case JS_GETTER:
414 ASSERT(translation_size == 1); 497 ASSERT(translation_size == 1);
415 ASSERT(height == 0); 498 ASSERT(height == 0);
416 translation->BeginGetterStubFrame(closure_id); 499 translation->BeginGetterStubFrame(closure_id);
417 break; 500 break;
418 case JS_SETTER: 501 case JS_SETTER:
419 ASSERT(translation_size == 2); 502 ASSERT(translation_size == 2);
420 ASSERT(height == 0); 503 ASSERT(height == 0);
421 translation->BeginSetterStubFrame(closure_id); 504 translation->BeginSetterStubFrame(closure_id);
422 break; 505 break;
423 case ARGUMENTS_ADAPTOR: 506 case ARGUMENTS_ADAPTOR:
424 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); 507 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
425 break; 508 break;
509 case STUB:
510 translation->BeginCompiledStubFrame();
511 break;
426 } 512 }
427 513
428 // Inlined frames which push their arguments cause the index to be 514 // Inlined frames which push their arguments cause the index to be
429 // bumped and a new stack area to be used for materialization. 515 // bumped and a new stack area to be used for materialization.
430 if (environment->entry() != NULL && 516 if (environment->entry() != NULL &&
431 environment->entry()->arguments_pushed()) { 517 environment->entry()->arguments_pushed()) {
432 *arguments_index = *arguments_index < 0 518 *arguments_index = *arguments_index < 0
433 ? GetStackSlotCount() 519 ? GetStackSlotCount()
434 : *arguments_index + *arguments_count; 520 : *arguments_index + *arguments_count;
435 *arguments_count = environment->entry()->arguments_count() + 1; 521 *arguments_count = environment->entry()->arguments_count() + 1;
(...skipping 170 matching lines...)
606 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); 692 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
607 deoptimizations_.Add(environment, environment->zone()); 693 deoptimizations_.Add(environment, environment->zone());
608 } 694 }
609 } 695 }
610 696
611 697
612 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { 698 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
613 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 699 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
614 ASSERT(environment->HasBeenRegistered()); 700 ASSERT(environment->HasBeenRegistered());
615 int id = environment->deoptimization_index(); 701 int id = environment->deoptimization_index();
616 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); 702 ASSERT(info()->IsOptimizing() || info()->IsStub());
703 Deoptimizer::BailoutType bailout_type = info()->IsStub()
704 ? Deoptimizer::LAZY
705 : Deoptimizer::EAGER;
706 Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type);
617 if (entry == NULL) { 707 if (entry == NULL) {
618 Abort("bailout was not prepared"); 708 Abort("bailout was not prepared");
619 return; 709 return;
620 } 710 }
621 711
712 ASSERT(info()->IsStub() || frame_is_built_);
713 bool lazy_deopt = info()->IsStub();
622 if (cc == no_condition) { 714 if (cc == no_condition) {
623 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); 715 if (lazy_deopt) {
716 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
717 } else {
718 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
719 }
624 } else { 720 } else {
625 // We often have several deopts to the same entry, reuse the last 721 // We often have several deopts to the same entry, reuse the last
626 // jump entry if this is the case. 722 // jump entry if this is the case.
627 if (jump_table_.is_empty() || 723 if (jump_table_.is_empty() ||
628 jump_table_.last().address != entry) { 724 jump_table_.last().address != entry ||
629 jump_table_.Add(JumpTableEntry(entry), zone()); 725 jump_table_.last().needs_frame != !frame_is_built_ ||
726 jump_table_.last().is_lazy_deopt != lazy_deopt) {
727 JumpTableEntry table_entry(entry, !frame_is_built_, lazy_deopt);
728 jump_table_.Add(table_entry, zone());
630 } 729 }
631 __ j(cc, &jump_table_.last().label); 730 __ j(cc, &jump_table_.last().label);
632 } 731 }
633 } 732 }
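
DeoptimizeIf reuses the last jump-table entry only when the deopt entry address, the needs-frame flag, and the lazy-deopt flag all match; otherwise it appends a new entry. A standalone sketch of that reuse test, using the same simplified JumpTableEntry stand-in as in the earlier sketch:

```cpp
#include <vector>

struct JumpTableEntry {   // simplified stand-in, not the V8 struct
  const void* address;
  bool needs_frame;
  bool is_lazy_deopt;
};

// Mirrors the condition in DeoptimizeIf above: share the last entry only
// when address and both flags match; otherwise append a fresh one.
JumpTableEntry& EntryToJumpTo(std::vector<JumpTableEntry>& table,
                              const void* entry,
                              bool frame_is_built,
                              bool lazy_deopt) {
  if (table.empty() ||
      table.back().address != entry ||
      table.back().needs_frame != !frame_is_built ||
      table.back().is_lazy_deopt != lazy_deopt) {
    table.push_back(JumpTableEntry{entry, !frame_is_built, lazy_deopt});
  }
  return table.back();
}
```
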
634 733
635 734
636 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { 735 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
637 int length = deoptimizations_.length(); 736 int length = deoptimizations_.length();
638 if (length == 0) return; 737 if (length == 0) return;
639 Handle<DeoptimizationInputData> data = 738 Handle<DeoptimizationInputData> data =
(...skipping 1644 matching lines...)
2284 __ j(condition, &true_value, Label::kNear); 2383 __ j(condition, &true_value, Label::kNear);
2285 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); 2384 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2286 __ jmp(&done, Label::kNear); 2385 __ jmp(&done, Label::kNear);
2287 __ bind(&true_value); 2386 __ bind(&true_value);
2288 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); 2387 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2289 __ bind(&done); 2388 __ bind(&done);
2290 } 2389 }
2291 2390
2292 2391
2293 void LCodeGen::DoReturn(LReturn* instr) { 2392 void LCodeGen::DoReturn(LReturn* instr) {
2294 if (FLAG_trace) { 2393 if (FLAG_trace && info()->IsOptimizing()) {
2295 // Preserve the return value on the stack and rely on the runtime 2394 // Preserve the return value on the stack and rely on the runtime
2296 // call to return the value in the same register. 2395 // call to return the value in the same register.
2297 __ push(rax); 2396 __ push(rax);
2298 __ CallRuntime(Runtime::kTraceExit, 1); 2397 __ CallRuntime(Runtime::kTraceExit, 1);
2299 } 2398 }
2300 __ movq(rsp, rbp); 2399 if (NeedsEagerFrame()) {
2301 __ pop(rbp); 2400 __ movq(rsp, rbp);
2302 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx); 2401 __ pop(rbp);
2402 }
2403 if (info()->IsStub()) {
2404 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2405 __ Ret(0, r10);
2406 } else {
2407 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
2408 }
2303 } 2409 }
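
The epilogue above now tears the frame down only when one was built eagerly, and a stub returns without popping any argument slots, while an optimized function pops its receiver and parameters. A small illustrative helper for the byte count that Ret drops in each case (BytesPoppedOnReturn is an invented name):

```cpp
// Illustrative helper: how many bytes of arguments the epilogue above pops.
// kPointerSize is 8 on x64; "+ 1" accounts for the receiver slot.
int BytesPoppedOnReturn(bool is_stub, int parameter_count) {
  const int kPointerSize = 8;
  if (is_stub) return 0;                          // __ Ret(0, r10)
  return (parameter_count + 1) * kPointerSize;    // __ Ret((n + 1) * 8, rcx)
}
```

For example, an optimized function with two parameters pops 24 bytes (receiver plus two arguments); a stub pops nothing beyond the return address.
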
2304 2410
2305 2411
2306 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { 2412 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2307 Register result = ToRegister(instr->result()); 2413 Register result = ToRegister(instr->result());
2308 __ LoadGlobalCell(result, instr->hydrogen()->cell()); 2414 __ LoadGlobalCell(result, instr->hydrogen()->cell());
2309 if (instr->hydrogen()->RequiresHoleCheck()) { 2415 if (instr->hydrogen()->RequiresHoleCheck()) {
2310 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 2416 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2311 DeoptimizeIf(equal, instr->environment()); 2417 DeoptimizeIf(equal, instr->environment());
2312 } 2418 }
(...skipping 2210 matching lines...)
4523 } else { 4629 } else {
4524 __ Cmp(reg, target); 4630 __ Cmp(reg, target);
4525 } 4631 }
4526 DeoptimizeIf(not_equal, instr->environment()); 4632 DeoptimizeIf(not_equal, instr->environment());
4527 } 4633 }
4528 4634
4529 4635
4530 void LCodeGen::DoCheckMapCommon(Register reg, 4636 void LCodeGen::DoCheckMapCommon(Register reg,
4531 Handle<Map> map, 4637 Handle<Map> map,
4532 CompareMapMode mode, 4638 CompareMapMode mode,
4533 LEnvironment* env) { 4639 LInstruction* instr) {
4534 Label success; 4640 Label success;
4535 __ CompareMap(reg, map, &success, mode); 4641 __ CompareMap(reg, map, &success, mode);
4536 DeoptimizeIf(not_equal, env); 4642 DeoptimizeIf(not_equal, instr->environment());
4537 __ bind(&success); 4643 __ bind(&success);
4538 } 4644 }
4539 4645
4540 4646
4541 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { 4647 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4542 LOperand* input = instr->value(); 4648 LOperand* input = instr->value();
4543 ASSERT(input->IsRegister()); 4649 ASSERT(input->IsRegister());
4544 Register reg = ToRegister(input); 4650 Register reg = ToRegister(input);
4545 4651
4546 Label success; 4652 Label success;
4547 SmallMapList* map_set = instr->hydrogen()->map_set(); 4653 SmallMapList* map_set = instr->hydrogen()->map_set();
4548 for (int i = 0; i < map_set->length() - 1; i++) { 4654 for (int i = 0; i < map_set->length() - 1; i++) {
4549 Handle<Map> map = map_set->at(i); 4655 Handle<Map> map = map_set->at(i);
4550 __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP); 4656 __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
4551 __ j(equal, &success); 4657 __ j(equal, &success);
4552 } 4658 }
4553 Handle<Map> map = map_set->last(); 4659 Handle<Map> map = map_set->last();
4554 DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment()); 4660 DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr);
4555 __ bind(&success); 4661 __ bind(&success);
4556 } 4662 }
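
DoCheckMaps compares the object's map against every allowed map, branching to success on the first match; only a mismatch against the final map deoptimizes, now routed through DoCheckMapCommon, which takes the LInstruction rather than a bare environment. A standalone sketch of that match-any, deopt-on-last pattern (Map and deoptimize are illustrative stand-ins):

```cpp
#include <cstddef>
#include <vector>

using Map = const void*;

// Every map except the last is a non-deopting compare-and-branch; only a
// mismatch on the final map bails out, as in the loop above.
bool CheckMaps(Map actual, const std::vector<Map>& map_set,
               void (*deoptimize)()) {
  for (std::size_t i = 0; i + 1 < map_set.size(); ++i) {
    if (actual == map_set[i]) return true;   // __ j(equal, &success)
  }
  if (actual != map_set.back()) {            // DoCheckMapCommon + DeoptimizeIf
    deoptimize();
    return false;
  }
  return true;
}
```
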
4557 4663
4558 4664
4559 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { 4665 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4560 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); 4666 XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4561 Register result_reg = ToRegister(instr->result()); 4667 Register result_reg = ToRegister(instr->result());
4562 __ ClampDoubleToUint8(value_reg, xmm0, result_reg); 4668 __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
4563 } 4669 }
4564 4670
(...skipping 46 matching lines...)
4611 4717
4612 Handle<JSObject> holder = instr->holder(); 4718 Handle<JSObject> holder = instr->holder();
4613 Handle<JSObject> current_prototype = instr->prototype(); 4719 Handle<JSObject> current_prototype = instr->prototype();
4614 4720
4615 // Load prototype object. 4721 // Load prototype object.
4616 __ LoadHeapObject(reg, current_prototype); 4722 __ LoadHeapObject(reg, current_prototype);
4617 4723
4618 // Check prototype maps up to the holder. 4724 // Check prototype maps up to the holder.
4619 while (!current_prototype.is_identical_to(holder)) { 4725 while (!current_prototype.is_identical_to(holder)) {
4620 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), 4726 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4621 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment()); 4727 ALLOW_ELEMENT_TRANSITION_MAPS, instr);
4622 current_prototype = 4728 current_prototype =
4623 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); 4729 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4624 // Load next prototype object. 4730 // Load next prototype object.
4625 __ LoadHeapObject(reg, current_prototype); 4731 __ LoadHeapObject(reg, current_prototype);
4626 } 4732 }
4627 4733
4628 // Check the holder map. 4734 // Check the holder map.
4629 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), 4735 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4630 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment()); 4736 ALLOW_ELEMENT_TRANSITION_MAPS, instr);
4631 } 4737 }
4632 4738
4633 4739
4634 void LCodeGen::DoAllocateObject(LAllocateObject* instr) { 4740 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4635 class DeferredAllocateObject: public LDeferredCode { 4741 class DeferredAllocateObject: public LDeferredCode {
4636 public: 4742 public:
4637 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr) 4743 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
4638 : LDeferredCode(codegen), instr_(instr) { } 4744 : LDeferredCode(codegen), instr_(instr) { }
4639 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); } 4745 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
4640 virtual LInstruction* instr() { return instr_; } 4746 virtual LInstruction* instr() { return instr_; }
(...skipping 515 matching lines...)
5156 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); 5262 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
5157 5263
5158 // Check the marker in the calling frame. 5264 // Check the marker in the calling frame.
5159 __ bind(&check_frame_marker); 5265 __ bind(&check_frame_marker);
5160 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), 5266 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
5161 Smi::FromInt(StackFrame::CONSTRUCT)); 5267 Smi::FromInt(StackFrame::CONSTRUCT));
5162 } 5268 }
5163 5269
5164 5270
5165 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { 5271 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5272 if (info()->IsStub()) return;
5166 // Ensure that we have enough space after the previous lazy-bailout 5273 // Ensure that we have enough space after the previous lazy-bailout
5167 // instruction for patching the code here. 5274 // instruction for patching the code here.
5168 int current_pc = masm()->pc_offset(); 5275 int current_pc = masm()->pc_offset();
5169 if (current_pc < last_lazy_deopt_pc_ + space_needed) { 5276 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5170 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; 5277 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5171 __ Nop(padding_size); 5278 __ Nop(padding_size);
5172 } 5279 }
5173 } 5280 }
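
EnsureSpaceForLazyDeopt pads with NOPs so the call patched in for lazy deoptimization never overlaps the previous patch site, and it now returns early for stubs, presumably because stub bailouts are emitted as explicit calls in DeoptimizeIf above rather than patched in afterwards. A small sketch of the padding arithmetic (LazyDeoptPaddingBytes is an invented name):

```cpp
#include <algorithm>

// Mirrors EnsureSpaceForLazyDeopt above: pad with NOPs until current_pc is at
// least space_needed bytes past the previous lazy-deopt point; stubs get 0,
// matching the early return in the real code.
int LazyDeoptPaddingBytes(bool is_stub,
                          int current_pc,
                          int last_lazy_deopt_pc,
                          int space_needed) {
  if (is_stub) return 0;
  return std::max(0, last_lazy_deopt_pc + space_needed - current_pc);
}
```
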
5174 5281
5175 5282
(...skipping 205 matching lines...)
5381 FixedArray::kHeaderSize - kPointerSize)); 5488 FixedArray::kHeaderSize - kPointerSize));
5382 __ bind(&done); 5489 __ bind(&done);
5383 } 5490 }
5384 5491
5385 5492
5386 #undef __ 5493 #undef __
5387 5494
5388 } } // namespace v8::internal 5495 } } // namespace v8::internal
5389 5496
5390 #endif // V8_TARGET_ARCH_X64 5497 #endif // V8_TARGET_ARCH_X64
