Chromium Code Reviews

Unified Diff: src/x64/lithium-codegen-x64.cc

Issue 10701054: Enable stub generation using Hydrogen/Lithium (again) (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Merge with latest (created 8 years ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 101 matching lines...)
   int length = builder.position();
   Vector<char> copy = Vector<char>::New(length + 1);
   memcpy(copy.start(), builder.Finalize(), copy.length());
   masm()->RecordComment(copy.start());
 }


 bool LCodeGen::GeneratePrologue() {
   ASSERT(is_generating());

-  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+  if (info()->IsOptimizing()) {
+    ProfileEntryHookStub::MaybeCallEntryHook(masm_);

 #ifdef DEBUG
   if (strlen(FLAG_stop_at) > 0 &&
       info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
     __ int3();
   }
 #endif

   // Strict mode functions need to replace the receiver with undefined
   // when called as functions (without an explicit receiver
   // object). rcx is zero for method calls and non-zero for function
   // calls.
   if (!info_->is_classic_mode() || info_->is_native()) {
     Label ok;
     __ testq(rcx, rcx);
     __ j(zero, &ok, Label::kNear);
     // +1 for return address.
     int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
     __ movq(Operand(rsp, receiver_offset), kScratchRegister);
     __ bind(&ok);
+    }
   }

   info()->set_prologue_offset(masm_->pc_offset());
-  __ push(rbp);  // Caller's frame pointer.
-  __ movq(rbp, rsp);
-  __ push(rsi);  // Callee's context.
-  __ push(rdi);  // Callee's JS function.
+  if (NeedsEagerFrame()) {
+    ASSERT(!frame_is_built_);
+    frame_is_built_ = true;
+    __ push(rbp);  // Caller's frame pointer.
+    __ movq(rbp, rsp);
+    __ push(rsi);  // Callee's context.
+    if (info()->IsStub()) {
+      __ Push(Smi::FromInt(StackFrame::STUB));
+    } else {
+      __ push(rdi);  // Callee's JS function.
+    }
+  }

   // Reserve space for the stack slots needed by the code.
   int slots = GetStackSlotCount();
   if (slots > 0) {
     if (FLAG_debug_code) {
       __ Set(rax, slots);
       __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
       Label loop;
       __ bind(&loop);
       __ push(kScratchRegister);
       __ decl(rax);
       __ j(not_zero, &loop);
     } else {
       __ subq(rsp, Immediate(slots * kPointerSize));
 #ifdef _MSC_VER
       // On windows, you may not access the stack more than one page below
       // the most recently mapped page. To make the allocated area randomly
       // accessible, we write to each page in turn (the value is irrelevant).
       const int kPageSize = 4 * KB;
       for (int offset = slots * kPointerSize - kPageSize;
            offset > 0;
            offset -= kPageSize) {
         __ movq(Operand(rsp, offset), rax);
       }
 #endif
     }
   }

   // Possibly allocate a local context.
-  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+  int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
   if (heap_slots > 0) {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is still in rdi.
     __ push(rdi);
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
       FastNewContextStub stub(heap_slots);
       __ CallStub(&stub);
     } else {
       __ CallRuntime(Runtime::kNewFunctionContext, 1);
     }
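The _MSC_VER page-touch loop in the hunk above is the one part of the prologue with a non-obvious rationale. Below is a minimal standalone C++ sketch of the same pattern, not V8 code; the function name, the byte-buffer stand-in for the stack, and the constants are illustrative assumptions.

    #include <cstddef>
    #include <cstdint>

    // Hypothetical stand-ins for the values the generated prologue uses.
    constexpr std::ptrdiff_t kPointerSize = 8;
    constexpr std::ptrdiff_t kPageSize = 4 * 1024;

    // Touch one byte in every page of a freshly reserved region, mirroring the
    // movq-per-page loop emitted under _MSC_VER: Windows only guarantees access
    // one guard page below the most recently touched page, so each page of the
    // new stack area is written once (the value written does not matter).
    // region_base models the new stack pointer, i.e. the low end of the area.
    void TouchReservedPages(std::uint8_t* region_base, std::ptrdiff_t slots) {
      for (std::ptrdiff_t offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        region_base[offset] = 0;
      }
    }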
(...skipping 15 matching lines...)
         int context_offset = Context::SlotOffset(var->index());
         __ movq(Operand(rsi, context_offset), rax);
         // Update the write barrier. This clobbers rax and rbx.
         __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
       }
     }
     Comment(";;; End allocate local context");
   }

   // Trace the call.
-  if (FLAG_trace) {
+  if (FLAG_trace && info()->IsOptimizing()) {
     __ CallRuntime(Runtime::kTraceEnter, 0);
   }
   return !is_aborted();
 }


 bool LCodeGen::GenerateBody() {
   ASSERT(is_generating());
   bool emit_instructions = true;
   for (current_instruction_ = 0;
(...skipping 32 matching lines...)
       }
       instr->CompileToNative(this);
     }
   }
   EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
   return !is_aborted();
 }


 bool LCodeGen::GenerateJumpTable() {
+  Label needs_frame_not_call;
+  Label needs_frame_is_call;
   for (int i = 0; i < jump_table_.length(); i++) {
     __ bind(&jump_table_[i].label);
-    __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
+    Address entry = jump_table_[i].address;
+    if (jump_table_[i].needs_frame) {
+      __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
+      if (jump_table_[i].is_lazy_deopt) {
+        if (needs_frame_is_call.is_bound()) {
+          __ jmp(&needs_frame_is_call);
+        } else {
+          __ bind(&needs_frame_is_call);
+          __ push(rbp);
+          __ movq(rbp, rsp);
+          __ push(rsi);
+          // This variant of deopt can only be used with stubs. Since we don't
+          // have a function pointer to install in the stack frame that we're
+          // building, install a special marker there instead.
+          ASSERT(info()->IsStub());
+          __ Move(rsi, Smi::FromInt(StackFrame::STUB));
+          __ push(rsi);
+          __ movq(rsi, MemOperand(rsp, kPointerSize));
+          __ call(kScratchRegister);
+        }
+      } else {
+        if (needs_frame_not_call.is_bound()) {
+          __ jmp(&needs_frame_not_call);
+        } else {
+          __ bind(&needs_frame_not_call);
+          __ push(rbp);
+          __ movq(rbp, rsp);
+          __ push(r8);
+          // This variant of deopt can only be used with stubs. Since we don't
+          // have a function pointer to install in the stack frame that we're
+          // building, install a special marker there instead.
+          ASSERT(info()->IsStub());
+          __ Move(rsi, Smi::FromInt(StackFrame::STUB));
+          __ push(rsi);
+          __ movq(rsi, MemOperand(rsp, kPointerSize));
+          __ jmp(kScratchRegister);
+        }
+      }
+    } else {
+      if (jump_table_[i].is_lazy_deopt) {
+        __ Call(entry, RelocInfo::RUNTIME_ENTRY);
+      } else {
+        __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
+      }
+    }
   }
   return !is_aborted();
 }


 bool LCodeGen::GenerateDeferredCode() {
   ASSERT(is_generating());
   if (deferred_.length() > 0) {
     for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
       LDeferredCode* code = deferred_[i];
       __ bind(code->entry());
+      if (NeedsDeferredFrame()) {
+        Comment(";;; Deferred build frame",
+                code->instruction_index(),
+                code->instr()->Mnemonic());
+        ASSERT(!frame_is_built_);
+        ASSERT(info()->IsStub());
+        frame_is_built_ = true;
+        // Build the frame in such a way that esi isn't trashed.
+        __ push(rbp);  // Caller's frame pointer.
+        __ push(Operand(rbp, StandardFrameConstants::kContextOffset));
+        __ Push(Smi::FromInt(StackFrame::STUB));
+        __ lea(rbp, Operand(rsp, 2 * kPointerSize));
+      }
       Comment(";;; Deferred code @%d: %s.",
               code->instruction_index(),
               code->instr()->Mnemonic());
       code->Generate();
+      if (NeedsDeferredFrame()) {
+        Comment(";;; Deferred destroy frame",
+                code->instruction_index(),
+                code->instr()->Mnemonic());
+        ASSERT(frame_is_built_);
+        frame_is_built_ = false;
+        __ movq(rsp, rbp);
+        __ pop(rbp);
+      }
       __ jmp(code->exit());
     }
   }

   // Deferred code is the last part of the instruction sequence. Mark
   // the generated code as done unless we bailed out.
   if (!is_aborted()) status_ = DONE;
   return !is_aborted();
 }

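For stub code, the deferred-code hunk above temporarily builds and tears down a frame around each `code->Generate()` call. The following sketch spells out the slot layout that push/push/push/lea sequence produces relative to the new rbp; the constant names are illustrative only, not the V8 headers.

    // After: push rbp; push [rbp + context slot]; push Smi(StackFrame::STUB);
    //        lea rbp, [rsp + 2 * kPointerSize];
    // the new rbp points at the saved caller rbp, giving this layout:
    constexpr int kPointerSize = 8;
    constexpr int kSavedCallerFpOffset = 0;                  // [rbp]      caller's rbp
    constexpr int kContextSlotOffset   = -1 * kPointerSize;  // [rbp - 8]  copied context
    constexpr int kStubMarkerOffset    = -2 * kPointerSize;  // [rbp - 16] STUB marker smi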
(...skipping 92 matching lines...)
   // arguments index points to the first element of a sequence of tagged
   // values on the stack that represent the arguments. This needs to be
   // kept in sync with the LArgumentsElements implementation.
   *arguments_index = -environment->parameter_count();
   *arguments_count = environment->parameter_count();

   WriteTranslation(environment->outer(),
                    translation,
                    arguments_index,
                    arguments_count);
-  int closure_id = *info()->closure() != *environment->closure()
+  bool has_closure_id = !info()->closure().is_null() &&
+      *info()->closure() != *environment->closure();
+  int closure_id = has_closure_id
       ? DefineDeoptimizationLiteral(environment->closure())
       : Translation::kSelfLiteralId;

   switch (environment->frame_type()) {
     case JS_FUNCTION:
       translation->BeginJSFrame(environment->ast_id(), closure_id, height);
       break;
     case JS_CONSTRUCT:
       translation->BeginConstructStubFrame(closure_id, translation_size);
       break;
     case JS_GETTER:
       ASSERT(translation_size == 1);
       ASSERT(height == 0);
       translation->BeginGetterStubFrame(closure_id);
       break;
     case JS_SETTER:
       ASSERT(translation_size == 2);
       ASSERT(height == 0);
       translation->BeginSetterStubFrame(closure_id);
       break;
     case ARGUMENTS_ADAPTOR:
       translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
       break;
+    case STUB:
+      translation->BeginCompiledStubFrame();
+      break;
   }

   // Inlined frames which push their arguments cause the index to be
   // bumped and a new stack area to be used for materialization.
   if (environment->entry() != NULL &&
       environment->entry()->arguments_pushed()) {
     *arguments_index = *arguments_index < 0
         ? GetStackSlotCount()
         : *arguments_index + *arguments_count;
     *arguments_count = environment->entry()->arguments_count() + 1;
(...skipping 170 matching lines...)
                        (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
     deoptimizations_.Add(environment, environment->zone());
   }
 }


 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
   ASSERT(environment->HasBeenRegistered());
   int id = environment->deoptimization_index();
-  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
+  ASSERT(info()->IsOptimizing() || info()->IsStub());
+  Deoptimizer::BailoutType bailout_type = info()->IsStub()
+      ? Deoptimizer::LAZY
+      : Deoptimizer::EAGER;
+  Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type);
   if (entry == NULL) {
     Abort("bailout was not prepared");
     return;
   }

+  ASSERT(info()->IsStub() || frame_is_built_);
+  bool lazy_deopt = info()->IsStub();
   if (cc == no_condition) {
-    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
+    if (lazy_deopt) {
+      __ Call(entry, RelocInfo::RUNTIME_ENTRY);
+    } else {
+      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
+    }
   } else {
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
     if (jump_table_.is_empty() ||
-        jump_table_.last().address != entry) {
-      jump_table_.Add(JumpTableEntry(entry), zone());
+        jump_table_.last().address != entry ||
+        jump_table_.last().needs_frame != !frame_is_built_ ||
+        jump_table_.last().is_lazy_deopt != lazy_deopt) {
+      JumpTableEntry table_entry(entry, !frame_is_built_, lazy_deopt);
+      jump_table_.Add(table_entry, zone());
     }
     __ j(cc, &jump_table_.last().label);
   }
 }


 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   int length = deoptimizations_.length();
   if (length == 0) return;
   Handle<DeoptimizationInputData> data =
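The reuse rule added to DeoptimizeIf above keys a jump-table entry on the target address plus the two new flags. Here is a standalone sketch of that dedup logic with simplified stand-in types (not the V8 classes):

    #include <cstdint>
    #include <vector>

    struct JumpTableEntry {
      std::uintptr_t address;
      bool needs_frame;
      bool is_lazy_deopt;
    };

    // Returns the entry to jump to, appending a new one only when the last
    // entry cannot be reused (mirrors the jump_table_.last() checks above).
    const JumpTableEntry& GetOrAddEntry(std::vector<JumpTableEntry>& table,
                                        std::uintptr_t address,
                                        bool needs_frame,
                                        bool is_lazy_deopt) {
      if (table.empty() || table.back().address != address ||
          table.back().needs_frame != needs_frame ||
          table.back().is_lazy_deopt != is_lazy_deopt) {
        table.push_back(JumpTableEntry{address, needs_frame, is_lazy_deopt});
      }
      return table.back();
    }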
(...skipping 1644 matching lines...)
   __ j(condition, &true_value, Label::kNear);
   __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
   __ jmp(&done, Label::kNear);
   __ bind(&true_value);
   __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
   __ bind(&done);
 }


 void LCodeGen::DoReturn(LReturn* instr) {
-  if (FLAG_trace) {
+  if (FLAG_trace && info()->IsOptimizing()) {
     // Preserve the return value on the stack and rely on the runtime
     // call to return the value in the same register.
     __ push(rax);
     __ CallRuntime(Runtime::kTraceExit, 1);
   }
-  __ movq(rsp, rbp);
-  __ pop(rbp);
-  __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
+  if (NeedsEagerFrame()) {
+    __ movq(rsp, rbp);
+    __ pop(rbp);
+  }
+  if (info()->IsStub()) {
+    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+    __ Ret(0, r10);
+  } else {
+    __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
+  }
 }


 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
   Register result = ToRegister(instr->result());
   __ LoadGlobalCell(result, instr->hydrogen()->cell());
   if (instr->hydrogen()->RequiresHoleCheck()) {
     __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
     DeoptimizeIf(equal, instr->environment());
   }
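DoReturn above now distinguishes stub returns from optimized-function returns. A small sketch of the argument-popping rule it implements follows; `ReturnStackAdjustment` is an illustrative helper, not part of the patch.

    constexpr int kPointerSize = 8;  // x64 pointer width assumed by the patch.

    // Bytes of caller-pushed state popped on return: a stub owns no receiver
    // or arguments, so it returns with no adjustment (__ Ret(0, r10) above),
    // while an optimized function pops its parameters plus the receiver slot.
    int ReturnStackAdjustment(bool is_stub, int parameter_count) {
      return is_stub ? 0 : (parameter_count + 1) * kPointerSize;
    }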
(...skipping 2210 matching lines...)
   } else {
     __ Cmp(reg, target);
   }
   DeoptimizeIf(not_equal, instr->environment());
 }


 void LCodeGen::DoCheckMapCommon(Register reg,
                                 Handle<Map> map,
                                 CompareMapMode mode,
-                                LEnvironment* env) {
+                                LInstruction* instr) {
   Label success;
   __ CompareMap(reg, map, &success, mode);
-  DeoptimizeIf(not_equal, env);
+  DeoptimizeIf(not_equal, instr->environment());
   __ bind(&success);
 }


 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);

   Label success;
   SmallMapList* map_set = instr->hydrogen()->map_set();
   for (int i = 0; i < map_set->length() - 1; i++) {
     Handle<Map> map = map_set->at(i);
     __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
     __ j(equal, &success);
   }
   Handle<Map> map = map_set->last();
-  DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
+  DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr);
   __ bind(&success);
 }


 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
   XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
   Register result_reg = ToRegister(instr->result());
   __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
 }

(...skipping 46 matching lines...)

   Handle<JSObject> holder = instr->holder();
   Handle<JSObject> current_prototype = instr->prototype();

   // Load prototype object.
   __ LoadHeapObject(reg, current_prototype);

   // Check prototype maps up to the holder.
   while (!current_prototype.is_identical_to(holder)) {
     DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
-                     ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
+                     ALLOW_ELEMENT_TRANSITION_MAPS, instr);
     current_prototype =
         Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
     // Load next prototype object.
     __ LoadHeapObject(reg, current_prototype);
   }

   // Check the holder map.
   DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
-                   ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
+                   ALLOW_ELEMENT_TRANSITION_MAPS, instr);
 }


 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
   class DeferredAllocateObject: public LDeferredCode {
    public:
     DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
         : LDeferredCode(codegen), instr_(instr) { }
     virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
     virtual LInstruction* instr() { return instr_; }
(...skipping 515 matching lines...)
   __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));

   // Check the marker in the calling frame.
   __ bind(&check_frame_marker);
   __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
          Smi::FromInt(StackFrame::CONSTRUCT));
 }


 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
+  if (info()->IsStub()) return;
   // Ensure that we have enough space after the previous lazy-bailout
   // instruction for patching the code here.
   int current_pc = masm()->pc_offset();
   if (current_pc < last_lazy_deopt_pc_ + space_needed) {
     int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
     __ Nop(padding_size);
   }
 }


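The padding rule in EnsureSpaceForLazyDeopt above is plain arithmetic; the sketch below restates it as a standalone helper (illustrative name and signature, not the V8 method). If the current code offset is too close to the previous lazy-deopt point, enough nop bytes are emitted so the two sites end up at least `space_needed` bytes apart, and stubs skip the check entirely.

    // Number of nop bytes to emit before the next lazy-deopt site.
    int LazyDeoptPaddingBytes(int current_pc, int last_lazy_deopt_pc,
                              int space_needed, bool is_stub) {
      if (is_stub) return 0;  // The patch returns early for stubs.
      int needed_end = last_lazy_deopt_pc + space_needed;
      return current_pc < needed_end ? needed_end - current_pc : 0;
    }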
(...skipping 205 matching lines...)
                                    FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64