Chromium Code Reviews

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 11498006: Revert 13157, 13145 and 13140: Crankshaft code stubs. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 101 matching lines...)
112 int length = builder.position(); 112 int length = builder.position();
113 Vector<char> copy = Vector<char>::New(length + 1); 113 Vector<char> copy = Vector<char>::New(length + 1);
114 memcpy(copy.start(), builder.Finalize(), copy.length()); 114 memcpy(copy.start(), builder.Finalize(), copy.length());
115 masm()->RecordComment(copy.start()); 115 masm()->RecordComment(copy.start());
116 } 116 }
117 117
118 118
119 bool LCodeGen::GeneratePrologue() { 119 bool LCodeGen::GeneratePrologue() {
120 ASSERT(is_generating()); 120 ASSERT(is_generating());
121 121
122 if (info()->IsOptimizing()) { 122 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
123 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
124 123
125 #ifdef DEBUG 124 #ifdef DEBUG
126 if (strlen(FLAG_stop_at) > 0 && 125 if (strlen(FLAG_stop_at) > 0 &&
127 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 126 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
128 __ int3(); 127 __ int3();
129 } 128 }
130 #endif 129 #endif
131 130
132 // Strict mode functions need to replace the receiver with undefined 131 // Strict mode functions need to replace the receiver with undefined
133 // when called as functions (without an explicit receiver 132 // when called as functions (without an explicit receiver
134 // object). rcx is zero for method calls and non-zero for function 133 // object). rcx is zero for method calls and non-zero for function
135 // calls. 134 // calls.
136 if (!info_->is_classic_mode() || info_->is_native()) { 135 if (!info_->is_classic_mode() || info_->is_native()) {
137 Label ok; 136 Label ok;
138 __ testq(rcx, rcx); 137 __ testq(rcx, rcx);
139 __ j(zero, &ok, Label::kNear); 138 __ j(zero, &ok, Label::kNear);
140 // +1 for return address. 139 // +1 for return address.
141 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize; 140 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
142 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 141 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
143 __ movq(Operand(rsp, receiver_offset), kScratchRegister); 142 __ movq(Operand(rsp, receiver_offset), kScratchRegister);
144 __ bind(&ok); 143 __ bind(&ok);
145 }
146 } 144 }
147 145
148 info()->set_prologue_offset(masm_->pc_offset()); 146 info()->set_prologue_offset(masm_->pc_offset());
149 if (NeedsEagerFrame()) { 147 __ push(rbp); // Caller's frame pointer.
150 ASSERT(!frame_is_built_); 148 __ movq(rbp, rsp);
151 frame_is_built_ = true; 149 __ push(rsi); // Callee's context.
152 __ push(rbp); // Caller's frame pointer. 150 __ push(rdi); // Callee's JS function.
153 __ movq(rbp, rsp);
154 __ push(rsi); // Callee's context.
155 if (info()->IsStub()) {
156 __ Push(Smi::FromInt(StackFrame::STUB));
157 } else {
158 __ push(rdi); // Callee's JS function.
159 }
160 }
161 151
162 // Reserve space for the stack slots needed by the code. 152 // Reserve space for the stack slots needed by the code.
163 int slots = GetStackSlotCount(); 153 int slots = GetStackSlotCount();
164 if (slots > 0) { 154 if (slots > 0) {
165 if (FLAG_debug_code) { 155 if (FLAG_debug_code) {
166 __ Set(rax, slots); 156 __ Set(rax, slots);
167 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE); 157 __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
168 Label loop; 158 Label loop;
169 __ bind(&loop); 159 __ bind(&loop);
170 __ push(kScratchRegister); 160 __ push(kScratchRegister);
171 __ decl(rax); 161 __ decl(rax);
172 __ j(not_zero, &loop); 162 __ j(not_zero, &loop);
173 } else { 163 } else {
174 __ subq(rsp, Immediate(slots * kPointerSize)); 164 __ subq(rsp, Immediate(slots * kPointerSize));
175 #ifdef _MSC_VER 165 #ifdef _MSC_VER
176 // On windows, you may not access the stack more than one page below 166 // On windows, you may not access the stack more than one page below
177 // the most recently mapped page. To make the allocated area randomly 167 // the most recently mapped page. To make the allocated area randomly
178 // accessible, we write to each page in turn (the value is irrelevant). 168 // accessible, we write to each page in turn (the value is irrelevant).
179 const int kPageSize = 4 * KB; 169 const int kPageSize = 4 * KB;
180 for (int offset = slots * kPointerSize - kPageSize; 170 for (int offset = slots * kPointerSize - kPageSize;
181 offset > 0; 171 offset > 0;
182 offset -= kPageSize) { 172 offset -= kPageSize) {
183 __ movq(Operand(rsp, offset), rax); 173 __ movq(Operand(rsp, offset), rax);
184 } 174 }
185 #endif 175 #endif
186 } 176 }
187 } 177 }
188 178
189 // Possibly allocate a local context. 179 // Possibly allocate a local context.
190 int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 180 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
191 if (heap_slots > 0) { 181 if (heap_slots > 0) {
192 Comment(";;; Allocate local context"); 182 Comment(";;; Allocate local context");
193 // Argument to NewContext is the function, which is still in rdi. 183 // Argument to NewContext is the function, which is still in rdi.
194 __ push(rdi); 184 __ push(rdi);
195 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 185 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
196 FastNewContextStub stub(heap_slots); 186 FastNewContextStub stub(heap_slots);
197 __ CallStub(&stub); 187 __ CallStub(&stub);
198 } else { 188 } else {
199 __ CallRuntime(Runtime::kNewFunctionContext, 1); 189 __ CallRuntime(Runtime::kNewFunctionContext, 1);
200 } 190 }
(...skipping 15 matching lines...)
216 int context_offset = Context::SlotOffset(var->index()); 206 int context_offset = Context::SlotOffset(var->index());
217 __ movq(Operand(rsi, context_offset), rax); 207 __ movq(Operand(rsi, context_offset), rax);
218 // Update the write barrier. This clobbers rax and rbx. 208 // Update the write barrier. This clobbers rax and rbx.
219 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs); 209 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
220 } 210 }
221 } 211 }
222 Comment(";;; End allocate local context"); 212 Comment(";;; End allocate local context");
223 } 213 }
224 214
225 // Trace the call. 215 // Trace the call.
226 if (FLAG_trace && info()->IsOptimizing()) { 216 if (FLAG_trace) {
227 __ CallRuntime(Runtime::kTraceEnter, 0); 217 __ CallRuntime(Runtime::kTraceEnter, 0);
228 } 218 }
229 return !is_aborted(); 219 return !is_aborted();
230 } 220 }
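
The _MSC_VER block in the prologue above touches one word per 4 KB page of the newly reserved spill area so that Windows commits those pages before they are accessed out of order. A minimal standalone sketch of that loop's bounds (the helper and main are mine; only the constants and the loop shape come from the diff):

#include <cstdio>

namespace {
const int KB = 1024;
const int kPointerSize = 8;    // x64 word size, as in the generated code
const int kPageSize = 4 * KB;  // matches the constant in the prologue
}  // namespace

// Counts how many pages the prologue's loop would touch for `slots` spill
// slots; each iteration corresponds to one `movq Operand(rsp, offset), rax`.
int TouchedPages(int slots) {
  int touched = 0;
  for (int offset = slots * kPointerSize - kPageSize;
       offset > 0;
       offset -= kPageSize) {
    ++touched;
  }
  return touched;
}

int main() {
  std::printf("%d\n", TouchedPages(1000));  // 8000 bytes -> 1 extra page
  std::printf("%d\n", TouchedPages(128));   // 1024 bytes -> fits in one page
  return 0;
}
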
231 221
232 222
233 bool LCodeGen::GenerateBody() { 223 bool LCodeGen::GenerateBody() {
234 ASSERT(is_generating()); 224 ASSERT(is_generating());
235 bool emit_instructions = true; 225 bool emit_instructions = true;
236 for (current_instruction_ = 0; 226 for (current_instruction_ = 0;
(...skipping 32 matching lines...)
269 } 259 }
270 instr->CompileToNative(this); 260 instr->CompileToNative(this);
271 } 261 }
272 } 262 }
273 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); 263 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
274 return !is_aborted(); 264 return !is_aborted();
275 } 265 }
276 266
277 267
278 bool LCodeGen::GenerateJumpTable() { 268 bool LCodeGen::GenerateJumpTable() {
279 Label needs_frame_not_call;
280 Label needs_frame_is_call;
281 for (int i = 0; i < jump_table_.length(); i++) { 269 for (int i = 0; i < jump_table_.length(); i++) {
282 __ bind(&jump_table_[i].label); 270 __ bind(&jump_table_[i].label);
283 Address entry = jump_table_[i].address; 271 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
284 if (jump_table_[i].needs_frame) {
285 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
286 if (jump_table_[i].is_lazy_deopt) {
287 if (needs_frame_is_call.is_bound()) {
288 __ jmp(&needs_frame_is_call);
289 } else {
290 __ bind(&needs_frame_is_call);
291 __ push(rbp);
292 __ movq(rbp, rsp);
293 __ push(rsi);
294 // This variant of deopt can only be used with stubs. Since we don't
295 // have a function pointer to install in the stack frame that we're
296 // building, install a special marker there instead.
297 ASSERT(info()->IsStub());
298 __ Move(rsi, Smi::FromInt(StackFrame::STUB));
299 __ push(rsi);
300 __ movq(rsi, MemOperand(rsp, kPointerSize));
301 __ call(kScratchRegister);
302 }
303 } else {
304 if (needs_frame_not_call.is_bound()) {
305 __ jmp(&needs_frame_not_call);
306 } else {
307 __ bind(&needs_frame_not_call);
308 __ push(rbp);
309 __ movq(rbp, rsp);
310 __ push(r8);
311 // This variant of deopt can only be used with stubs. Since we don't
312 // have a function pointer to install in the stack frame that we're
313 // building, install a special marker there instead.
314 ASSERT(info()->IsStub());
315 __ Move(rsi, Smi::FromInt(StackFrame::STUB));
316 __ push(rsi);
317 __ movq(rsi, MemOperand(rsp, kPointerSize));
318 __ jmp(kScratchRegister);
319 }
320 }
321 } else {
322 if (jump_table_[i].is_lazy_deopt) {
323 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
324 } else {
325 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
326 }
327 }
328 } 272 }
329 return !is_aborted(); 273 return !is_aborted();
330 } 274 }
331 275
332 276
333 bool LCodeGen::GenerateDeferredCode() { 277 bool LCodeGen::GenerateDeferredCode() {
334 ASSERT(is_generating()); 278 ASSERT(is_generating());
335 if (deferred_.length() > 0) { 279 if (deferred_.length() > 0) {
336 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 280 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
337 LDeferredCode* code = deferred_[i]; 281 LDeferredCode* code = deferred_[i];
338 __ bind(code->entry()); 282 __ bind(code->entry());
339 if (NeedsDeferredFrame()) {
340 Comment(";;; Deferred build frame",
341 code->instruction_index(),
342 code->instr()->Mnemonic());
343 ASSERT(!frame_is_built_);
344 ASSERT(info()->IsStub());
345 frame_is_built_ = true;
346 // Build the frame in such a way that esi isn't trashed.
347 __ push(rbp); // Caller's frame pointer.
348 __ push(Operand(rbp, StandardFrameConstants::kContextOffset));
349 __ Push(Smi::FromInt(StackFrame::STUB));
350 __ lea(rbp, Operand(rsp, 2 * kPointerSize));
351 }
352 Comment(";;; Deferred code @%d: %s.", 283 Comment(";;; Deferred code @%d: %s.",
353 code->instruction_index(), 284 code->instruction_index(),
354 code->instr()->Mnemonic()); 285 code->instr()->Mnemonic());
355 code->Generate(); 286 code->Generate();
356 if (NeedsDeferredFrame()) {
357 Comment(";;; Deferred destroy frame",
358 code->instruction_index(),
359 code->instr()->Mnemonic());
360 ASSERT(frame_is_built_);
361 frame_is_built_ = false;
362 __ movq(rsp, rbp);
363 __ pop(rbp);
364 }
365 __ jmp(code->exit()); 287 __ jmp(code->exit());
366 } 288 }
367 } 289 }
368 290
369 // Deferred code is the last part of the instruction sequence. Mark 291 // Deferred code is the last part of the instruction sequence. Mark
370 // the generated code as done unless we bailed out. 292 // the generated code as done unless we bailed out.
371 if (!is_aborted()) status_ = DONE; 293 if (!is_aborted()) status_ = DONE;
372 return !is_aborted(); 294 return !is_aborted();
373 } 295 }
374 296
(...skipping 92 matching lines...)
467 // arguments index points to the first element of a sequence of tagged 389 // arguments index points to the first element of a sequence of tagged
468 // values on the stack that represent the arguments. This needs to be 390 // values on the stack that represent the arguments. This needs to be
469 // kept in sync with the LArgumentsElements implementation. 391 // kept in sync with the LArgumentsElements implementation.
470 *arguments_index = -environment->parameter_count(); 392 *arguments_index = -environment->parameter_count();
471 *arguments_count = environment->parameter_count(); 393 *arguments_count = environment->parameter_count();
472 394
473 WriteTranslation(environment->outer(), 395 WriteTranslation(environment->outer(),
474 translation, 396 translation,
475 arguments_index, 397 arguments_index,
476 arguments_count); 398 arguments_count);
477 bool has_closure_id = !info()->closure().is_null() && 399 int closure_id = *info()->closure() != *environment->closure()
478 *info()->closure() != *environment->closure();
479 int closure_id = has_closure_id
480 ? DefineDeoptimizationLiteral(environment->closure()) 400 ? DefineDeoptimizationLiteral(environment->closure())
481 : Translation::kSelfLiteralId; 401 : Translation::kSelfLiteralId;
482 402
483 switch (environment->frame_type()) { 403 switch (environment->frame_type()) {
484 case JS_FUNCTION: 404 case JS_FUNCTION:
485 translation->BeginJSFrame(environment->ast_id(), closure_id, height); 405 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
486 break; 406 break;
487 case JS_CONSTRUCT: 407 case JS_CONSTRUCT:
488 translation->BeginConstructStubFrame(closure_id, translation_size); 408 translation->BeginConstructStubFrame(closure_id, translation_size);
489 break; 409 break;
490 case JS_GETTER: 410 case JS_GETTER:
491 ASSERT(translation_size == 1); 411 ASSERT(translation_size == 1);
492 ASSERT(height == 0); 412 ASSERT(height == 0);
493 translation->BeginGetterStubFrame(closure_id); 413 translation->BeginGetterStubFrame(closure_id);
494 break; 414 break;
495 case JS_SETTER: 415 case JS_SETTER:
496 ASSERT(translation_size == 2); 416 ASSERT(translation_size == 2);
497 ASSERT(height == 0); 417 ASSERT(height == 0);
498 translation->BeginSetterStubFrame(closure_id); 418 translation->BeginSetterStubFrame(closure_id);
499 break; 419 break;
500 case ARGUMENTS_ADAPTOR: 420 case ARGUMENTS_ADAPTOR:
501 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size); 421 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
502 break; 422 break;
503 case STUB:
504 translation->BeginCompiledStubFrame();
505 break;
506 } 423 }
507 424
508 // Inlined frames which push their arguments cause the index to be 425 // Inlined frames which push their arguments cause the index to be
509 // bumped and a new stack area to be used for materialization. 426 // bumped and a new stack area to be used for materialization.
510 if (environment->entry() != NULL && 427 if (environment->entry() != NULL &&
511 environment->entry()->arguments_pushed()) { 428 environment->entry()->arguments_pushed()) {
512 *arguments_index = *arguments_index < 0 429 *arguments_index = *arguments_index < 0
513 ? GetStackSlotCount() 430 ? GetStackSlotCount()
514 : *arguments_index + *arguments_count; 431 : *arguments_index + *arguments_count;
515 *arguments_count = environment->entry()->arguments_count() + 1; 432 *arguments_count = environment->entry()->arguments_count() + 1;
(...skipping 170 matching lines...)
686 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); 603 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
687 deoptimizations_.Add(environment, environment->zone()); 604 deoptimizations_.Add(environment, environment->zone());
688 } 605 }
689 } 606 }
690 607
691 608
692 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { 609 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
693 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 610 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
694 ASSERT(environment->HasBeenRegistered()); 611 ASSERT(environment->HasBeenRegistered());
695 int id = environment->deoptimization_index(); 612 int id = environment->deoptimization_index();
696 ASSERT(info()->IsOptimizing() || info()->IsStub()); 613 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
697 Deoptimizer::BailoutType bailout_type = info()->IsStub()
698 ? Deoptimizer::LAZY
699 : Deoptimizer::EAGER;
700 Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type);
701 if (entry == NULL) { 614 if (entry == NULL) {
702 Abort("bailout was not prepared"); 615 Abort("bailout was not prepared");
703 return; 616 return;
704 } 617 }
705 618
706 ASSERT(info()->IsStub() || frame_is_built_);
707 bool lazy_deopt = info()->IsStub();
708 if (cc == no_condition) { 619 if (cc == no_condition) {
709 if (lazy_deopt) { 620 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
710 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
711 } else {
712 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
713 }
714 } else { 621 } else {
715 // We often have several deopts to the same entry, reuse the last 622 // We often have several deopts to the same entry, reuse the last
716 // jump entry if this is the case. 623 // jump entry if this is the case.
717 if (jump_table_.is_empty() || 624 if (jump_table_.is_empty() ||
718 jump_table_.last().address != entry || 625 jump_table_.last().address != entry) {
719 jump_table_.last().needs_frame != !frame_is_built_ || 626 jump_table_.Add(JumpTableEntry(entry), zone());
720 jump_table_.last().is_lazy_deopt != lazy_deopt) {
721 JumpTableEntry table_entry(entry, !frame_is_built_, lazy_deopt);
722 jump_table_.Add(table_entry, zone());
723 } 627 }
724 __ j(cc, &jump_table_.last().label); 628 __ j(cc, &jump_table_.last().label);
725 } 629 }
726 } 630 }
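
On the right-hand side of DeoptimizeIf, consecutive deopts to the same entry share a single jump-table slot: a new entry is added only when the table is empty or its last address differs. A self-contained sketch of that reuse pattern, with hypothetical types standing in for JumpTableEntry and the zone-allocated list:

#include <cassert>
#include <cstdint>
#include <vector>

struct Entry {
  explicit Entry(std::uintptr_t address) : address(address) {}
  std::uintptr_t address;  // deopt entry targeted by the out-of-line jump
  // The real JumpTableEntry also carries the Label that `__ j(cc, ...)` binds.
};

class JumpTable {
 public:
  // Mirrors the check in DeoptimizeIf: only add a fresh entry when the last
  // one does not already point at the requested deopt address.
  const Entry& EntryFor(std::uintptr_t address) {
    if (table_.empty() || table_.back().address != address) {
      table_.emplace_back(address);
    }
    return table_.back();
  }
  size_t size() const { return table_.size(); }

 private:
  std::vector<Entry> table_;
};

int main() {
  JumpTable t;
  t.EntryFor(0x1000);
  t.EntryFor(0x1000);  // same target: reuses the previous entry
  t.EntryFor(0x2000);
  assert(t.size() == 2);
  return 0;
}
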
727 631
728 632
729 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { 633 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
730 int length = deoptimizations_.length(); 634 int length = deoptimizations_.length();
731 if (length == 0) return; 635 if (length == 0) return;
732 Handle<DeoptimizationInputData> data = 636 Handle<DeoptimizationInputData> data =
(...skipping 1653 matching lines...)
2386 __ j(condition, &true_value, Label::kNear); 2290 __ j(condition, &true_value, Label::kNear);
2387 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); 2291 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2388 __ jmp(&done, Label::kNear); 2292 __ jmp(&done, Label::kNear);
2389 __ bind(&true_value); 2293 __ bind(&true_value);
2390 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); 2294 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2391 __ bind(&done); 2295 __ bind(&done);
2392 } 2296 }
2393 2297
2394 2298
2395 void LCodeGen::DoReturn(LReturn* instr) { 2299 void LCodeGen::DoReturn(LReturn* instr) {
2396 if (FLAG_trace && info()->IsOptimizing()) { 2300 if (FLAG_trace) {
2397 // Preserve the return value on the stack and rely on the runtime 2301 // Preserve the return value on the stack and rely on the runtime
2398 // call to return the value in the same register. 2302 // call to return the value in the same register.
2399 __ push(rax); 2303 __ push(rax);
2400 __ CallRuntime(Runtime::kTraceExit, 1); 2304 __ CallRuntime(Runtime::kTraceExit, 1);
2401 } 2305 }
2402 if (NeedsEagerFrame()) { 2306 __ movq(rsp, rbp);
2403 __ movq(rsp, rbp); 2307 __ pop(rbp);
2404 __ pop(rbp); 2308 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
2405 }
2406 if (info()->IsStub()) {
2407 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2408 __ Ret(0, r10);
2409 } else {
2410 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
2411 }
2412 } 2309 }
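
The reverted-to DoReturn tears down the frame and returns with `Ret((GetParameterCount() + 1) * kPointerSize, rcx)`, dropping the declared parameters plus one extra tagged slot (presumably the receiver) after popping the return address. A tiny sketch of that byte count, with a hypothetical helper name:

#include <cassert>

const int kPointerSize = 8;  // x64

// Bytes removed from the caller's stack by the return sequence: all declared
// parameters plus one additional tagged slot.
int BytesPoppedOnReturn(int parameter_count) {
  return (parameter_count + 1) * kPointerSize;
}

int main() {
  assert(BytesPoppedOnReturn(0) == 8);   // just the extra slot
  assert(BytesPoppedOnReturn(2) == 24);  // two parameters + extra slot
  return 0;
}
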
2413 2310
2414 2311
2415 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { 2312 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2416 Register result = ToRegister(instr->result()); 2313 Register result = ToRegister(instr->result());
2417 __ LoadGlobalCell(result, instr->hydrogen()->cell()); 2314 __ LoadGlobalCell(result, instr->hydrogen()->cell());
2418 if (instr->hydrogen()->RequiresHoleCheck()) { 2315 if (instr->hydrogen()->RequiresHoleCheck()) {
2419 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 2316 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2420 DeoptimizeIf(equal, instr->environment()); 2317 DeoptimizeIf(equal, instr->environment());
2421 } 2318 }
(...skipping 2210 matching lines...)
4632 } else { 4529 } else {
4633 __ Cmp(reg, target); 4530 __ Cmp(reg, target);
4634 } 4531 }
4635 DeoptimizeIf(not_equal, instr->environment()); 4532 DeoptimizeIf(not_equal, instr->environment());
4636 } 4533 }
4637 4534
4638 4535
4639 void LCodeGen::DoCheckMapCommon(Register reg, 4536 void LCodeGen::DoCheckMapCommon(Register reg,
4640 Handle<Map> map, 4537 Handle<Map> map,
4641 CompareMapMode mode, 4538 CompareMapMode mode,
4642 LInstruction* instr) { 4539 LEnvironment* env) {
4643 Label success; 4540 Label success;
4644 __ CompareMap(reg, map, &success, mode); 4541 __ CompareMap(reg, map, &success, mode);
4645 DeoptimizeIf(not_equal, instr->environment()); 4542 DeoptimizeIf(not_equal, env);
4646 __ bind(&success); 4543 __ bind(&success);
4647 } 4544 }
4648 4545
4649 4546
4650 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { 4547 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4651 LOperand* input = instr->value(); 4548 LOperand* input = instr->value();
4652 ASSERT(input->IsRegister()); 4549 ASSERT(input->IsRegister());
4653 Register reg = ToRegister(input); 4550 Register reg = ToRegister(input);
4654 4551
4655 Label success; 4552 Label success;
4656 SmallMapList* map_set = instr->hydrogen()->map_set(); 4553 SmallMapList* map_set = instr->hydrogen()->map_set();
4657 for (int i = 0; i < map_set->length() - 1; i++) { 4554 for (int i = 0; i < map_set->length() - 1; i++) {
4658 Handle<Map> map = map_set->at(i); 4555 Handle<Map> map = map_set->at(i);
4659 __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP); 4556 __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
4660 __ j(equal, &success); 4557 __ j(equal, &success);
4661 } 4558 }
4662 Handle<Map> map = map_set->last(); 4559 Handle<Map> map = map_set->last();
4663 DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr); 4560 DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
4664 __ bind(&success); 4561 __ bind(&success);
4665 } 4562 }
4666 4563
4667 4564
4668 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { 4565 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4669 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); 4566 XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4670 Register result_reg = ToRegister(instr->result()); 4567 Register result_reg = ToRegister(instr->result());
4671 __ ClampDoubleToUint8(value_reg, xmm0, result_reg); 4568 __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
4672 } 4569 }
4673 4570
(...skipping 46 matching lines...)
4720 4617
4721 Handle<JSObject> holder = instr->holder(); 4618 Handle<JSObject> holder = instr->holder();
4722 Handle<JSObject> current_prototype = instr->prototype(); 4619 Handle<JSObject> current_prototype = instr->prototype();
4723 4620
4724 // Load prototype object. 4621 // Load prototype object.
4725 __ LoadHeapObject(reg, current_prototype); 4622 __ LoadHeapObject(reg, current_prototype);
4726 4623
4727 // Check prototype maps up to the holder. 4624 // Check prototype maps up to the holder.
4728 while (!current_prototype.is_identical_to(holder)) { 4625 while (!current_prototype.is_identical_to(holder)) {
4729 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), 4626 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4730 ALLOW_ELEMENT_TRANSITION_MAPS, instr); 4627 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
4731 current_prototype = 4628 current_prototype =
4732 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); 4629 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4733 // Load next prototype object. 4630 // Load next prototype object.
4734 __ LoadHeapObject(reg, current_prototype); 4631 __ LoadHeapObject(reg, current_prototype);
4735 } 4632 }
4736 4633
4737 // Check the holder map. 4634 // Check the holder map.
4738 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()), 4635 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4739 ALLOW_ELEMENT_TRANSITION_MAPS, instr); 4636 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
4740 } 4637 }
4741 4638
4742 4639
4743 void LCodeGen::DoAllocateObject(LAllocateObject* instr) { 4640 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4744 class DeferredAllocateObject: public LDeferredCode { 4641 class DeferredAllocateObject: public LDeferredCode {
4745 public: 4642 public:
4746 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr) 4643 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
4747 : LDeferredCode(codegen), instr_(instr) { } 4644 : LDeferredCode(codegen), instr_(instr) { }
4748 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); } 4645 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
4749 virtual LInstruction* instr() { return instr_; } 4646 virtual LInstruction* instr() { return instr_; }
(...skipping 515 matching lines...)
5265 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); 5162 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
5266 5163
5267 // Check the marker in the calling frame. 5164 // Check the marker in the calling frame.
5268 __ bind(&check_frame_marker); 5165 __ bind(&check_frame_marker);
5269 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), 5166 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
5270 Smi::FromInt(StackFrame::CONSTRUCT)); 5167 Smi::FromInt(StackFrame::CONSTRUCT));
5271 } 5168 }
5272 5169
5273 5170
5274 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { 5171 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5275 if (info()->IsStub()) return;
5276 // Ensure that we have enough space after the previous lazy-bailout 5172 // Ensure that we have enough space after the previous lazy-bailout
5277 // instruction for patching the code here. 5173 // instruction for patching the code here.
5278 int current_pc = masm()->pc_offset(); 5174 int current_pc = masm()->pc_offset();
5279 if (current_pc < last_lazy_deopt_pc_ + space_needed) { 5175 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5280 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; 5176 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5281 __ Nop(padding_size); 5177 __ Nop(padding_size);
5282 } 5178 }
5283 } 5179 }
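
EnsureSpaceForLazyDeopt pads with nops whenever the current pc is closer than `space_needed` bytes to the previous lazy-bailout site, so the bailout call can later be patched over in place. A hedged sketch of just the padding arithmetic (the free function is mine):

#include <cassert>

// Returns how many nop bytes the code generator would emit at `current_pc`
// given the last lazy-deopt pc and the space required for patching.
int LazyDeoptPadding(int current_pc, int last_lazy_deopt_pc, int space_needed) {
  if (current_pc < last_lazy_deopt_pc + space_needed) {
    return last_lazy_deopt_pc + space_needed - current_pc;  // __ Nop(padding)
  }
  return 0;  // already far enough from the previous bailout
}

int main() {
  assert(LazyDeoptPadding(/*current_pc=*/100, /*last=*/95, /*needed=*/13) == 8);
  assert(LazyDeoptPadding(/*current_pc=*/120, /*last=*/95, /*needed=*/13) == 0);
  return 0;
}
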
5284 5180
5285 5181
(...skipping 205 matching lines...)
5491 FixedArray::kHeaderSize - kPointerSize)); 5387 FixedArray::kHeaderSize - kPointerSize));
5492 __ bind(&done); 5388 __ bind(&done);
5493 } 5389 }
5494 5390
5495 5391
5496 #undef __ 5392 #undef __
5497 5393
5498 } } // namespace v8::internal 5394 } } // namespace v8::internal
5499 5395
5500 #endif // V8_TARGET_ARCH_X64 5396 #endif // V8_TARGET_ARCH_X64