Chromium Code Reviews

Diff: src/x64/lithium-codegen-x64.cc

Issue 10701054: Enable stub generation using Hydrogen/Lithium (again) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: First pass at pre-VFP2 RA (created 8 years, 1 month ago)
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 101 matching lines...)
  int length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

-  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+  if (info()->IsOptimizing()) {
+    ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions need to replace the receiver with undefined
  // when called as functions (without an explicit receiver
  // object). rcx is zero for method calls and non-zero for function
  // calls.
  if (!info_->is_classic_mode() || info_->is_native()) {
    Label begin;
    __ bind(&begin);
    Label ok;
    __ testq(rcx, rcx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ movq(Operand(rsp, receiver_offset), kScratchRegister);
    __ bind(&ok);
    ASSERT(!FLAG_age_code ||
           (kSizeOfOptimizedStrictModePrologue == ok.pos() - begin.pos()));
+  }
  }

-  __ push(rbp);  // Caller's frame pointer.
-  __ movq(rbp, rsp);
-  __ push(rsi);  // Callee's context.
-  __ push(rdi);  // Callee's JS function.
+  if (NeedsEagerFrame()) {
+    ASSERT(!frame_is_built_);
+    frame_is_built_ = true;
+    __ push(rbp);  // Caller's frame pointer.
+    __ movq(rbp, rsp);
+    if (info()->IsStub()) {
+      __ push(rsi);  // Callee's context.
+      __ Push(Smi::FromInt(StackFrame::STUB));
+    } else {
+      __ push(rsi);  // Callee's context.
+      __ push(rdi);  // Callee's JS function.
+    }
+  }

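The stub branch above stores a STUB frame marker where a JS frame would hold the callee's function. A rough sketch of the layout implied by those pushes (8-byte slots on x64; exact offsets are an assumption for illustration, not stated in the patch):

  // Frame implied by the stub prologue above, relative to the new rbp:
  //   rbp + 8 : return address
  //   rbp + 0 : caller's rbp
  //   rbp - 8 : rsi (callee's context)
  //   rbp - 16: Smi(StackFrame::STUB)  // marker in the JS-function slot
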
  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ Set(rax, slots);
      __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
      Label loop;
      __ bind(&loop);
      __ push(kScratchRegister);
      __ decl(rax);
      __ j(not_zero, &loop);
    } else {
      __ subq(rsp, Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ movq(Operand(rsp, offset), rax);
      }
#endif
    }
  }

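A quick arithmetic check of the probe loop, with hypothetical numbers: if slots were 1280, the subq would reserve 1280 * 8 = 10240 bytes, a bit over two 4 KB pages.

  // Hypothetical trace of the loop above for slots == 1280:
  //   offset = 10240 - 4096 = 6144  -> write at rsp + 6144
  //   offset =  6144 - 4096 = 2048  -> write at rsp + 2048
  //   offset =  2048 - 4096 < 0     -> done
  // Each write lands within one page of the previous access, so Windows
  // commits every page of the newly reserved area in top-down order.
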
  // Possibly allocate a local context.
-  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+  int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
(...skipping 15 matching lines...)
        int context_offset = Context::SlotOffset(var->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers rax and rbx.
        __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
-  if (FLAG_trace) {
+  if (FLAG_trace && info()->IsOptimizing()) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
(...skipping 11 matching lines...)
    }
  }
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  return !is_aborted();
}


bool LCodeGen::GenerateJumpTable() {
  for (int i = 0; i < jump_table_.length(); i++) {
    __ bind(&jump_table_[i].label);
-    __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
+    if (jump_table_[i].is_call) {
+      __ Call(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
+    } else {
+      __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
+    }
  }
  return !is_aborted();
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
+      if (NeedsDeferredFrame()) {
+        Comment(";;; Deferred build frame @%d: %s.",
+                code->instruction_index(),
+                code->instr()->Mnemonic());
+        ASSERT(!frame_is_built_);
+        ASSERT(info()->IsStub());
+        frame_is_built_ = true;
+        // Build the frame in such a way that rsi isn't trashed.
+        __ push(rbp);  // Caller's frame pointer.
+        __ push(Operand(rbp, StandardFrameConstants::kContextOffset));
+        __ Push(Smi::FromInt(StackFrame::STUB));
+        __ lea(rbp, Operand(rsp, 2 * kPointerSize));
+      }
      Comment(";;; Deferred code @%d: %s.",
              code->instruction_index(),
              code->instr()->Mnemonic());
      code->Generate();
+      if (NeedsDeferredFrame()) {
+        Comment(";;; Deferred destroy frame @%d: %s.",
+                code->instruction_index(),
+                code->instr()->Mnemonic());
+        ASSERT(frame_is_built_);
+        frame_is_built_ = false;
+        __ movq(rsp, rbp);
+        __ pop(rbp);
+      }
      __ jmp(code->exit());
    }
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}

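The lea in the build-frame path recreates standard frame linkage without clobbering rsi: after the three pushes, rsp points at the STUB marker, so rsp + 2 * kPointerSize addresses the slot holding the caller's rbp. A sketch of what that implies (offsets assumed for illustration):

  // Deferred stub frame, relative to the rebuilt rbp:
  //   rbp + 0 : caller's rbp       <- rbp = rsp + 2 * kPointerSize
  //   rbp - 8 : context            // copied from the caller's frame,
  //                                // leaving rsi itself untouched
  //   rbp - 16: Smi(StackFrame::STUB)
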
(...skipping 92 matching lines...)
  // arguments index points to the first element of a sequence of tagged
  // values on the stack that represent the arguments. This needs to be
  // kept in sync with the LArgumentsElements implementation.
  *arguments_index = -environment->parameter_count();
  *arguments_count = environment->parameter_count();

  WriteTranslation(environment->outer(),
                   translation,
                   arguments_index,
                   arguments_count);
-  int closure_id = *info()->closure() != *environment->closure()
+  bool has_closure_id = !info()->closure().is_null() &&
+      *info()->closure() != *environment->closure();
+  int closure_id = has_closure_id
      ? DefineDeoptimizationLiteral(environment->closure())
      : Translation::kSelfLiteralId;

  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case JS_GETTER:
      ASSERT(translation_size == 1);
      ASSERT(height == 0);
      translation->BeginGetterStubFrame(closure_id);
      break;
    case JS_SETTER:
      ASSERT(translation_size == 2);
      ASSERT(height == 0);
      translation->BeginSetterStubFrame(closure_id);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
+    case STUB:
+      translation->BeginCompiledStubPseudoFrame(Code::KEYED_LOAD_IC);
+      break;
  }

  // Inlined frames which push their arguments cause the index to be
  // bumped and a new stack area to be used for materialization.
  if (environment->entry() != NULL &&
      environment->entry()->arguments_pushed()) {
    *arguments_index = *arguments_index < 0
        ? GetStackSlotCount()
        : *arguments_index + *arguments_count;
    *arguments_count = environment->entry()->arguments_count() + 1;
(...skipping 170 matching lines...)
        (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment, environment->zone());
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
-  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
+  ASSERT(info()->IsOptimizing() || info()->IsStub());
+  Deoptimizer::BailoutType bailout_type = frame_is_built_
+      ? Deoptimizer::EAGER
+      : Deoptimizer::LAZY;
+  Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

+  ASSERT(info()->IsStub() || frame_is_built_);
  if (cc == no_condition) {
-    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
+    if (frame_is_built_) {
+      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
+    } else {
+      __ Call(entry, RelocInfo::RUNTIME_ENTRY);
+    }
  } else {
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
+    bool is_call = !frame_is_built_;
    if (jump_table_.is_empty() ||
-        jump_table_.last().address != entry) {
-      jump_table_.Add(JumpTableEntry(entry), zone());
+        jump_table_.last().address != entry ||
+        jump_table_.last().is_call != is_call) {
+      jump_table_.Add(JumpTableEntry(entry, is_call), zone());
    }
    __ j(cc, &jump_table_.last().label);
  }
}


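The jump table now distinguishes Jump from Call targets: when no frame is built, the entry is reached with a Call, presumably so the deoptimizer can recover a return address. The two-argument constructor and is_call field belong to JumpTableEntry in lithium-codegen-x64.h, which is outside this file's diff; a hypothetical reconstruction, for reading convenience only:

  // Hypothetical sketch; the real declaration lives in
  // lithium-codegen-x64.h and is not shown in this diff.
  struct JumpTableEntry {
    JumpTableEntry(Address entry, bool call)
        : label(), address(entry), is_call(call) { }
    Label label;      // bound in GenerateJumpTable()
    Address address;  // deoptimization entry point
    bool is_call;     // emit Call (frameless, LAZY) instead of Jump (EAGER)
  };
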
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
(...skipping 1644 matching lines...)
  __ j(condition, &true_value, Label::kNear);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ bind(&done);
}


2271 if (FLAG_trace) { 2324 if (FLAG_trace && info()->IsOptimizing()) {
2272 // Preserve the return value on the stack and rely on the runtime 2325 // Preserve the return value on the stack and rely on the runtime
2273 // call to return the value in the same register. 2326 // call to return the value in the same register.
2274 __ push(rax); 2327 __ push(rax);
2275 __ CallRuntime(Runtime::kTraceExit, 1); 2328 __ CallRuntime(Runtime::kTraceExit, 1);
2276 } 2329 }
2277 __ movq(rsp, rbp); 2330 if (NeedsEagerFrame()) {
2278 __ pop(rbp); 2331 __ movq(rsp, rbp);
2279 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx); 2332 __ pop(rbp);
2333 }
2334 if (info()->IsStub()) {
2335 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2336 __ Ret(0, r10);
2337 } else {
2338 __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
2339 }
2280 } 2340 }
2281 2341
2282 2342
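A gloss on the two exits, as a reading of the code above rather than anything the patch documents: Ret(bytes_dropped, scratch) returns while discarding bytes_dropped bytes of stack arguments, using the scratch register only when the count does not fit a ret immediate. The stub path drops no bytes, presumably because stub arguments are passed in registers, and reloads rsi from the frame so the caller sees a valid context:

  // JS function exit: drop receiver + parameters from the caller's stack.
  //   __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
  // Stub exit: nothing to drop; r10 serves only as the scratch register.
  //   __ Ret(0, r10);
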
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ LoadGlobalCell(result, instr->hydrogen()->cell());
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
  }
(...skipping 2200 matching lines...)
  } else {
    __ Cmp(reg, target);
  }
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoCheckMapCommon(Register reg,
                                Handle<Map> map,
                                CompareMapMode mode,
-                                LEnvironment* env) {
+                                LInstruction* instr) {
  Label success;
  __ CompareMap(reg, map, &success, mode);
-  DeoptimizeIf(not_equal, env);
+  DeoptimizeIf(not_equal, instr->environment());
  __ bind(&success);
}


void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
  LOperand* input = instr->value();
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);

  Label success;
  SmallMapList* map_set = instr->hydrogen()->map_set();
  for (int i = 0; i < map_set->length() - 1; i++) {
    Handle<Map> map = map_set->at(i);
    __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
    __ j(equal, &success);
  }
  Handle<Map> map = map_set->last();
-  DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
+  DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr);
  __ bind(&success);
}


void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
}

(...skipping 46 matching lines...)

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  __ LoadHeapObject(reg, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
-                     ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
+                     ALLOW_ELEMENT_TRANSITION_MAPS, instr);
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    __ LoadHeapObject(reg, current_prototype);
  }

  // Check the holder map.
  DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
-                   ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
+                   ALLOW_ELEMENT_TRANSITION_MAPS, instr);
}


void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
  class DeferredAllocateObject: public LDeferredCode {
   public:
    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
    virtual LInstruction* instr() { return instr_; }
(...skipping 515 matching lines...)
  __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Smi::FromInt(StackFrame::CONSTRUCT));
}


void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
+  if (info()->IsStub()) return;
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    __ Nop(padding_size);
  }
}


(...skipping 205 matching lines...)
                    FixedArray::kHeaderSize - kPointerSize));
  __ bind(&done);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64
