Chromium Code Reviews

Side by Side Diff: src/x64/codegen-x64.cc

Issue 126198: X64 Implementation: Make codegen load literals and assign to local variables. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 11 years, 6 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 11 matching lines...)
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 28
29 #include "v8.h" 29 #include "v8.h"
30 #include "macro-assembler.h" 30 #include "macro-assembler.h"
31 #include "register-allocator-inl.h" 31 #include "register-allocator-inl.h"
32 #include "codegen.h" 32 #include "codegen-inl.h"
33 #include "codegen-x64-inl.h"
34
33 // TEST 35 // TEST
34 #include "compiler.h" 36 #include "compiler.h"
35 37
36 namespace v8 { 38 namespace v8 {
37 namespace internal { 39 namespace internal {
38 40
39 // ------------------------------------------------------------------------- 41 // -------------------------------------------------------------------------
40 // Platform-specific DeferredCode functions. 42 // Platform-specific DeferredCode functions.
41 43
42 void DeferredCode::SaveRegisters() { UNIMPLEMENTED(); } 44 void DeferredCode::SaveRegisters() { UNIMPLEMENTED(); }
(...skipping 51 matching lines...)
94 #define __ ACCESS_MASM(masm_) 96 #define __ ACCESS_MASM(masm_)
95 97
96 98
97 void CodeGenerator::DeclareGlobals(Handle<FixedArray> a) { 99 void CodeGenerator::DeclareGlobals(Handle<FixedArray> a) {
98 UNIMPLEMENTED(); 100 UNIMPLEMENTED();
99 } 101 }
100 102
101 void CodeGenerator::TestCodeGenerator() { 103 void CodeGenerator::TestCodeGenerator() {
102 // Compile a function from a string, and run it. 104 // Compile a function from a string, and run it.
103 Handle<JSFunction> test_function = Compiler::Compile( 105 Handle<JSFunction> test_function = Compiler::Compile(
104 Factory::NewStringFromAscii(CStrVector("42")), 106 Factory::NewStringFromAscii(CStrVector("39; 42;")),
105 Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")), 107 Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")),
106 0, 108 0,
107 0, 109 0,
108 NULL, 110 NULL,
109 NULL); 111 NULL);
110 112
111 Code* code_object = test_function->code(); // Local for debugging ease. 113 Code* code_object = test_function->code(); // Local for debugging ease.
112 USE(code_object); 114 USE(code_object);
113 115
114 // Create a dummy function and context. 116 // Create a dummy function and context.
115 Handle<JSFunction> bridge = 117 Handle<JSFunction> bridge =
116 Factory::NewFunction(Factory::empty_symbol(), Factory::undefined_value()); 118 Factory::NewFunction(Factory::empty_symbol(), Factory::undefined_value());
117 Handle<Context> context = 119 Handle<Context> context =
118 Factory::NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge); 120 Factory::NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
119 121
120 test_function = Factory::NewFunctionFromBoilerplate( 122 test_function = Factory::NewFunctionFromBoilerplate(
121 test_function, 123 test_function,
122 context); 124 context);
123 125
124 bool pending_exceptions; 126 bool pending_exceptions;
125 Handle<Object> result = 127 Handle<Object> result =
126 Execution::Call(test_function, 128 Execution::Call(test_function,
127 Handle<Object>::cast(test_function), 129 Handle<Object>::cast(test_function),
128 0, 130 0,
129 NULL, 131 NULL,
130 &pending_exceptions); 132 &pending_exceptions);
133 // Function compiles and runs, but returns a JSFunction object.
131 CHECK(result->IsSmi()); 134 CHECK(result->IsSmi());
132 CHECK_EQ(42, Smi::cast(*result)->value()); 135 CHECK_EQ(42, Smi::cast(*result)->value());
133 } 136 }
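An aside on the expected result: this early x64 port keeps the ia32-style 1-bit smi tag, which is why the old GenCode below loads Immediate(0x54) for 42. A minimal standalone sketch of that encoding (hypothetical helper names, not V8 API):

#include <cassert>
#include <cstdint>

constexpr int kSmiTagSize = 1;  // low bit is the tag; the tag value for smis is 0

constexpr std::intptr_t SmiEncode(std::intptr_t value) { return value << kSmiTagSize; }
constexpr std::intptr_t SmiDecode(std::intptr_t tagged) { return tagged >> kSmiTagSize; }

int main() {
  assert(SmiEncode(42) == 0x54);  // the immediate the old stub moved into rax
  assert(SmiDecode(0x54) == 42);  // what CHECK_EQ(42, Smi::cast(*result)->value()) recovers
  return 0;
}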
134 137
135 138
136 void CodeGenerator::GenCode(FunctionLiteral* function) { 139 void CodeGenerator::GenCode(FunctionLiteral* function) {
137 // Record the position for debugging purposes. 140 // Record the position for debugging purposes.
138 CodeForFunctionPosition(function); 141 CodeForFunctionPosition(function);
139 // ZoneList<Statement*>* body = fun->body(); 142 ZoneList<Statement*>* body = function->body();
140 143
141 // Initialize state. 144 // Initialize state.
142 ASSERT(scope_ == NULL); 145 ASSERT(scope_ == NULL);
143 scope_ = function->scope(); 146 scope_ = function->scope();
144 ASSERT(allocator_ == NULL); 147 ASSERT(allocator_ == NULL);
145 RegisterAllocator register_allocator(this); 148 RegisterAllocator register_allocator(this);
146 allocator_ = &register_allocator; 149 allocator_ = &register_allocator;
147 ASSERT(frame_ == NULL); 150 ASSERT(frame_ == NULL);
148 frame_ = new VirtualFrame(); 151 frame_ = new VirtualFrame();
149 set_in_spilled_code(false); 152 set_in_spilled_code(false);
(...skipping 19 matching lines...)
169 172
170 // Entry: 173 // Entry:
171 // Stack: receiver, arguments, return address. 174 // Stack: receiver, arguments, return address.
172 // rbp: caller's frame pointer 175 // rbp: caller's frame pointer
173 // rsp: stack pointer 176 // rsp: stack pointer
174 // rdi: called JS function 177 // rdi: called JS function
175 // rsi: callee's context 178 // rsi: callee's context
176 allocator_->Initialize(); 179 allocator_->Initialize();
177 frame_->Enter(); 180 frame_->Enter();
178 181
179 Result return_register = allocator_->Allocate(rax); 182 // Allocate space for locals and initialize them.
183 frame_->AllocateStackSlots();
184 // Initialize the function return target after the locals are set
185 // up, because it needs the expected frame height from the frame.
186 function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
187 function_return_is_shadowed_ = false;
180 188
181 __ movq(return_register.reg(), Immediate(0x54)); // Smi 42 189 VisitStatements(body);
190 }
191 // Adjust for function-level loop nesting.
192 loop_nesting_ -= function->loop_nesting();
182 193
183 GenerateReturnSequence(&return_register); 194 // Code generation state must be reset.
195 ASSERT(state_ == NULL);
196 ASSERT(loop_nesting() == 0);
197 ASSERT(!function_return_is_shadowed_);
198 function_return_.Unuse();
199 DeleteFrame();
200
201 // Process any deferred code using the register allocator.
202 if (!HasStackOverflow()) {
203 HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
204 JumpTarget::set_compiling_deferred_code(true);
205 ProcessDeferred();
206 JumpTarget::set_compiling_deferred_code(false);
184 } 207 }
208
209 // There is no need to delete the register allocator, it is a
210 // stack-allocated local.
211 allocator_ = NULL;
212 scope_ = NULL;
185 } 213 }
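The ordering comment in the new GenCode (initialize the return target only after AllocateStackSlots) is worth making concrete: a jump target snapshots the frame height it expects to be bound at. A toy model, with illustrative slot counts that are assumptions rather than values from this patch:

#include <cassert>

struct ToyJumpTarget {
  int expected_height = -1;  // frame height recorded when the target is set up
};

int main() {
  int frame_height = 0;
  frame_height += 3;  // frame_->Enter(): saved rbp, function, context (illustrative)
  frame_height += 2;  // frame_->AllocateStackSlots(): two locals (illustrative)

  ToyJumpTarget function_return;
  function_return.expected_height = frame_height;  // must happen after the locals

  // Binding the target against any other frame height would corrupt the frame.
  assert(function_return.expected_height == 5);
  return 0;
}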
186 214
187 void CodeGenerator::GenerateReturnSequence(Result* return_value) { 215 void CodeGenerator::GenerateReturnSequence(Result* return_value) {
188 // The return value is a live (but not currently reference counted) 216 // The return value is a live (but not currently reference counted)
189 // reference to rax. This is safe because the current frame does not 217 // reference to rax. This is safe because the current frame does not
190 // contain a reference to rax (it is prepared for the return by spilling 218 // contain a reference to rax (it is prepared for the return by spilling
191 // all registers). 219 // all registers).
192 if (FLAG_trace) { 220 if (FLAG_trace) {
193 frame_->Push(return_value); 221 frame_->Push(return_value);
194 // *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1); 222 // *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
(...skipping 19 matching lines...)
214 242
215 void CodeGenerator::GenerateFastCaseSwitchJumpTable(SwitchStatement* a, 243 void CodeGenerator::GenerateFastCaseSwitchJumpTable(SwitchStatement* a,
216 int b, 244 int b,
217 int c, 245 int c,
218 Label* d, 246 Label* d,
219 Vector<Label*> e, 247 Vector<Label*> e,
220 Vector<Label> f) { 248 Vector<Label> f) {
221 UNIMPLEMENTED(); 249 UNIMPLEMENTED();
222 } 250 }
223 251
224 void CodeGenerator::VisitStatements(ZoneList<Statement*>* a) { 252 #ifdef DEBUG
225 UNIMPLEMENTED(); 253 bool CodeGenerator::HasValidEntryRegisters() {
254 return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0))
255 && (allocator()->count(rbx) == (frame()->is_used(rbx) ? 1 : 0))
256 && (allocator()->count(rcx) == (frame()->is_used(rcx) ? 1 : 0))
257 && (allocator()->count(rdx) == (frame()->is_used(rdx) ? 1 : 0))
258 && (allocator()->count(rdi) == (frame()->is_used(rdi) ? 1 : 0))
259 && (allocator()->count(r8) == (frame()->is_used(r8) ? 1 : 0))
260 && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0))
261 && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0))
262 && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0))
263 && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0))
264 && (allocator()->count(r13) == (frame()->is_used(r13) ? 1 : 0))
265 && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0));
226 } 266 }
267 #endif
268
269
270 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
271 ASSERT(!in_spilled_code());
272 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
273 Visit(statements->at(i));
274 }
275 }
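The loop guard in the new VisitStatements does real work: a statement that ends control flow (such as a return) deletes the frame, and the remaining statements are skipped instead of being compiled as dead code. A self-contained toy model of that guard (hypothetical, not V8 API):

#include <cassert>
#include <functional>
#include <vector>

int main() {
  bool has_valid_frame = true;
  int visited = 0;
  std::vector<std::function<void()>> statements = {
      [&] { visited++; },                           // "39;"
      [&] { visited++; has_valid_frame = false; },  // "return 42;" drops the frame
      [&] { visited++; }};                          // unreachable, never visited
  for (size_t i = 0; has_valid_frame && i < statements.size(); i++) {
    statements[i]();
  }
  assert(visited == 2);
  return 0;
}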
276
227 277
228 void CodeGenerator::VisitBlock(Block* a) { 278 void CodeGenerator::VisitBlock(Block* a) {
229 UNIMPLEMENTED(); 279 UNIMPLEMENTED();
230 } 280 }
231 281
232 void CodeGenerator::VisitDeclaration(Declaration* a) { 282 void CodeGenerator::VisitDeclaration(Declaration* a) {
233 UNIMPLEMENTED(); 283 UNIMPLEMENTED();
234 } 284 }
235 285
236 void CodeGenerator::VisitExpressionStatement(ExpressionStatement* a) { 286
237 UNIMPLEMENTED(); 287 void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
288 ASSERT(!in_spilled_code());
289 Comment cmnt(masm_, "[ ExpressionStatement");
290 CodeForStatementPosition(node);
291 Expression* expression = node->expression();
292 expression->MarkAsStatement();
293 Load(expression);
294 // Remove the lingering expression result from the top of stack.
295 frame_->Drop();
238 } 296 }
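The Load/Drop pairing above keeps the frame height invariant across a statement: Load leaves exactly one value on the frame and Drop discards it. A toy sketch (hypothetical names):

#include <cassert>
#include <vector>

int main() {
  std::vector<int> frame;
  size_t height_before = frame.size();
  frame.push_back(39);  // Load(expression): exactly one value pushed
  frame.pop_back();     // frame_->Drop(): the statement's value is discarded
  assert(frame.size() == height_before);  // no net effect on the frame
  return 0;
}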
239 297
298
240 void CodeGenerator::VisitEmptyStatement(EmptyStatement* a) { 299 void CodeGenerator::VisitEmptyStatement(EmptyStatement* a) {
241 UNIMPLEMENTED(); 300 UNIMPLEMENTED();
242 } 301 }
243 302
244 void CodeGenerator::VisitIfStatement(IfStatement* a) { 303 void CodeGenerator::VisitIfStatement(IfStatement* a) {
245 UNIMPLEMENTED(); 304 UNIMPLEMENTED();
246 } 305 }
247 306
248 void CodeGenerator::VisitContinueStatement(ContinueStatement* a) { 307 void CodeGenerator::VisitContinueStatement(ContinueStatement* a) {
249 UNIMPLEMENTED(); 308 UNIMPLEMENTED();
250 } 309 }
251 310
252 void CodeGenerator::VisitBreakStatement(BreakStatement* a) { 311 void CodeGenerator::VisitBreakStatement(BreakStatement* a) {
253 UNIMPLEMENTED(); 312 UNIMPLEMENTED();
254 } 313 }
255 314
256 void CodeGenerator::VisitReturnStatement(ReturnStatement* a) { 315
257 UNIMPLEMENTED(); 316 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
317 ASSERT(!in_spilled_code());
318 Comment cmnt(masm_, "[ ReturnStatement");
319
320 CodeForStatementPosition(node);
321 Load(node->expression());
322 Result return_value = frame_->Pop();
323 /* if (function_return_is_shadowed_) {
324 function_return_.Jump(&return_value);
325 } else {
326 frame_->PrepareForReturn();
327 if (function_return_.is_bound()) {
328 // If the function return label is already bound we reuse the
329 // code by jumping to the return site.
330 function_return_.Jump(&return_value);
331 } else {
332 function_return_.Bind(&return_value);
333 GenerateReturnSequence(&return_value);
334 }
335 }
336 */
337 GenerateReturnSequence(&return_value);
258 } 338 }
259 339
340
260 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* a) { 341 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* a) {
261 UNIMPLEMENTED(); 342 UNIMPLEMENTED();
262 } 343 }
263 344
264 void CodeGenerator::VisitWithExitStatement(WithExitStatement* a) { 345 void CodeGenerator::VisitWithExitStatement(WithExitStatement* a) {
265 UNIMPLEMENTED(); 346 UNIMPLEMENTED();
266 } 347 }
267 348
268 void CodeGenerator::VisitSwitchStatement(SwitchStatement* a) { 349 void CodeGenerator::VisitSwitchStatement(SwitchStatement* a) {
269 UNIMPLEMENTED(); 350 UNIMPLEMENTED();
(...skipping 25 matching lines...)
295 376
296 void CodeGenerator::VisitFunctionBoilerplateLiteral( 377 void CodeGenerator::VisitFunctionBoilerplateLiteral(
297 FunctionBoilerplateLiteral* a) { 378 FunctionBoilerplateLiteral* a) {
298 UNIMPLEMENTED(); 379 UNIMPLEMENTED();
299 } 380 }
300 381
301 void CodeGenerator::VisitConditional(Conditional* a) { 382 void CodeGenerator::VisitConditional(Conditional* a) {
302 UNIMPLEMENTED(); 383 UNIMPLEMENTED();
303 } 384 }
304 385
305 void CodeGenerator::VisitSlot(Slot* a) { 386 void CodeGenerator::VisitSlot(Slot* node) {
306 UNIMPLEMENTED(); 387 Comment cmnt(masm_, "[ Slot");
388 LoadFromSlot(node, typeof_state());
307 } 389 }
308 390
309 void CodeGenerator::VisitVariableProxy(VariableProxy* a) { 391
310 UNIMPLEMENTED(); 392 void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
393 Comment cmnt(masm_, "[ VariableProxy");
394 Variable* var = node->var();
395 Expression* expr = var->rewrite();
396 if (expr != NULL) {
397 Visit(expr);
398 } else {
399 ASSERT(var->is_global());
400 Reference ref(this, node);
401 // ref.GetValue(typeof_state());
402 }
311 } 403 }
312 404
313 void CodeGenerator::VisitLiteral(Literal* a) { 405
314 UNIMPLEMENTED(); 406 void CodeGenerator::VisitLiteral(Literal* node) {
407 Comment cmnt(masm_, "[ Literal");
408 frame_->Push(node->handle());
315 } 409 }
316 410
411
317 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* a) { 412 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* a) {
318 UNIMPLEMENTED(); 413 UNIMPLEMENTED();
319 } 414 }
320 415
321 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* a) { 416 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* a) {
322 UNIMPLEMENTED(); 417 UNIMPLEMENTED();
323 } 418 }
324 419
325 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* a) { 420 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* a) {
326 UNIMPLEMENTED(); 421 UNIMPLEMENTED();
327 } 422 }
328 423
329 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* a) { 424 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* a) {
330 UNIMPLEMENTED(); 425 UNIMPLEMENTED();
331 } 426 }
332 427
333 void CodeGenerator::VisitAssignment(Assignment* a) { 428
334 UNIMPLEMENTED(); 429 void CodeGenerator::VisitAssignment(Assignment* node) {
430 Comment cmnt(masm_, "[ Assignment");
431 CodeForStatementPosition(node);
432
433 { Reference target(this, node->target());
434 if (target.is_illegal()) {
435 // Fool the virtual frame into thinking that we left the assignment's
436 // value on the frame.
437 frame_->Push(Smi::FromInt(0));
438 return;
439 }
440 Variable* var = node->target()->AsVariableProxy()->AsVariable();
441
442 if (node->starts_initialization_block()) {
443 ASSERT(target.type() == Reference::NAMED ||
444 target.type() == Reference::KEYED);
445 // Change to slow case in the beginning of an initialization
446 // block to avoid the quadratic behavior of repeatedly adding
447 // fast properties.
448
449 // The receiver is the argument to the runtime call. It is the
450 // first value pushed when the reference was loaded to the
451 // frame.
452 frame_->PushElementAt(target.size() - 1);
453 // Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
454 }
455 if (node->op() == Token::ASSIGN ||
456 node->op() == Token::INIT_VAR ||
457 node->op() == Token::INIT_CONST) {
458 Load(node->value());
459
460 } else {
461 // TODO(X64): Make compound assignments work.
462 /*
463 Literal* literal = node->value()->AsLiteral();
464 bool overwrite_value =
465 (node->value()->AsBinaryOperation() != NULL &&
466 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
467 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
468 // There are two cases where the target is not read in the right hand
469 // side, that are easy to test for: the right hand side is a literal,
470 // or the right hand side is a different variable. TakeValue invalidates
471 // the target, with an implicit promise that it will be written to again
472 // before it is read.
473 if (literal != NULL || (right_var != NULL && right_var != var)) {
474 target.TakeValue(NOT_INSIDE_TYPEOF);
475 } else {
476 target.GetValue(NOT_INSIDE_TYPEOF);
477 }
478 */
479 Load(node->value());
480 /*
481 GenericBinaryOperation(node->binary_op(),
482 node->type(),
483 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
484 */
485 }
486
487 if (var != NULL &&
488 var->mode() == Variable::CONST &&
489 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
490 // Assignment ignored - leave the value on the stack.
491 } else {
492 CodeForSourcePosition(node->position());
493 if (node->op() == Token::INIT_CONST) {
494 // Dynamic constant initializations must use the function context
495 // and initialize the actual constant declared. Dynamic variable
496 // initializations are simply assignments and use SetValue.
497 target.SetValue(CONST_INIT);
498 } else {
499 target.SetValue(NOT_CONST_INIT);
500 }
501 if (node->ends_initialization_block()) {
502 ASSERT(target.type() == Reference::NAMED ||
503 target.type() == Reference::KEYED);
504 // End of initialization block. Revert to fast case. The
505 // argument to the runtime call is the receiver, which is the
506 // first value pushed as part of the reference, which is below
507 // the lhs value.
508 frame_->PushElementAt(target.size());
509 // Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
510 }
511 }
512 }
335 } 513 }
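One detail the new VisitAssignment relies on: SetValue leaves the assigned value on the frame, because an assignment is an expression whose value is its right-hand side. A toy model of why that matters for chains like a = b = 42 (hypothetical, not V8 API):

#include <cassert>
#include <map>
#include <string>
#include <vector>

int main() {
  std::map<std::string, int> slots;  // stands in for variable slots
  std::vector<int> frame;            // stands in for the virtual frame

  frame.push_back(42);        // Load(node->value()) for the innermost RHS
  slots["b"] = frame.back();  // inner SetValue: store, value kept on the frame
  slots["a"] = frame.back();  // outer SetValue reuses the same top-of-stack value

  assert(slots["a"] == 42 && slots["b"] == 42);
  assert(frame.size() == 1);  // the chain still has its expression value
  return 0;
}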
336 514
515
337 void CodeGenerator::VisitThrow(Throw* a) { 516 void CodeGenerator::VisitThrow(Throw* a) {
338 UNIMPLEMENTED(); 517 // UNIMPLEMENTED();
339 } 518 }
340 519
341 void CodeGenerator::VisitProperty(Property* a) { 520 void CodeGenerator::VisitProperty(Property* a) {
342 UNIMPLEMENTED(); 521 UNIMPLEMENTED();
343 } 522 }
344 523
345 void CodeGenerator::VisitCall(Call* a) { 524 void CodeGenerator::VisitCall(Call* a) {
346 UNIMPLEMENTED(); 525 UNIMPLEMENTED();
347 } 526 }
348 527
(...skipping 70 matching lines...)
419 } 598 }
420 599
421 void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* a) { 600 void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* a) {
422 UNIMPLEMENTED(); 601 UNIMPLEMENTED();
423 } 602 }
424 603
425 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* a) { 604 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* a) {
426 UNIMPLEMENTED(); 605 UNIMPLEMENTED();
427 } 606 }
428 607
608 // -----------------------------------------------------------------------------
609 // CodeGenerator implementation of Expressions
610
611 void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
612 #ifdef DEBUG
613 int original_height = frame_->height();
614 #endif
615 ASSERT(!in_spilled_code());
616 JumpTarget true_target;
617 JumpTarget false_target;
618 ControlDestination dest(&true_target, &false_target, true);
619 LoadCondition(x, typeof_state, &dest, false);
620
621 if (dest.false_was_fall_through()) {
622 // The false target was just bound.
623 JumpTarget loaded;
624 frame_->Push(Factory::false_value());
625 // There may be dangling jumps to the true target.
626 if (true_target.is_linked()) {
627 loaded.Jump();
628 true_target.Bind();
629 frame_->Push(Factory::true_value());
630 loaded.Bind();
631 }
632
633 } else if (dest.is_used()) {
634 // There is true, and possibly false, control flow (with true as
635 // the fall through).
636 JumpTarget loaded;
637 frame_->Push(Factory::true_value());
638 if (false_target.is_linked()) {
639 loaded.Jump();
640 false_target.Bind();
641 frame_->Push(Factory::false_value());
642 loaded.Bind();
643 }
644
645 } else {
646 // We have a valid value on top of the frame, but we still may
647 // have dangling jumps to the true and false targets from nested
648 // subexpressions (eg, the left subexpressions of the
649 // short-circuited boolean operators).
650 ASSERT(has_valid_frame());
651 if (true_target.is_linked() || false_target.is_linked()) {
652 JumpTarget loaded;
653 loaded.Jump(); // Don't lose the current TOS.
654 if (true_target.is_linked()) {
655 true_target.Bind();
656 frame_->Push(Factory::true_value());
657 if (false_target.is_linked()) {
658 loaded.Jump();
659 }
660 }
661 if (false_target.is_linked()) {
662 false_target.Bind();
663 frame_->Push(Factory::false_value());
664 }
665 loaded.Bind();
666 }
667 }
668
669 ASSERT(has_valid_frame());
670 ASSERT(frame_->height() == original_height + 1);
671 }
672
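A hand-lowered analogue of the materialization in Load() may help: a comparison compiled into control flow is turned back into a value by having the fall-through target produce one constant and the dangling jump produce the other, joining at a single point. A runnable sketch in ordinary C++, mirroring the false-fall-through case:

#include <cassert>

bool lowered_less_than(int a, int b) {
  bool result;
  if (a < b) goto true_target;
  // The false target is the fall-through: "push" false, jump to the join.
  result = false;
  goto loaded;
true_target:
  result = true;  // the dangling jump to the true target "pushes" true
loaded:
  return result;  // exactly one value, as the frame-height ASSERT checks
}

int main() {
  assert(lowered_less_than(1, 2));
  assert(!lowered_less_than(2, 1));
  return 0;
}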
673
674 // Emit code to load the value of an expression to the top of the
675 // frame. If the expression is boolean-valued it may be compiled (or
676 // partially compiled) into control flow to the control destination.
677 // If force_control is true, control flow is forced.
678 void CodeGenerator::LoadCondition(Expression* x,
679 TypeofState typeof_state,
680 ControlDestination* dest,
681 bool force_control) {
682 ASSERT(!in_spilled_code());
683 int original_height = frame_->height();
684
685 { CodeGenState new_state(this, typeof_state, dest);
686 Visit(x);
687
688 // If we hit a stack overflow, we may not have actually visited
689 // the expression. In that case, we ensure that we have a
690 // valid-looking frame state because we will continue to generate
691 // code as we unwind the C++ stack.
692 //
693 // It's possible to have both a stack overflow and a valid frame
694 // state (eg, a subexpression overflowed, visiting it returned
695 // with a dummied frame state, and visiting this expression
696 // returned with a normal-looking state).
697 if (HasStackOverflow() &&
698 !dest->is_used() &&
699 frame_->height() == original_height) {
700 dest->Goto(true);
701 }
702 }
703
704 if (force_control && !dest->is_used()) {
705 // Convert the TOS value into flow to the control destination.
706 // TODO(X64): Make control flow to control destinations work.
707 // ToBoolean(dest);
708 }
709
710 ASSERT(!(force_control && !dest->is_used()));
711 ASSERT(dest->is_used() || frame_->height() == original_height + 1);
712 }
713
714
715 void CodeGenerator::LoadUnsafeSmi(Register target, Handle<Object> value) {
716 UNIMPLEMENTED();
717 // TODO(X64): Implement security policy for loads of smis.
718 }
719
720
721 bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
722 return false;
723 }
724
725 // -----------------------------------------------------------------------------
726 // CodeGenerator implementation of variables, lookups, and stores.
727
728 Reference::Reference(CodeGenerator* cgen, Expression* expression)
729 : cgen_(cgen), expression_(expression), type_(ILLEGAL) {
730 cgen->LoadReference(this);
731 }
732
733
734 Reference::~Reference() {
735 cgen_->UnloadReference(this);
736 }
737
738
739 void CodeGenerator::LoadReference(Reference* ref) {
740 // References are loaded from both spilled and unspilled code. Set the
741 // state to unspilled to allow that (and explicitly spill after
742 // construction at the construction sites).
743 bool was_in_spilled_code = in_spilled_code_;
744 in_spilled_code_ = false;
745
746 Comment cmnt(masm_, "[ LoadReference");
747 Expression* e = ref->expression();
748 Property* property = e->AsProperty();
749 Variable* var = e->AsVariableProxy()->AsVariable();
750
751 if (property != NULL) {
752 // The expression is either a property or a variable proxy that rewrites
753 // to a property.
754 Load(property->obj());
755 // We use a named reference if the key is a literal symbol, unless it is
756 // a string that can be legally parsed as an integer. This is because
757 // otherwise we will not get into the slow case code that handles [] on
758 // String objects.
759 Literal* literal = property->key()->AsLiteral();
760 uint32_t dummy;
761 if (literal != NULL &&
762 literal->handle()->IsSymbol() &&
763 !String::cast(*(literal->handle()))->AsArrayIndex(&dummy)) {
764 ref->set_type(Reference::NAMED);
765 } else {
766 Load(property->key());
767 ref->set_type(Reference::KEYED);
768 }
769 } else if (var != NULL) {
770 // The expression is a variable proxy that does not rewrite to a
771 // property. Global variables are treated as named property references.
772 if (var->is_global()) {
773 LoadGlobal();
774 ref->set_type(Reference::NAMED);
775 } else {
776 ASSERT(var->slot() != NULL);
777 ref->set_type(Reference::SLOT);
778 }
779 } else {
780 // Anything else is a runtime error.
781 Load(e);
782 // frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
783 }
784
785 in_spilled_code_ = was_in_spilled_code;
786 }
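The symbol-vs-array-index distinction in LoadReference decides the reference type: a literal key becomes a NAMED reference unless it parses as an array index, so o["length"] is NAMED while o["10"] stays KEYED. A simplified standalone check (it ignores the 2^32-1 bound the real AsArrayIndex enforces):

#include <cassert>
#include <cctype>
#include <string>

bool ParsesAsArrayIndex(const std::string& key) {
  if (key.empty()) return false;
  for (char c : key) {
    if (!std::isdigit(static_cast<unsigned char>(c))) return false;
  }
  return true;  // the real code also bounds the value to a valid array index
}

int main() {
  assert(!ParsesAsArrayIndex("length"));  // NAMED reference
  assert(ParsesAsArrayIndex("10"));       // KEYED reference
  return 0;
}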
787
788
789 void CodeGenerator::UnloadReference(Reference* ref) {
790 // Pop a reference from the stack while preserving TOS.
791 Comment cmnt(masm_, "[ UnloadReference");
792 frame_->Nip(ref->size());
793 }
794
795
796 void Reference::SetValue(InitState init_state) {
797 ASSERT(cgen_->HasValidEntryRegisters());
798 ASSERT(!is_illegal());
799 MacroAssembler* masm = cgen_->masm();
800 switch (type_) {
801 case SLOT: {
802 Comment cmnt(masm, "[ Store to Slot");
803 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
804 ASSERT(slot != NULL);
805 cgen_->StoreToSlot(slot, init_state);
806 break;
807 }
808 // TODO(X64): Make cases other than SLOT work.
809 /*
810 case NAMED: {
811 Comment cmnt(masm, "[ Store to named Property");
812 cgen_->frame()->Push(GetName());
813 Result answer = cgen_->frame()->CallStoreIC();
814 cgen_->frame()->Push(&answer);
815 break;
816 }
817
818 case KEYED: {
819 Comment cmnt(masm, "[ Store to keyed Property");
820
821 // Generate inlined version of the keyed store if the code is in
822 // a loop and the key is likely to be a smi.
823 Property* property = expression()->AsProperty();
824 ASSERT(property != NULL);
825 SmiAnalysis* key_smi_analysis = property->key()->type();
826
827 if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
828 Comment cmnt(masm, "[ Inlined store to keyed Property");
829
830 // Get the receiver, key and value into registers.
831 Result value = cgen_->frame()->Pop();
832 Result key = cgen_->frame()->Pop();
833 Result receiver = cgen_->frame()->Pop();
834
835 Result tmp = cgen_->allocator_->Allocate();
836 ASSERT(tmp.is_valid());
837
838 // Determine whether the value is a constant before putting it
839 // in a register.
840 bool value_is_constant = value.is_constant();
841
842 // Make sure that value, key and receiver are in registers.
843 value.ToRegister();
844 key.ToRegister();
845 receiver.ToRegister();
846
847 DeferredReferenceSetKeyedValue* deferred =
848 new DeferredReferenceSetKeyedValue(value.reg(),
849 key.reg(),
850 receiver.reg());
851
852 // Check that the value is a smi if it is not a constant. We
853 // can skip the write barrier for smis and constants.
854 if (!value_is_constant) {
855 __ test(value.reg(), Immediate(kSmiTagMask));
856 deferred->Branch(not_zero);
857 }
858
859 // Check that the key is a non-negative smi.
860 __ test(key.reg(), Immediate(kSmiTagMask | 0x80000000));
861 deferred->Branch(not_zero);
862
863 // Check that the receiver is not a smi.
864 __ test(receiver.reg(), Immediate(kSmiTagMask));
865 deferred->Branch(zero);
866
867 // Check that the receiver is a JSArray.
868 __ mov(tmp.reg(),
869 FieldOperand(receiver.reg(), HeapObject::kMapOffset));
870 __ movzx_b(tmp.reg(),
871 FieldOperand(tmp.reg(), Map::kInstanceTypeOffset));
872 __ cmp(tmp.reg(), JS_ARRAY_TYPE);
873 deferred->Branch(not_equal);
874
875 // Check that the key is within bounds. Both the key and the
876 // length of the JSArray are smis.
877 __ cmp(key.reg(),
878 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
879 deferred->Branch(greater_equal);
880
881 // Get the elements array from the receiver and check that it
882 // is not a dictionary.
883 __ mov(tmp.reg(),
884 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
885 // Bind the deferred code patch site to be able to locate the
886 // fixed array map comparison. When debugging, we patch this
887 // comparison to always fail so that we will hit the IC call
888 // in the deferred code which will allow the debugger to
889 // break for fast case stores.
890 __ bind(deferred->patch_site());
891 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
892 Immediate(Factory::fixed_array_map()));
893 deferred->Branch(not_equal);
894
895 // Store the value.
896 __ mov(Operand(tmp.reg(),
897 key.reg(),
898 times_2,
899 Array::kHeaderSize - kHeapObjectTag),
900 value.reg());
901 __ IncrementCounter(&Counters::keyed_store_inline, 1);
902
903 deferred->BindExit();
904
905 cgen_->frame()->Push(&receiver);
906 cgen_->frame()->Push(&key);
907 cgen_->frame()->Push(&value);
908 } else {
909 Result answer = cgen_->frame()->CallKeyedStoreIC();
910 // Make sure that we do not have a test instruction after the
911 // call. A test instruction after the call is used to
912 // indicate that we have generated an inline version of the
913 // keyed store.
914 __ nop();
915 cgen_->frame()->Push(&answer);
916 }
917 break;
918 }
919 */
920 default:
921 UNREACHABLE();
922 }
923 }
924
925
926 Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
927 // Currently, this assertion will fail if we try to assign to
928 // a constant variable that is constant because it is read-only
929 // (such as the variable referring to a named function expression).
930 // We need to implement assignments to read-only variables.
931 // Ideally, we should do this during AST generation (by converting
932 // such assignments into expression statements); however, in general
933 // we may not be able to make the decision until past AST generation,
934 // that is when the entire program is known.
935 ASSERT(slot != NULL);
936 int index = slot->index();
937 switch (slot->type()) {
938 case Slot::PARAMETER:
939 return frame_->ParameterAt(index);
940
941 case Slot::LOCAL:
942 return frame_->LocalAt(index);
943
944 case Slot::CONTEXT: {
945 // Follow the context chain if necessary.
946 ASSERT(!tmp.is(rsi)); // do not overwrite context register
947 Register context = rsi;
948 int chain_length = scope()->ContextChainLength(slot->var()->scope());
949 for (int i = 0; i < chain_length; i++) {
950 // Load the closure.
951 // (All contexts, even 'with' contexts, have a closure,
952 // and it is the same for all contexts inside a function.
953 // There is no need to go to the function context first.)
954 __ movq(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
955 // Load the function context (which is the incoming, outer context).
956 __ movq(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
957 context = tmp;
958 }
959 // We may have a 'with' context now. Get the function context.
960 // (In fact this mov may never be needed, since the scope analysis
961 // may not permit a direct context access in this case and thus we are
962 // always at a function context. However it is safe to dereference
963 // because the function context of a function context is itself. Before
964 // deleting this mov we should try to create a counter-example first,
965 // though...)
966 __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
967 return ContextOperand(tmp, index);
968 }
969
970 default:
971 UNREACHABLE();
972 return Operand(rsp, 0);
973 }
974 }
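The CONTEXT case walks one closure hop per level of nesting and finishes with an FCONTEXT load that is a no-op on a function context (its function context is itself). A toy pointer model of that walk (hypothetical structs, not V8's actual layout):

#include <cassert>

struct ToyContext;
struct ToyClosure { ToyContext* context; };

struct ToyContext {
  ToyClosure* closure;   // Context::CLOSURE_INDEX
  ToyContext* fcontext;  // Context::FCONTEXT_INDEX; self for function contexts
  int slots[4];
};

int main() {
  ToyContext outer = {nullptr, nullptr, {0, 7, 0, 0}};
  outer.fcontext = &outer;  // a function context's FCONTEXT is itself
  ToyClosure fn = {&outer};
  ToyContext inner = {&fn, nullptr, {0, 0, 0, 0}};
  inner.fcontext = &inner;

  // chain_length == 1: one hop from the inner context to the enclosing one.
  ToyContext* context = &inner;
  ToyContext* tmp = context->closure->context;  // load the closure, then its context
  tmp = tmp->fcontext;                          // safe even for 'with' contexts
  assert(tmp->slots[1] == 7);                   // ContextOperand(tmp, index)
  return 0;
}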
975
976
977 Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
978 Result tmp,
979 JumpTarget* slow) {
980 UNIMPLEMENTED();
981 return Operand(rsp, 0);
982 }
983
984
985 void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
986 if (slot->type() == Slot::LOOKUP) {
987 ASSERT(slot->var()->is_dynamic());
988
989 JumpTarget slow;
990 JumpTarget done;
991 Result value;
992
993 // Generate fast-case code for variables that might be shadowed by
994 // eval-introduced variables. Eval is used a lot without
995 // introducing variables. In those cases, we do not want to
996 // perform a runtime call for all variables in the scope
997 // containing the eval.
998 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
999 value = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow);
1000 // If there was no control flow to slow, we can exit early.
1001 if (!slow.is_linked()) {
1002 frame_->Push(&value);
1003 return;
1004 }
1005
1006 done.Jump(&value);
1007
1008 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
1009 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
1010 // Only generate the fast case for locals that rewrite to slots.
1011 // This rules out argument loads.
1012 if (potential_slot != NULL) {
1013 // Allocate a fresh register to use as a temp in
1014 // ContextSlotOperandCheckExtensions and to hold the result
1015 // value.
1016 value = allocator_->Allocate();
1017 ASSERT(value.is_valid());
1018 __ movq(value.reg(),
1019 ContextSlotOperandCheckExtensions(potential_slot,
1020 value,
1021 &slow));
1022 if (potential_slot->var()->mode() == Variable::CONST) {
1023 __ movq(kScratchRegister, Factory::the_hole_value(),
1024 RelocInfo::EMBEDDED_OBJECT);
1025 __ cmpq(value.reg(), kScratchRegister);
1026 done.Branch(not_equal, &value);
1027 __ movq(value.reg(), Factory::undefined_value(),
1028 RelocInfo::EMBEDDED_OBJECT);
1029 }
1030 // There is always control flow to slow from
1031 // ContextSlotOperandCheckExtensions so we have to jump around
1032 // it.
1033 done.Jump(&value);
1034 }
1035 }
1036
1037 slow.Bind();
1038 // A runtime call is inevitable. We eagerly sync frame elements
1039 // to memory so that we can push the arguments directly into place
1040 // on top of the frame.
1041 frame_->SyncRange(0, frame_->element_count() - 1);
1042 frame_->EmitPush(rsi);
1043 __ movq(kScratchRegister, slot->var()->name(), RelocInfo::EMBEDDED_OBJECT);
1044 frame_->EmitPush(kScratchRegister);
1045 if (typeof_state == INSIDE_TYPEOF) {
1046 // value =
1047 // frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
1048 } else {
1049 // value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
1050 }
1051
1052 done.Bind(&value);
1053 frame_->Push(&value);
1054
1055 } else if (slot->var()->mode() == Variable::CONST) {
1056 // Const slots may contain 'the hole' value (the constant hasn't been
1057 // initialized yet) which needs to be converted into the 'undefined'
1058 // value.
1059 //
1060 // We currently spill the virtual frame because constants use the
1061 // potentially unsafe direct-frame access of SlotOperand.
1062 VirtualFrame::SpilledScope spilled_scope;
1063 Comment cmnt(masm_, "[ Load const");
1064 JumpTarget exit;
1065 __ movq(rcx, SlotOperand(slot, rcx));
1066 __ movq(kScratchRegister, Factory::the_hole_value(),
1067 RelocInfo::EMBEDDED_OBJECT);
1068 __ cmpq(rcx, kScratchRegister);
1069 exit.Branch(not_equal);
1070 __ movq(rcx, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
1071 exit.Bind();
1072 frame_->EmitPush(rcx);
1073
1074 } else if (slot->type() == Slot::PARAMETER) {
1075 frame_->PushParameterAt(slot->index());
1076
1077 } else if (slot->type() == Slot::LOCAL) {
1078 frame_->PushLocalAt(slot->index());
1079
1080 } else {
1081 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
1082 // here.
1083 //
1084 // The use of SlotOperand below is safe for an unspilled frame
1085 // because it will always be a context slot.
1086 ASSERT(slot->type() == Slot::CONTEXT);
1087 Result temp = allocator_->Allocate();
1088 ASSERT(temp.is_valid());
1089 __ movq(temp.reg(), SlotOperand(slot, temp.reg()));
1090 frame_->Push(&temp);
1091 }
1092 }
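The const case in LoadFromSlot exists because a const slot holds 'the hole' until its initializer runs, and a read before that must yield undefined. A toy model of that conversion (std::optional standing in for hole/undefined, purely illustrative):

#include <cassert>
#include <optional>

constexpr int kUndefined = -1;  // stand-in for Factory::undefined_value()

int LoadConst(const std::optional<int>& slot) {
  // nullopt plays the role of the hole; a load converts it to undefined.
  return slot.has_value() ? *slot : kUndefined;
}

int main() {
  std::optional<int> slot;  // const slot, not yet initialized
  assert(LoadConst(slot) == kUndefined);
  slot = 42;                // const initialization fills the slot
  assert(LoadConst(slot) == 42);
  return 0;
}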
1093
1094
1095 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
1096 // TODO(X64): Enable more types of slot.
1097
1098 if (slot->type() == Slot::LOOKUP) {
1099 UNIMPLEMENTED();
1100 /*
1101 ASSERT(slot->var()->is_dynamic());
1102
1103 // For now, just do a runtime call. Since the call is inevitable,
1104 // we eagerly sync the virtual frame so we can directly push the
1105 // arguments into place.
1106 frame_->SyncRange(0, frame_->element_count() - 1);
1107
1108 frame_->EmitPush(esi);
1109 frame_->EmitPush(Immediate(slot->var()->name()));
1110
1111 Result value;
1112 if (init_state == CONST_INIT) {
1113 // Same as the case for a normal store, but ignores attribute
1114 // (e.g. READ_ONLY) of context slot so that we can initialize const
1115 // properties (introduced via eval("const foo = (some expr);")). Also,
1116 // uses the current function context instead of the top context.
1117 //
1118 // Note that we must declare the foo upon entry of eval(), via a
1119 // context slot declaration, but we cannot initialize it at the same
1120 // time, because the const declaration may be at the end of the eval
1121 // code (sigh...) and the const variable may have been used before
1122 // (where its value is 'undefined'). Thus, we can only do the
1123 // initialization when we actually encounter the expression and when
1124 // the expression operands are defined and valid, and thus we need the
1125 // split into 2 operations: declaration of the context slot followed
1126 // by initialization.
1127 value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1128 } else {
1129 value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
1130 }
1131 // Storing a variable must keep the (new) value on the expression
1132 // stack. This is necessary for compiling chained assignment
1133 // expressions.
1134 frame_->Push(&value);
1135 */
1136 } else {
1137 ASSERT(!slot->var()->is_dynamic());
1138
1139 JumpTarget exit;
1140 if (init_state == CONST_INIT) {
1141 ASSERT(slot->var()->mode() == Variable::CONST);
1142 // Only the first const initialization must be executed (the slot
1143 // still contains 'the hole' value). When the assignment is executed,
1144 // the code is identical to a normal store (see below).
1145 //
1146 // We spill the frame in the code below because the direct-frame
1147 // access of SlotOperand is potentially unsafe with an unspilled
1148 // frame.
1149 VirtualFrame::SpilledScope spilled_scope;
1150 Comment cmnt(masm_, "[ Init const");
1151 __ movq(rcx, SlotOperand(slot, rcx));
1152 __ movq(kScratchRegister, Factory::the_hole_value(),
1153 RelocInfo::EMBEDDED_OBJECT);
1154 __ cmpq(rcx, kScratchRegister);
1155 exit.Branch(not_equal);
1156 }
1157
1158 // We must execute the store. Storing a variable must keep the (new)
1159 // value on the stack. This is necessary for compiling assignment
1160 // expressions.
1161 //
1162 // Note: We will reach here even with slot->var()->mode() ==
1163 // Variable::CONST because of const declarations which will initialize
1164 // consts to 'the hole' value and by doing so, end up calling this code.
1165 if (slot->type() == Slot::PARAMETER) {
1166 frame_->StoreToParameterAt(slot->index());
1167 } else if (slot->type() == Slot::LOCAL) {
1168 frame_->StoreToLocalAt(slot->index());
1169 } else {
1170 // The other slot types (LOOKUP and GLOBAL) cannot reach here.
1171 //
1172 // The use of SlotOperand below is safe for an unspilled frame
1173 // because the slot is a context slot.
1174 /*
1175 ASSERT(slot->type() == Slot::CONTEXT);
1176 frame_->Dup();
1177 Result value = frame_->Pop();
1178 value.ToRegister();
1179 Result start = allocator_->Allocate();
1180 ASSERT(start.is_valid());
1181 __ mov(SlotOperand(slot, start.reg()), value.reg());
1182 // RecordWrite may destroy the value registers.
1183 //
1184 // TODO(204): Avoid actually spilling when the value is not
1185 // needed (probably the common case).
1186 frame_->Spill(value.reg());
1187 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
1188 Result temp = allocator_->Allocate();
1189 ASSERT(temp.is_valid());
1190 __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
1191 // The results start, value, and temp are unused by going out of
1192 // scope.
1193 */
1194 }
1195
1196 exit.Bind();
1197 }
1198 }
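And the mirror image on the store side: a CONST_INIT store only executes while the slot still holds the hole, so re-entering the declaration cannot overwrite the constant. A toy model using the same optional-as-hole convention as above:

#include <cassert>
#include <optional>

void ConstInit(std::optional<int>* slot, int value) {
  // exit.Branch(not_equal) above: skip the store once the hole is gone.
  if (!slot->has_value()) *slot = value;
}

int main() {
  std::optional<int> slot;  // holds the hole
  ConstInit(&slot, 1);      // the first initialization executes
  ConstInit(&slot, 2);      // the second one is ignored
  assert(*slot == 1);
  return 0;
}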
1199
1200
1201 Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
1202 Slot* slot,
1203 TypeofState typeof_state,
1204 JumpTarget* slow) {
1205 UNIMPLEMENTED();
1206 return Result(rax);
1207 }
1208
1209
1210 void CodeGenerator::LoadGlobal() {
1211 if (in_spilled_code()) {
1212 frame_->EmitPush(GlobalObject());
1213 } else {
1214 Result temp = allocator_->Allocate();
1215 __ movq(temp.reg(), GlobalObject());
1216 frame_->Push(&temp);
1217 }
1218 }
1219
429 #undef __ 1220 #undef __
1221
430 // End of CodeGenerator implementation. 1222 // End of CodeGenerator implementation.
431 1223
432 // ----------------------------------------------------------------------------- 1224 // -----------------------------------------------------------------------------
433 // Implementation of stubs. 1225 // Implementation of stubs.
434 1226
435 // Stub classes have a public member named masm, not masm_. 1227 // Stub classes have a public member named masm, not masm_.
436 #define __ ACCESS_MASM(masm) 1228 #define __ ACCESS_MASM(masm)
437 1229
438 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { 1230 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
439 // Check that stack should contain frame pointer, code pointer, state and 1231 // Check that stack should contain frame pointer, code pointer, state and
(...skipping 337 matching lines...)
777 1569
778 // Restore frame pointer and return. 1570 // Restore frame pointer and return.
779 __ pop(rbp); 1571 __ pop(rbp);
780 __ ret(0); 1572 __ ret(0);
781 } 1573 }
782 1574
783 1575
784 #undef __ 1576 #undef __
785 1577
786 } } // namespace v8::internal 1578 } } // namespace v8::internal