OLD | NEW |
---|---|
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 19 matching lines...) | |
30 #include "bootstrapper.h" | 30 #include "bootstrapper.h" |
31 #include "codegen-inl.h" | 31 #include "codegen-inl.h" |
32 #include "debug.h" | 32 #include "debug.h" |
33 #include "parser.h" | 33 #include "parser.h" |
34 #include "register-allocator-inl.h" | 34 #include "register-allocator-inl.h" |
35 #include "runtime.h" | 35 #include "runtime.h" |
36 #include "scopes.h" | 36 #include "scopes.h" |
37 | 37 |
38 namespace v8 { namespace internal { | 38 namespace v8 { namespace internal { |
39 | 39 |
40 #define __ masm_-> | 40 #define __ DEFINE_MASM(masm_) |
41 | 41 |
42 // ------------------------------------------------------------------------- | 42 // ------------------------------------------------------------------------- |
43 // CodeGenState implementation. | 43 // CodeGenState implementation. |
44 | 44 |
45 CodeGenState::CodeGenState(CodeGenerator* owner) | 45 CodeGenState::CodeGenState(CodeGenerator* owner) |
46 : owner_(owner), | 46 : owner_(owner), |
47 typeof_state_(NOT_INSIDE_TYPEOF), | 47 typeof_state_(NOT_INSIDE_TYPEOF), |
48 destination_(NULL), | 48 destination_(NULL), |
49 previous_(NULL) { | 49 previous_(NULL) { |
50 owner_->set_state(this); | 50 owner_->set_state(this); |
(...skipping 1951 matching lines...) | |
2002 // contain a reference to eax (it is prepared for the return by spilling | 2002 // contain a reference to eax (it is prepared for the return by spilling |
2003 // all registers). | 2003 // all registers). |
2004 if (FLAG_trace) { | 2004 if (FLAG_trace) { |
2005 frame_->Push(return_value); | 2005 frame_->Push(return_value); |
2006 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1); | 2006 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1); |
2007 } | 2007 } |
2008 return_value->ToRegister(eax); | 2008 return_value->ToRegister(eax); |
2009 | 2009 |
2010 // Add a label for checking the size of the code used for returning. | 2010 // Add a label for checking the size of the code used for returning. |
2011 Label check_exit_codesize; | 2011 Label check_exit_codesize; |
2012 __ bind(&check_exit_codesize); | 2012 masm_->bind(&check_exit_codesize); |
2013 | 2013 |
2014 // Leave the frame and return popping the arguments and the | 2014 // Leave the frame and return popping the arguments and the |
2015 // receiver. | 2015 // receiver. |
2016 frame_->Exit(); | 2016 frame_->Exit(); |
2017 __ ret((scope_->num_parameters() + 1) * kPointerSize); | 2017 masm_->ret((scope_->num_parameters() + 1) * kPointerSize); |
2018 DeleteFrame(); | 2018 DeleteFrame(); |
2019 | 2019 |
2020 // Check that the size of the code used for returning matches what is | 2020 // Check that the size of the code used for returning matches what is |
2021 // expected by the debugger. | 2021 // expected by the debugger. |
2022 ASSERT_EQ(Debug::kIa32JSReturnSequenceLength, | 2022 ASSERT_EQ(Debug::kIa32JSReturnSequenceLength, |
2023 __ SizeOfCodeGeneratedSince(&check_exit_codesize)); | 2023 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); |
2024 } | 2024 } |
2025 | 2025 |
2026 | 2026 |
2027 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { | 2027 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { |
2028 ASSERT(!in_spilled_code()); | 2028 ASSERT(!in_spilled_code()); |
2029 Comment cmnt(masm_, "[ WithEnterStatement"); | 2029 Comment cmnt(masm_, "[ WithEnterStatement"); |
2030 CodeForStatementPosition(node); | 2030 CodeForStatementPosition(node); |
2031 Load(node->expression()); | 2031 Load(node->expression()); |
2032 Result context(this); | 2032 Result context(this); |
2033 if (node->is_catch_block()) { | 2033 if (node->is_catch_block()) { |
(...skipping 102 matching lines...) | |
2136 | 2136 |
2137 // 0 is placeholder. | 2137 // 0 is placeholder. |
2138 // Jump to the address at table_address + 2 * smi_value.reg(). | 2138 // Jump to the address at table_address + 2 * smi_value.reg(). |
2139 // The target of the jump is read from table_address + 4 * switch_value. | 2139 // The target of the jump is read from table_address + 4 * switch_value. |
2140 // The Smi encoding of smi_value.reg() is 2 * switch_value. | 2140 // The Smi encoding of smi_value.reg() is 2 * switch_value. |
2141 smi_value.ToRegister(); | 2141 smi_value.ToRegister(); |
2142 __ jmp(Operand(smi_value.reg(), smi_value.reg(), | 2142 __ jmp(Operand(smi_value.reg(), smi_value.reg(), |
2143 times_1, 0x0, RelocInfo::INTERNAL_REFERENCE)); | 2143 times_1, 0x0, RelocInfo::INTERNAL_REFERENCE)); |
2144 smi_value.Unuse(); | 2144 smi_value.Unuse(); |
2145 // Calculate address to overwrite later with actual address of table. | 2145 // Calculate address to overwrite later with actual address of table. |
2146 int32_t jump_table_ref = __ pc_offset() - sizeof(int32_t); | 2146 int32_t jump_table_ref = masm_->pc_offset() - sizeof(int32_t); |
2147 __ Align(4); | 2147 __ Align(4); |
2148 Label table_start; | 2148 Label table_start; |
2149 __ bind(&table_start); | 2149 __ bind(&table_start); |
2150 __ WriteInternalReference(jump_table_ref, table_start); | 2150 __ WriteInternalReference(jump_table_ref, table_start); |
2151 | 2151 |
2152 for (int i = 0; i < range; i++) { | 2152 for (int i = 0; i < range; i++) { |
2153 // These are the table entries. 0x0 is the placeholder for case address. | 2153 // These are the table entries. 0x0 is the placeholder for case address. |
2154 __ dd(0x0, RelocInfo::INTERNAL_REFERENCE); | 2154 __ dd(0x0, RelocInfo::INTERNAL_REFERENCE); |
2155 } | 2155 } |
2156 | 2156 |
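The scale arithmetic above is easy to check outside the assembler. The following is an illustration only, not part of the patch: since a Smi encodes the integer n as 2 * n (as the comment above states), using the same register as both base and index with scale times_1 addresses exactly one 4-byte table entry per switch value.

```cpp
// Illustration only: verify that base == index with scale times_1 turns the
// Smi-encoded switch value (2 * n) into a 4-byte-per-entry table offset.
#include <cassert>
#include <cstdint>

int main() {
  for (int32_t switch_value = 0; switch_value < 16; ++switch_value) {
    int32_t smi = 2 * switch_value;      // Smi encoding: the value shifted left by one.
    int32_t offset = smi + smi * 1;      // Operand(reg, reg, times_1, 0).
    assert(offset == 4 * switch_value);  // One int32-sized entry per case.
  }
  return 0;
}
```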
(...skipping 1222 matching lines...) | |
3379 // context extensions. If we have reached an eval scope, we check | 3379 // context extensions. If we have reached an eval scope, we check |
3380 // all extensions from this point. | 3380 // all extensions from this point. |
3381 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; | 3381 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; |
3382 s = s->outer_scope(); | 3382 s = s->outer_scope(); |
3383 } | 3383 } |
3384 | 3384 |
3385 if (s->is_eval_scope()) { | 3385 if (s->is_eval_scope()) { |
3386 // Loop up the context chain. There is no frame effect so it is | 3386 // Loop up the context chain. There is no frame effect so it is |
3387 // safe to use raw labels here. | 3387 // safe to use raw labels here. |
3388 Label next, fast; | 3388 Label next, fast; |
3389 if (!context.reg().is(tmp.reg())) __ mov(tmp.reg(), context.reg()); | 3389 if (!context.reg().is(tmp.reg())) { |
3390 __ mov(tmp.reg(), context.reg()); | |
3391 } | |
3390 __ bind(&next); | 3392 __ bind(&next); |
3391 // Terminate at global context. | 3393 // Terminate at global context. |
3392 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset), | 3394 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset), |
3393 Immediate(Factory::global_context_map())); | 3395 Immediate(Factory::global_context_map())); |
3394 __ j(equal, &fast); | 3396 __ j(equal, &fast); |
3395 // Check that extension is NULL. | 3397 // Check that extension is NULL. |
3396 __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0)); | 3398 __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0)); |
3397 slow->Branch(not_equal, not_taken); | 3399 slow->Branch(not_equal, not_taken); |
3398 // Load next context in chain. | 3400 // Load next context in chain. |
3399 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX)); | 3401 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX)); |
(...skipping 1869 matching lines...) | |
5269 RelocInfo::Mode mode = is_global_ | 5271 RelocInfo::Mode mode = is_global_ |
5270 ? RelocInfo::CODE_TARGET_CONTEXT | 5272 ? RelocInfo::CODE_TARGET_CONTEXT |
5271 : RelocInfo::CODE_TARGET; | 5273 : RelocInfo::CODE_TARGET; |
5272 Result value = cgen->frame()->CallKeyedLoadIC(mode); | 5274 Result value = cgen->frame()->CallKeyedLoadIC(mode); |
5273 // The result needs to be specifically the eax register because the | 5275 // The result needs to be specifically the eax register because the |
5274 // offset to the patch site will be expected in a test eax | 5276 // offset to the patch site will be expected in a test eax |
5275 // instruction. | 5277 // instruction. |
5276 ASSERT(value.is_register() && value.reg().is(eax)); | 5278 ASSERT(value.is_register() && value.reg().is(eax)); |
5277 // The delta from the start of the map-compare instruction to the | 5279 // The delta from the start of the map-compare instruction to the |
5278 // test eax instruction. | 5280 // test eax instruction. |
5279 int delta_to_patch_site = __ SizeOfCodeGeneratedSince(patch_site()); | 5281 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); |
Christian Plesner Hansen, 2009/04/21 11:47:32:
Maybe a word somewhere about why you have to use m
5280 __ test(value.reg(), Immediate(-delta_to_patch_site)); | 5282 __ test(value.reg(), Immediate(-delta_to_patch_site)); |
5281 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1); | 5283 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1); |
5282 | 5284 |
5283 // The receiver and key were spilled by the call, so their state as | 5285 // The receiver and key were spilled by the call, so their state as |
5284 // constants or copies has been changed. Thus, they need to be | 5286 // constants or copies has been changed. Thus, they need to be |
5285 // "mergable" in the block at the exit label and are therefore | 5287 // "mergable" in the block at the exit label and are therefore |
5286 // passed as return results here. | 5288 // passed as return results here. |
5287 key = cgen->frame()->Pop(); | 5289 key = cgen->frame()->Pop(); |
5288 receiver = cgen->frame()->Pop(); | 5290 receiver = cgen->frame()->Pop(); |
5289 exit_.Jump(&receiver, &key, &value); | 5291 exit_.Jump(&receiver, &key, &value); |
5290 } | 5292 } |
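The negated delta written into the test-eax immediate above is what lets a later reader of that instruction locate the inlined map compare. A minimal sketch of that arithmetic, with made-up addresses and the assumption (not shown in this file) that the reader is the keyed-load IC miss machinery:

```cpp
// Sketch only; addresses are hypothetical and the consumer of the immediate
// is assumed rather than shown here. Encoding -delta_to_patch_site in the
// test-eax immediate lets the instruction's reader recover the start of the
// inlined map compare from the instruction's own address.
#include <cassert>
#include <cstdint>

int main() {
  uint32_t map_compare_start = 0x08040000u;  // Hypothetical ia32 code address.
  uint32_t test_eax_site = 0x08040020u;      // Where the test instruction sits.
  int32_t delta_to_patch_site =
      static_cast<int32_t>(test_eax_site - map_compare_start);
  int32_t immediate = -delta_to_patch_site;  // The value emitted above.
  assert(map_compare_start == test_eax_site + immediate);
  return 0;
}
```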
5291 | 5293 |
5292 | 5294 |
5293 #undef __ | 5295 #undef __ |
5294 #define __ masm-> | 5296 #define __ DEFINE_MASM(masm) |
5295 | 5297 |
5296 Handle<String> Reference::GetName() { | 5298 Handle<String> Reference::GetName() { |
5297 ASSERT(type_ == NAMED); | 5299 ASSERT(type_ == NAMED); |
5298 Property* property = expression_->AsProperty(); | 5300 Property* property = expression_->AsProperty(); |
5299 if (property == NULL) { | 5301 if (property == NULL) { |
5300 // Global variable reference treated as a named property reference. | 5302 // Global variable reference treated as a named property reference. |
5301 VariableProxy* proxy = expression_->AsVariableProxy(); | 5303 VariableProxy* proxy = expression_->AsVariableProxy(); |
5302 ASSERT(proxy->AsVariable() != NULL); | 5304 ASSERT(proxy->AsVariable() != NULL); |
5303 ASSERT(proxy->AsVariable()->is_global()); | 5305 ASSERT(proxy->AsVariable()->is_global()); |
5304 return proxy->name(); | 5306 return proxy->name(); |
(...skipping 261 matching lines...) | |
5566 __ bind(&true_result); | 5568 __ bind(&true_result); |
5567 __ mov(eax, 1); | 5569 __ mov(eax, 1); |
5568 __ ret(1 * kPointerSize); | 5570 __ ret(1 * kPointerSize); |
5569 __ bind(&false_result); | 5571 __ bind(&false_result); |
5570 __ mov(eax, 0); | 5572 __ mov(eax, 0); |
5571 __ ret(1 * kPointerSize); | 5573 __ ret(1 * kPointerSize); |
5572 } | 5574 } |
5573 | 5575 |
5574 | 5576 |
5575 #undef __ | 5577 #undef __ |
5576 #define __ masm_-> | 5578 #define __ DEFINE_MASM(masm_) |
5577 | 5579 |
5578 Result DeferredInlineBinaryOperation::GenerateInlineCode(Result* left, | 5580 Result DeferredInlineBinaryOperation::GenerateInlineCode(Result* left, |
5579 Result* right) { | 5581 Result* right) { |
5580 // Perform fast-case smi code for the operation (left <op> right) and | 5582 // Perform fast-case smi code for the operation (left <op> right) and |
5581 // returns the result in a Result. | 5583 // returns the result in a Result. |
5582 // If any fast-case tests fail, it jumps to the slow-case deferred code, | 5584 // If any fast-case tests fail, it jumps to the slow-case deferred code, |
5583 // which calls the binary operation stub, with the arguments (in registers) | 5585 // which calls the binary operation stub, with the arguments (in registers) |
5584 // on top of the frame. | 5586 // on top of the frame. |
5585 // Consumes its arguments (sets left and right to invalid and frees their | 5587 // Consumes its arguments (sets left and right to invalid and frees their |
5586 // registers). | 5588 // registers). |
(...skipping 313 matching lines...) | |
5900 UNREACHABLE(); | 5902 UNREACHABLE(); |
5901 break; | 5903 break; |
5902 } | 5904 } |
5903 left->Unuse(); | 5905 left->Unuse(); |
5904 right->Unuse(); | 5906 right->Unuse(); |
5905 return answer; | 5907 return answer; |
5906 } | 5908 } |
5907 | 5909 |
5908 | 5910 |
5909 #undef __ | 5911 #undef __ |
5910 #define __ masm-> | 5912 #define __ DEFINE_MASM(masm) |
5911 | 5913 |
5912 void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { | 5914 void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { |
5913 // Perform fast-case smi code for the operation (eax <op> ebx) and | 5915 // Perform fast-case smi code for the operation (eax <op> ebx) and |
5914 // leave result in register eax. | 5916 // leave result in register eax. |
5915 | 5917 |
5916 // Prepare the smi check of both operands by or'ing them together | 5918 // Prepare the smi check of both operands by or'ing them together |
5917 // before checking against the smi mask. | 5919 // before checking against the smi mask. |
5918 __ mov(ecx, Operand(ebx)); | 5920 __ mov(ecx, Operand(ebx)); |
5919 __ or_(ecx, Operand(eax)); | 5921 __ or_(ecx, Operand(eax)); |
5920 | 5922 |
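The or-before-test trick in GenerateSmiCode relies on the tag layout. A small sketch, assuming the usual scheme of the time (Smis carry a 0 in the low bit and kSmiTagMask is 1; both are assumptions here, not taken from this file):

```cpp
// Sketch of the combined Smi check: the or of the two operands has a 1 in
// the tag bit iff at least one operand is not a Smi, so a single mask test
// covers both. kSmiTagMask == 1 and the 0 Smi tag are assumed values.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kSmiTagMask = 1u;
  uint32_t smi_3 = 3u << 1;             // Tagged small integer 3.
  uint32_t smi_5 = 5u << 1;             // Tagged small integer 5.
  uint32_t heap_object = 0x1000u | 1u;  // Non-Smi: tag bit set.
  assert(((smi_3 | smi_5) & kSmiTagMask) == 0);        // Both Smis: fast path.
  assert(((smi_3 | heap_object) & kSmiTagMask) != 0);  // Any non-Smi: slow path.
  return 0;
}
```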
(...skipping 304 matching lines...) | |
6225 // pending #IA exception. Clear it. | 6227 // pending #IA exception. Clear it. |
6226 __ fnclex(); | 6228 __ fnclex(); |
6227 } else { | 6229 } else { |
6228 // The non-SSE3 variant does early bailout if the right | 6230 // The non-SSE3 variant does early bailout if the right |
6229 // operand isn't a 32-bit integer, so we may have a single | 6231 // operand isn't a 32-bit integer, so we may have a single |
6230 // value on the FPU stack we need to get rid of. | 6232 // value on the FPU stack we need to get rid of. |
6231 __ ffree(0); | 6233 __ ffree(0); |
6232 } | 6234 } |
6233 | 6235 |
6234 // SHR should return uint32 - go to runtime for non-smi/negative result. | 6236 // SHR should return uint32 - go to runtime for non-smi/negative result. |
6235 if (op_ == Token::SHR) __ bind(&non_smi_result); | 6237 if (op_ == Token::SHR) { |
6238 __ bind(&non_smi_result); | |
6239 } | |
6236 __ mov(eax, Operand(esp, 1 * kPointerSize)); | 6240 __ mov(eax, Operand(esp, 1 * kPointerSize)); |
6237 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 6241 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
6238 break; | 6242 break; |
6239 } | 6243 } |
6240 default: UNREACHABLE(); break; | 6244 default: UNREACHABLE(); break; |
6241 } | 6245 } |
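The SHR-only fallback above exists because a logical shift right produces a uint32 that may not be representable as a Smi even when both operands were. A small numeric illustration, assuming the 31-bit ia32 Smi range:

```cpp
// Illustration of why only SHR needs the non_smi_result path: (-1) >>> 0 in
// JavaScript is 0xFFFFFFFF, far outside the (assumed) 31-bit Smi range, so
// the stub must fall back to the runtime to box the result as a heap number.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kMaxSmiValue = (1u << 30) - 1;        // Assumed ia32 Smi maximum.
  uint32_t result = static_cast<uint32_t>(-1) >> 0;    // JS: (-1) >>> 0.
  assert(result == 0xFFFFFFFFu);
  assert(result > kMaxSmiValue);                       // Cannot be tagged as a Smi.
  return 0;
}
```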
6242 | 6246 |
6243 // If all else fails, use the runtime system to get the correct | 6247 // If all else fails, use the runtime system to get the correct |
6244 // result. | 6248 // result. |
6245 __ bind(&call_runtime); | 6249 __ bind(&call_runtime); |
(...skipping 868 matching lines...) | |
7114 | 7118 |
7115 // Slow-case: Go through the JavaScript implementation. | 7119 // Slow-case: Go through the JavaScript implementation. |
7116 __ bind(&slow); | 7120 __ bind(&slow); |
7117 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 7121 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
7118 } | 7122 } |
7119 | 7123 |
7120 | 7124 |
7121 #undef __ | 7125 #undef __ |
7122 | 7126 |
7123 } } // namespace v8::internal | 7127 } } // namespace v8::internal |