OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 333 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
344 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 344 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
345 // Tear down the frame which will restore the caller's frame pointer and | 345 // Tear down the frame which will restore the caller's frame pointer and |
346 // the link register. | 346 // the link register. |
347 frame_->Exit(); | 347 frame_->Exit(); |
348 | 348 |
349 // Here we use masm_-> instead of the __ macro to prevent the code coverage | 349 // Here we use masm_-> instead of the __ macro to prevent the code coverage |
350 // tool from instrumenting as we rely on the code size here. | 350 // tool from instrumenting as we rely on the code size here. |
351 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize; | 351 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize; |
352 masm_->add(sp, sp, Operand(sp_delta)); | 352 masm_->add(sp, sp, Operand(sp_delta)); |
353 masm_->Jump(lr); | 353 masm_->Jump(lr); |
354 } | |
355 | 354 |
356 #ifdef DEBUG | 355 #ifdef DEBUG |
357 // Check that the size of the code used for returning matches what is | 356 // Check that the size of the code used for returning matches what is |
Erik Corry
2010/04/27 08:44:28
Indentation
Søren Thygesen Gjesse
2010/04/27 09:10:20
Done.
| |
358 // expected by the debugger. If the sp_delta above cannot be encoded in the | 357 // expected by the debugger. If the sp_delta above cannot be encoded in the |
359 // add instruction, the add will generate two instructions. | 358 // add instruction, the add will generate two instructions. |
360 int return_sequence_length = | 359 int return_sequence_length = |
361 masm_->InstructionsGeneratedSince(&check_exit_codesize); | 360 masm_->InstructionsGeneratedSince(&check_exit_codesize); |
362 CHECK(return_sequence_length == Assembler::kJSReturnSequenceLength || | 361 CHECK(return_sequence_length == Assembler::kJSReturnSequenceLength || |
363 return_sequence_length == Assembler::kJSReturnSequenceLength + 1); | 362 return_sequence_length == Assembler::kJSReturnSequenceLength + 1); |
364 #endif | 363 #endif |
364 } | |
365 } | 365 } |
366 | 366 |
367 // Adjust for function-level loop nesting. | 367 // Adjust for function-level loop nesting. |
368 ASSERT(loop_nesting_ == info->loop_nesting()); | 368 ASSERT(loop_nesting_ == info->loop_nesting()); |
369 loop_nesting_ = 0; | 369 loop_nesting_ = 0; |
370 | 370 |
371 // Code generation state must be reset. | 371 // Code generation state must be reset. |
372 ASSERT(!has_cc()); | 372 ASSERT(!has_cc()); |
373 ASSERT(state_ == NULL); | 373 ASSERT(state_ == NULL); |
374 ASSERT(loop_nesting() == 0); | 374 ASSERT(loop_nesting() == 0); |
(...skipping 4848 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
5223 (!has_cc() && frame_->height() == original_height + 1)); | 5223 (!has_cc() && frame_->height() == original_height + 1)); |
5224 } | 5224 } |
5225 | 5225 |
5226 | 5226 |
5227 class DeferredReferenceGetNamedValue: public DeferredCode { | 5227 class DeferredReferenceGetNamedValue: public DeferredCode { |
5228 public: | 5228 public: |
5229 explicit DeferredReferenceGetNamedValue(Handle<String> name) : name_(name) { | 5229 explicit DeferredReferenceGetNamedValue(Handle<String> name) : name_(name) { |
5230 set_comment("[ DeferredReferenceGetNamedValue"); | 5230 set_comment("[ DeferredReferenceGetNamedValue"); |
5231 } | 5231 } |
5232 | 5232 |
5233 virtual void BeforeGenerate(); | |
5234 virtual void Generate(); | 5233 virtual void Generate(); |
5235 virtual void AfterGenerate(); | |
5236 | 5234 |
5237 private: | 5235 private: |
5238 Handle<String> name_; | 5236 Handle<String> name_; |
5239 }; | 5237 }; |
5240 | 5238 |
5241 | 5239 |
5242 void DeferredReferenceGetNamedValue::BeforeGenerate() { | 5240 void DeferredReferenceGetNamedValue::Generate() { |
5243 __ StartBlockConstPool(); | 5241 __ DecrementCounter(&Counters::named_load_inline, 1, r1, r2); |
5242 __ IncrementCounter(&Counters::named_load_inline_miss, 1, r1, r2); | |
5243 | |
5244 // Setup the name register and call load IC. | |
5245 __ mov(r2, Operand(name_)); | |
5246 | |
5247 // The rest of the instructions in the deferred code must be together. | |
5248 { Assembler::BlockConstPoolScope block_const_pool(masm_); | |
5249 | |
5250 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | |
5251 __ Call(ic, RelocInfo::CODE_TARGET); | |
5252 // The call must be followed by a nop(1) instruction to indicate that the | |
5253 // inobject has been inlined. | |
Erik Corry
2010/04/27 08:44:28
inobject -> in-object case
Søren Thygesen Gjesse
2010/04/27 09:10:20
Done.
| |
5254 __ nop(NAMED_PROPERTY_LOAD_INLINED); | |
5255 | |
5256 // Block the constant pool for one more instruction after leaving this | |
5257 // constant pool block scope to include the branch instruction ending the | |
5258 // deferred code. | |
5259 __ BlockConstPoolFor(1); | |
5260 } | |
5244 } | 5261 } |
5245 | 5262 |
5246 | 5263 |
5247 void DeferredReferenceGetNamedValue::Generate() { | |
5248 __ IncrementCounter(&Counters::named_load_inline_miss, 1, r1, r2); | |
5249 // Setup the name register and call load IC. | |
5250 __ mov(r2, Operand(name_)); | |
5251 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | |
5252 __ Call(ic, RelocInfo::CODE_TARGET); | |
5253 // The call must be followed by a nop(1) instruction to indicate that the | |
5254 // inobject has been inlined. | |
5255 __ nop(NAMED_PROPERTY_LOAD_INLINED); | |
5256 } | |
5257 | |
5258 | |
5259 void DeferredReferenceGetNamedValue::AfterGenerate() { | |
5260 __ EndBlockConstPool(); | |
5261 } | |
5262 | |
5263 | |
5264 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { | 5264 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { |
5265 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { | 5265 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { |
5266 Comment cmnt(masm(), "[ Load from named Property"); | 5266 Comment cmnt(masm(), "[ Load from named Property"); |
5267 // Setup the name register and call load IC. | 5267 // Setup the name register and call load IC. |
5268 __ mov(r2, Operand(name)); | 5268 __ mov(r2, Operand(name)); |
5269 frame_->CallLoadIC(is_contextual | 5269 frame_->CallLoadIC(is_contextual |
5270 ? RelocInfo::CODE_TARGET_CONTEXT | 5270 ? RelocInfo::CODE_TARGET_CONTEXT |
5271 : RelocInfo::CODE_TARGET); | 5271 : RelocInfo::CODE_TARGET); |
5272 } else { | 5272 } else { |
5273 // Inline the inobject property case. | 5273 // Inline the inobject property case. |
5274 Comment cmnt(masm(), "[ Inlined named property load"); | 5274 Comment cmnt(masm(), "[ Inlined named property load"); |
5275 | 5275 |
5276 DeferredReferenceGetNamedValue* deferred = | 5276 DeferredReferenceGetNamedValue* deferred = |
5277 new DeferredReferenceGetNamedValue(name); | 5277 new DeferredReferenceGetNamedValue(name); |
5278 | 5278 |
5279 // Counter will be decremented in the deferred code. Placed here to avoid | |
5280 // having it in the instruction stream below where patching will occour. | |
Erik Corry
2010/04/27 08:44:28
occour -> occur
Søren Thygesen Gjesse
2010/04/27 09:10:20
Done.
| |
5281 __ IncrementCounter(&Counters::named_load_inline, 1, | |
5282 frame_->scratch0(), frame_->scratch1()); | |
5283 | |
5279 // The following instructions are the inlined load of an in-object property. | 5284 // The following instructions are the inlined load of an in-object property. |
5280 // Parts of this code are patched, so the exact instructions generated need | 5285 // Parts of this code are patched, so the exact instructions generated need |
5281 // to be fixed. Therefore the instruction pool is blocked when generating | 5286 // to be fixed. Therefore the instruction pool is blocked when generating |
5282 // this code | 5287 // this code |
5283 #ifdef DEBUG | 5288 #ifdef DEBUG |
5284 int kInlinedNamedLoadInstructions = 8; | 5289 int kInlinedNamedLoadInstructions = 8; |
5285 Label check_inlined_codesize; | 5290 Label check_inlined_codesize; |
5286 masm_->bind(&check_inlined_codesize); | 5291 masm_->bind(&check_inlined_codesize); |
5287 #endif | 5292 #endif |
5288 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 5293 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
5289 // Load the receiver from the stack. | 5294 // Load the receiver from the stack. |
5290 __ ldr(r1, MemOperand(sp, 0)); | 5295 __ ldr(r1, MemOperand(sp, 0)); |
5291 | 5296 |
5292 // Check that the receiver is a heap object. | 5297 // Check that the receiver is a heap object. |
5293 __ tst(r1, Operand(kSmiTagMask)); | 5298 __ tst(r1, Operand(kSmiTagMask)); |
5294 deferred->Branch(eq); | 5299 deferred->Branch(eq); |
5295 | 5300 |
5296 // Check the map. The null map used below is patched by the inline cache | 5301 // Check the map. The null map used below is patched by the inline cache |
5297 // code. | 5302 // code. |
5298 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); | 5303 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
5299 __ mov(r3, Operand(Factory::null_value())); | 5304 __ mov(r3, Operand(Factory::null_value())); |
5300 __ cmp(r2, r3); | 5305 __ cmp(r2, r3); |
5301 deferred->Branch(ne); | 5306 deferred->Branch(ne); |
5302 | 5307 |
5303 // Initially use an invalid index. The index will be patched by the | 5308 // Initially use an invalid index. The index will be patched by the |
5304 // inline cache code. | 5309 // inline cache code. |
5305 __ ldr(r0, MemOperand(r1, 0)); | 5310 __ ldr(r0, MemOperand(r1, 0)); |
5311 | |
5312 // Make sure that the expected number of instructions are generated. | |
5313 ASSERT_EQ(kInlinedNamedLoadInstructions, | |
5314 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | |
5306 } | 5315 } |
5307 | 5316 |
5308 // Make sure that the expected number of instructions are generated. | |
5309 ASSERT_EQ(kInlinedNamedLoadInstructions, | |
5310 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | |
5311 | |
5312 __ IncrementCounter(&Counters::named_load_inline, 1, r1, r2); | |
5313 deferred->BindExit(); | 5317 deferred->BindExit(); |
5314 } | 5318 } |
5315 } | 5319 } |
5316 | 5320 |
5317 | 5321 |
5318 void CodeGenerator::EmitKeyedLoad(bool is_global) { | 5322 void CodeGenerator::EmitKeyedLoad(bool is_global) { |
5319 Comment cmnt(masm_, "[ Load from keyed Property"); | 5323 Comment cmnt(masm_, "[ Load from keyed Property"); |
5320 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 5324 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
5321 RelocInfo::Mode rmode = is_global | 5325 RelocInfo::Mode rmode = is_global |
5322 ? RelocInfo::CODE_TARGET_CONTEXT | 5326 ? RelocInfo::CODE_TARGET_CONTEXT |
(...skipping 4022 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
9345 | 9349 |
9346 // Just jump to runtime to add the two strings. | 9350 // Just jump to runtime to add the two strings. |
9347 __ bind(&string_add_runtime); | 9351 __ bind(&string_add_runtime); |
9348 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 9352 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
9349 } | 9353 } |
9350 | 9354 |
9351 | 9355 |
9352 #undef __ | 9356 #undef __ |
9353 | 9357 |
9354 } } // namespace v8::internal | 9358 } } // namespace v8::internal |
OLD | NEW |