| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3685 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3696 frame_->EmitPush(r0); | 3696 frame_->EmitPush(r0); |
| 3697 } | 3697 } |
| 3698 | 3698 |
| 3699 } else { | 3699 } else { |
| 3700 // ------------------------------------------- | 3700 // ------------------------------------------- |
| 3701 // JavaScript example: 'array[index](1, 2, 3)' | 3701 // JavaScript example: 'array[index](1, 2, 3)' |
| 3702 // ------------------------------------------- | 3702 // ------------------------------------------- |
| 3703 | 3703 |
| 3704 LoadAndSpill(property->obj()); | 3704 LoadAndSpill(property->obj()); |
| 3705 LoadAndSpill(property->key()); | 3705 LoadAndSpill(property->key()); |
| 3706 EmitKeyedLoad(false); | 3706 EmitKeyedLoad(); |
| 3707 frame_->Drop(); // key | 3707 frame_->Drop(); // key |
| 3708 // Put the function below the receiver. | 3708 // Put the function below the receiver. |
| 3709 if (property->is_synthetic()) { | 3709 if (property->is_synthetic()) { |
| 3710 // Use the global receiver. | 3710 // Use the global receiver. |
| 3711 frame_->Drop(); | 3711 frame_->Drop(); |
| 3712 frame_->EmitPush(r0); | 3712 frame_->EmitPush(r0); |
| 3713 LoadGlobalReceiver(r0); | 3713 LoadGlobalReceiver(r0); |
| 3714 } else { | 3714 } else { |
| 3715 frame_->EmitPop(r1); // receiver | 3715 frame_->EmitPop(r1); // receiver |
| 3716 frame_->EmitPush(r0); // function | 3716 frame_->EmitPush(r0); // function |
| (...skipping 1526 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5243 | 5243 |
| 5244 // Setup the name register and call load IC. | 5244 // Setup the name register and call load IC. |
| 5245 __ mov(r2, Operand(name_)); | 5245 __ mov(r2, Operand(name_)); |
| 5246 | 5246 |
| 5247 // The rest of the instructions in the deferred code must be together. | 5247 // The rest of the instructions in the deferred code must be together. |
| 5248 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 5248 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 5249 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 5249 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 5250 __ Call(ic, RelocInfo::CODE_TARGET); | 5250 __ Call(ic, RelocInfo::CODE_TARGET); |
| 5251 // The call must be followed by a nop(1) instruction to indicate that the | 5251 // The call must be followed by a nop(1) instruction to indicate that the |
| 5252 // in-object has been inlined. | 5252 // in-object property load has been inlined. |
| 5253 __ nop(NAMED_PROPERTY_LOAD_INLINED); | 5253 __ nop(PROPERTY_LOAD_INLINED); |
| 5254 | 5254 |
| 5255 // Block the constant pool for one more instruction after leaving this | 5255 // Block the constant pool for one more instruction after leaving this |
| 5256 // constant pool block scope to include the branch instruction ending the | 5256 // constant pool block scope to include the branch instruction ending the |
| 5257 // deferred code. | 5257 // deferred code. |
| 5258 __ BlockConstPoolFor(1); | 5258 __ BlockConstPoolFor(1); |
| 5259 } | 5259 } |
| 5260 } | 5260 } |
| 5261 | 5261 |
| 5262 | 5262 |
| 5263 class DeferredReferenceGetKeyedValue: public DeferredCode { |
| 5264 public: |
| 5265 DeferredReferenceGetKeyedValue() { |
| 5266 set_comment("[ DeferredReferenceGetKeyedValue"); |
| 5267 } |
| 5268 |
| 5269 virtual void Generate(); |
| 5270 }; |
| 5271 |
| 5272 |
| 5273 void DeferredReferenceGetKeyedValue::Generate() { |
| 5274 __ DecrementCounter(&Counters::keyed_load_inline, 1, r1, r2); |
| 5275 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, r1, r2); |
| 5276 |
| 5277 // The rest of the instructions in the deferred code must be together. |
| 5278 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 5279 // Call keyed load IC. It has all arguments on the stack. |
| 5280 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
| 5281 __ Call(ic, RelocInfo::CODE_TARGET); |
| 5282 // The call must be followed by a nop instruction to indicate that the |
| 5283 // keyed load has been inlined. |
| 5284 __ nop(PROPERTY_LOAD_INLINED); |
| 5285 |
| 5286 // Block the constant pool for one more instruction after leaving this |
| 5287 // constant pool block scope to include the branch instruction ending the |
| 5288 // deferred code. |
| 5289 __ BlockConstPoolFor(1); |
| 5290 } |
| 5291 } |
| 5292 |
| 5293 |
| 5263 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { | 5294 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { |
| 5264 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { | 5295 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { |
| 5265 Comment cmnt(masm(), "[ Load from named Property"); | 5296 Comment cmnt(masm(), "[ Load from named Property"); |
| 5266 // Setup the name register and call load IC. | 5297 // Setup the name register and call load IC. |
| 5267 __ mov(r2, Operand(name)); | 5298 __ mov(r2, Operand(name)); |
| 5268 frame_->CallLoadIC(is_contextual | 5299 frame_->CallLoadIC(is_contextual |
| 5269 ? RelocInfo::CODE_TARGET_CONTEXT | 5300 ? RelocInfo::CODE_TARGET_CONTEXT |
| 5270 : RelocInfo::CODE_TARGET); | 5301 : RelocInfo::CODE_TARGET); |
| 5271 } else { | 5302 } else { |
| 5272 // Inline the inobject property case. | 5303 // Inline the in-object property case. |
| 5273 Comment cmnt(masm(), "[ Inlined named property load"); | 5304 Comment cmnt(masm(), "[ Inlined named property load"); |
| 5274 | 5305 |
| 5275 DeferredReferenceGetNamedValue* deferred = | 5306 DeferredReferenceGetNamedValue* deferred = |
| 5276 new DeferredReferenceGetNamedValue(name); | 5307 new DeferredReferenceGetNamedValue(name); |
| 5277 | 5308 |
| 5278 // Counter will be decremented in the deferred code. Placed here to avoid | 5309 // Counter will be decremented in the deferred code. Placed here to avoid |
| 5279 // having it in the instruction stream below where patching will occur. | 5310 // having it in the instruction stream below where patching will occur. |
| 5280 __ IncrementCounter(&Counters::named_load_inline, 1, | 5311 __ IncrementCounter(&Counters::named_load_inline, 1, |
| 5281 frame_->scratch0(), frame_->scratch1()); | 5312 frame_->scratch0(), frame_->scratch1()); |
| 5282 | 5313 |
| (...skipping 14 matching lines...) Expand all Loading... |
| 5297 __ tst(r1, Operand(kSmiTagMask)); | 5328 __ tst(r1, Operand(kSmiTagMask)); |
| 5298 deferred->Branch(eq); | 5329 deferred->Branch(eq); |
| 5299 | 5330 |
| 5300 // Check the map. The null map used below is patched by the inline cache | 5331 // Check the map. The null map used below is patched by the inline cache |
| 5301 // code. | 5332 // code. |
| 5302 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); | 5333 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 5303 __ mov(r3, Operand(Factory::null_value())); | 5334 __ mov(r3, Operand(Factory::null_value())); |
| 5304 __ cmp(r2, r3); | 5335 __ cmp(r2, r3); |
| 5305 deferred->Branch(ne); | 5336 deferred->Branch(ne); |
| 5306 | 5337 |
| 5307 // Use initially use an invalid index. The index will be patched by the | 5338 // Initially use an invalid index. The index will be patched by the |
| 5308 // inline cache code. | 5339 // inline cache code. |
| 5309 __ ldr(r0, MemOperand(r1, 0)); | 5340 __ ldr(r0, MemOperand(r1, 0)); |
| 5310 | 5341 |
| 5311 // Make sure that the expected number of instructions are generated. | 5342 // Make sure that the expected number of instructions are generated. |
| 5312 ASSERT_EQ(kInlinedNamedLoadInstructions, | 5343 ASSERT_EQ(kInlinedNamedLoadInstructions, |
| 5313 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | 5344 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
| 5314 } | 5345 } |
| 5315 | 5346 |
| 5316 deferred->BindExit(); | 5347 deferred->BindExit(); |
| 5317 } | 5348 } |
| 5318 } | 5349 } |
| 5319 | 5350 |
| 5320 | 5351 |
| 5321 void CodeGenerator::EmitKeyedLoad(bool is_global) { | 5352 void CodeGenerator::EmitKeyedLoad() { |
| 5322 Comment cmnt(masm_, "[ Load from keyed Property"); | 5353 if (loop_nesting() == 0) { |
| 5323 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 5354 Comment cmnt(masm_, "[ Load from keyed property"); |
| 5324 RelocInfo::Mode rmode = is_global | 5355 frame_->CallKeyedLoadIC(); |
| 5325 ? RelocInfo::CODE_TARGET_CONTEXT | 5356 } else { |
| 5326 : RelocInfo::CODE_TARGET; | 5357 // Inline the keyed load. |
| 5327 frame_->CallCodeObject(ic, rmode, 0); | 5358 Comment cmnt(masm_, "[ Inlined load from keyed property"); |
| 5359 |
| 5360 DeferredReferenceGetKeyedValue* deferred = |
| 5361 new DeferredReferenceGetKeyedValue(); |
| 5362 |
| 5363 // Counter will be decremented in the deferred code. Placed here to avoid |
| 5364 // having it in the instruction stream below where patching will occur. |
| 5365 __ IncrementCounter(&Counters::keyed_load_inline, 1, |
| 5366 frame_->scratch0(), frame_->scratch1()); |
| 5367 |
| 5368 // Load the receiver from the stack. |
| 5369 __ ldr(r0, MemOperand(sp, kPointerSize)); |
| 5370 |
| 5371 // Check that the receiver is a heap object. |
| 5372 __ tst(r0, Operand(kSmiTagMask)); |
| 5373 deferred->Branch(eq); |
| 5374 |
| 5375 // The following instructions are the inlined load keyed property. Parts |
| 5376 // of this code are patched, so the exact number of instructions generated |
| 5377 // needs to be fixed. Therefore the constant pool is blocked while generating |
| 5378 // this code. |
| 5379 #ifdef DEBUG |
| 5380 int kInlinedKeyedLoadInstructions = 20; |
| 5381 Label check_inlined_codesize; |
| 5382 masm_->bind(&check_inlined_codesize); |
| 5383 #endif |
| 5384 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 5385 // Check the map. The null map used below is patched by the inline cache |
| 5386 // code. |
| 5387 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 5388 __ mov(r2, Operand(Factory::null_value())); |
| 5389 __ cmp(r1, r2); |
| 5390 deferred->Branch(ne); |
| 5391 |
| 5392 // Load the key from the stack. |
| 5393 __ ldr(r1, MemOperand(sp, 0)); |
| 5394 |
| 5395 // Check that the key is a smi. |
| 5396 __ tst(r1, Operand(kSmiTagMask)); |
| 5397 deferred->Branch(ne); |
| 5398 |
| 5399 // Get the elements array from the receiver and check that it |
| 5400 // is not a dictionary. |
| 5401 __ ldr(r2, FieldMemOperand(r0, JSObject::kElementsOffset)); |
| 5402 __ ldr(r3, FieldMemOperand(r2, JSObject::kMapOffset)); |
| 5403 __ LoadRoot(r4, Heap::kFixedArrayMapRootIndex); |
| 5404 __ cmp(r3, r4); |
| 5405 deferred->Branch(ne); |
| 5406 |
| 5407 // Check that key is within bounds. |
| 5408 __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset)); |
| 5409 __ cmp(r3, Operand(r1, ASR, kSmiTagSize)); |
| 5410 deferred->Branch(ls); // Unsigned less equal. |
| 5411 |
| 5412 // Load and check that the result is not the hole (r1 is a smi). |
| 5413 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); |
| 5414 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 5415 __ ldr(r0, MemOperand(r2, r1, LSL, |
| 5416 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize))); |
| 5417 __ cmp(r0, r3); |
| 5418 deferred->Branch(eq); |
| 5419 |
| 5420 // Make sure that the expected number of instructions are generated. |
| 5421 ASSERT_EQ(kInlinedKeyedLoadInstructions, |
| 5422 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
| 5423 } |
| 5424 |
| 5425 deferred->BindExit(); |
| 5426 } |
| 5328 } | 5427 } |
| 5329 | 5428 |
| 5330 | 5429 |
| 5331 #ifdef DEBUG | 5430 #ifdef DEBUG |
| 5332 bool CodeGenerator::HasValidEntryRegisters() { return true; } | 5431 bool CodeGenerator::HasValidEntryRegisters() { return true; } |
| 5333 #endif | 5432 #endif |
| 5334 | 5433 |
| 5335 | 5434 |
| 5336 #undef __ | 5435 #undef __ |
| 5337 #define __ ACCESS_MASM(masm) | 5436 #define __ ACCESS_MASM(masm) |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5376 case NAMED: { | 5475 case NAMED: { |
| 5377 Variable* var = expression_->AsVariableProxy()->AsVariable(); | 5476 Variable* var = expression_->AsVariableProxy()->AsVariable(); |
| 5378 bool is_global = var != NULL; | 5477 bool is_global = var != NULL; |
| 5379 ASSERT(!is_global || var->is_global()); | 5478 ASSERT(!is_global || var->is_global()); |
| 5380 cgen_->EmitNamedLoad(GetName(), is_global); | 5479 cgen_->EmitNamedLoad(GetName(), is_global); |
| 5381 cgen_->frame()->EmitPush(r0); | 5480 cgen_->frame()->EmitPush(r0); |
| 5382 break; | 5481 break; |
| 5383 } | 5482 } |
| 5384 | 5483 |
| 5385 case KEYED: { | 5484 case KEYED: { |
| 5386 // TODO(181): Implement inlined version of array indexing once | |
| 5387 // loop nesting is properly tracked on ARM. | |
| 5388 ASSERT(property != NULL); | 5485 ASSERT(property != NULL); |
| 5389 Variable* var = expression_->AsVariableProxy()->AsVariable(); | 5486 cgen_->EmitKeyedLoad(); |
| 5390 ASSERT(var == NULL || var->is_global()); | |
| 5391 cgen_->EmitKeyedLoad(var != NULL); | |
| 5392 cgen_->frame()->EmitPush(r0); | 5487 cgen_->frame()->EmitPush(r0); |
| 5393 break; | 5488 break; |
| 5394 } | 5489 } |
| 5395 | 5490 |
| 5396 default: | 5491 default: |
| 5397 UNREACHABLE(); | 5492 UNREACHABLE(); |
| 5398 } | 5493 } |
| 5399 | 5494 |
| 5400 if (!persist_after_get_) { | 5495 if (!persist_after_get_) { |
| 5401 cgen_->UnloadReference(this); | 5496 cgen_->UnloadReference(this); |
| (...skipping 3946 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 9348 | 9443 |
| 9349 // Just jump to runtime to add the two strings. | 9444 // Just jump to runtime to add the two strings. |
| 9350 __ bind(&string_add_runtime); | 9445 __ bind(&string_add_runtime); |
| 9351 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 9446 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
| 9352 } | 9447 } |
| 9353 | 9448 |
| 9354 | 9449 |
| 9355 #undef __ | 9450 #undef __ |
| 9356 | 9451 |
| 9357 } } // namespace v8::internal | 9452 } } // namespace v8::internal |
| OLD | NEW |