OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 340 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
351 // Runtime::TraceExit returns the parameter as it is. | 351 // Runtime::TraceExit returns the parameter as it is. |
352 frame_->EmitPush(r0); | 352 frame_->EmitPush(r0); |
353 frame_->CallRuntime(Runtime::kTraceExit, 1); | 353 frame_->CallRuntime(Runtime::kTraceExit, 1); |
354 } | 354 } |
355 | 355 |
356 #ifdef DEBUG | 356 #ifdef DEBUG |
357 // Add a label for checking the size of the code used for returning. | 357 // Add a label for checking the size of the code used for returning. |
358 Label check_exit_codesize; | 358 Label check_exit_codesize; |
359 masm_->bind(&check_exit_codesize); | 359 masm_->bind(&check_exit_codesize); |
360 #endif | 360 #endif |
361 | 361 // Make sure that the constant pool is not emitted inside of the return |
362 { // NOLINT | 362 // sequence. |
363 // Make sure that the constant pool is not emitted inside of the return | 363 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
364 // sequence. | |
365 Assembler::BlockConstPoolScope block_const_pool(masm_); | |
366 | |
367 // Tear down the frame which will restore the caller's frame pointer and | 364 // Tear down the frame which will restore the caller's frame pointer and |
368 // the link register. | 365 // the link register. |
369 frame_->Exit(); | 366 frame_->Exit(); |
370 | 367 |
371 // Here we use masm_-> instead of the __ macro to avoid the code coverage | 368 // Here we use masm_-> instead of the __ macro to avoid the code coverage |
372 // tool from instrumenting as we rely on the code size here. | 369 // tool from instrumenting as we rely on the code size here. |
373 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize; | 370 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize; |
374 masm_->add(sp, sp, Operand(sp_delta)); | 371 masm_->add(sp, sp, Operand(sp_delta)); |
375 masm_->Jump(lr); | 372 masm_->Jump(lr); |
376 } | 373 } |
377 | 374 |
378 #ifdef DEBUG | 375 #ifdef DEBUG |
379 // Check that the size of the code used for returning matches what is | 376 // Check that the size of the code used for returning matches what is |
380 // expected by the debugger. If the sp_delta above cannot be encoded in the | 377 // expected by the debugger. If the sp_delta above cannot be encoded in the |
381 // add instruction the add will generate two instructions. | 378 // add instruction the add will generate two instructions. |
382 int return_sequence_length = | 379 int return_sequence_length = |
383 masm_->InstructionsGeneratedSince(&check_exit_codesize); | 380 masm_->InstructionsGeneratedSince(&check_exit_codesize); |
384 CHECK(return_sequence_length == Assembler::kJSReturnSequenceLength || | 381 CHECK(return_sequence_length == Assembler::kJSReturnSequenceLength || |
385 return_sequence_length == Assembler::kJSReturnSequenceLength + 1); | 382 return_sequence_length == Assembler::kJSReturnSequenceLength + 1); |
386 #endif | 383 #endif |
387 } | 384 } |
388 | 385 |
389 // Adjust for function-level loop nesting. | 386 // Adjust for function-level loop nesting. |
390 ASSERT(loop_nesting_ == info->loop_nesting()); | 387 ASSERT(loop_nesting_ == info->loop_nesting()); |
391 loop_nesting_ = 0; | 388 loop_nesting_ = 0; |
392 | 389 |
393 // Code generation state must be reset. | 390 // Code generation state must be reset. |
394 ASSERT(!has_cc()); | 391 ASSERT(!has_cc()); |
395 ASSERT(state_ == NULL); | 392 ASSERT(state_ == NULL); |
| 393 ASSERT(loop_nesting() == 0); |
396 ASSERT(!function_return_is_shadowed_); | 394 ASSERT(!function_return_is_shadowed_); |
397 function_return_.Unuse(); | 395 function_return_.Unuse(); |
398 DeleteFrame(); | 396 DeleteFrame(); |
399 | 397 |
400 // Process any deferred code using the register allocator. | 398 // Process any deferred code using the register allocator. |
401 if (!HasStackOverflow()) { | 399 if (!HasStackOverflow()) { |
402 ProcessDeferred(); | 400 ProcessDeferred(); |
403 } | 401 } |
404 | 402 |
405 allocator_ = NULL; | 403 allocator_ = NULL; |
(...skipping 2527 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2933 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); | 2931 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); |
2934 __ tst(tmp2, tmp2); | 2932 __ tst(tmp2, tmp2); |
2935 slow->Branch(ne); | 2933 slow->Branch(ne); |
2936 // Load next context in chain. | 2934 // Load next context in chain. |
2937 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); | 2935 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); |
2938 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); | 2936 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
2939 __ b(&next); | 2937 __ b(&next); |
2940 __ bind(&fast); | 2938 __ bind(&fast); |
2941 } | 2939 } |
2942 | 2940 |
2943 // All extension objects were empty and it is safe to use a global | |
2944 // load IC call. | |
2945 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | |
2946 // Load the global object. | 2941 // Load the global object. |
2947 LoadGlobal(); | 2942 LoadGlobal(); |
2948 // Setup the name register. | 2943 // Setup the name register and call load IC. |
2949 __ mov(r2, Operand(slot->var()->name())); | 2944 __ mov(r2, Operand(slot->var()->name())); |
2950 // Call IC stub. | 2945 frame_->CallLoadIC(typeof_state == INSIDE_TYPEOF |
2951 if (typeof_state == INSIDE_TYPEOF) { | 2946 ? RelocInfo::CODE_TARGET |
2952 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); | 2947 : RelocInfo::CODE_TARGET_CONTEXT); |
2953 } else { | |
2954 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET_CONTEXT, 0); | |
2955 } | |
2956 | |
2957 // Drop the global object. The result is in r0. | 2948 // Drop the global object. The result is in r0. |
2958 frame_->Drop(); | 2949 frame_->Drop(); |
2959 } | 2950 } |
2960 | 2951 |
2961 | 2952 |
2962 void CodeGenerator::VisitSlot(Slot* node) { | 2953 void CodeGenerator::VisitSlot(Slot* node) { |
2963 #ifdef DEBUG | 2954 #ifdef DEBUG |
2964 int original_height = frame_->height(); | 2955 int original_height = frame_->height(); |
2965 #endif | 2956 #endif |
2966 Comment cmnt(masm_, "[ Slot"); | 2957 Comment cmnt(masm_, "[ Slot"); |
(...skipping 1961 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4928 } | 4919 } |
4929 | 4920 |
4930 default: | 4921 default: |
4931 UNREACHABLE(); | 4922 UNREACHABLE(); |
4932 } | 4923 } |
4933 ASSERT((has_cc() && frame_->height() == original_height) || | 4924 ASSERT((has_cc() && frame_->height() == original_height) || |
4934 (!has_cc() && frame_->height() == original_height + 1)); | 4925 (!has_cc() && frame_->height() == original_height + 1)); |
4935 } | 4926 } |
4936 | 4927 |
4937 | 4928 |
| 4929 class DeferredReferenceGetNamedValue: public DeferredCode { |
| 4930 public: |
| 4931 explicit DeferredReferenceGetNamedValue(Handle<String> name) : name_(name) { |
| 4932 set_comment("[ DeferredReferenceGetNamedValue"); |
| 4933 } |
| 4934 |
| 4935 virtual void Generate(); |
| 4936 |
| 4937 private: |
| 4938 Handle<String> name_; |
| 4939 }; |
| 4940 |
| 4941 |
| 4942 void DeferredReferenceGetNamedValue::Generate() { |
| 4943 __ IncrementCounter(&Counters::named_load_inline_miss, 1, r1, r2); |
| 4944 // Setup the name register and call load IC. |
| 4945 __ mov(r2, Operand(name_)); |
| 4946 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 4947 __ Call(ic, RelocInfo::CODE_TARGET); |
| 4948 // The call must be followed by a b instruction to indicate that the inobject |
| 4949 // property case was inlined. Jumping back from the deferred code ensures |
| 4950 // that. |
| 4951 } |
| 4952 |
| 4953 |
| 4954 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { |
| 4955 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { |
| 4956 Comment cmnt(masm(), "[ Load from named Property"); |
| 4957 // Setup the name register and call load IC. |
| 4958 __ mov(r2, Operand(name)); |
| 4959 frame_->CallLoadIC(is_contextual |
| 4960 ? RelocInfo::CODE_TARGET_CONTEXT |
| 4961 : RelocInfo::CODE_TARGET); |
| 4962 } else { |
| 4963 // Inline the inobject property case. |
| 4964 Comment cmnt(masm(), "[ Inlined named property load"); |
| 4965 |
| 4966 DeferredReferenceGetNamedValue* deferred = |
| 4967 new DeferredReferenceGetNamedValue(name); |
| 4968 |
| 4969 // The following instructions are the inlined load of an in-object property. |
| 4970 // Parts of this code are patched, so the exact instructions generated need |
| 4971 // to be fixed. Therefore the constant pool is blocked when generating |
| 4972 // this code. |
| 4973 #ifdef DEBUG |
| 4974 int kInlinedNamedLoadInstructions = 8; |
| 4975 Label check_inlined_codesize; |
| 4976 masm_->bind(&check_inlined_codesize); |
| 4977 #endif |
| 4978 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 4979 // Load the receiver from the stack. |
| 4980 __ ldr(r1, MemOperand(sp, 0)); |
| 4981 |
| 4982 // Check that the receiver is a heap object. |
| 4983 __ tst(r1, Operand(kSmiTagMask)); |
| 4984 deferred->Branch(eq); |
| 4985 |
| 4986 // Check the map. The null map used below is patched by the inline cache |
| 4987 // code. |
| 4988 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 4989 __ mov(r3, Operand(Factory::null_value())); |
| 4990 __ cmp(r2, r3); |
| 4991 deferred->Branch(ne); |
| 4992 |
| 4993 // Initially use an invalid index. The index will be patched by the |
| 4994 // inline cache code. |
| 4995 __ ldr(r0, MemOperand(r1, 0)); |
| 4996 } |
| 4997 |
| 4998 // Make sure that the expected number of instructions are generated. |
| 4999 ASSERT_EQ(kInlinedNamedLoadInstructions, |
| 5000 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
| 5001 |
| 5002 __ IncrementCounter(&Counters::named_load_inline, 1, r1, r2); |
| 5003 deferred->BindExit(); |
| 5004 } |
| 5005 } |
| 5006 |
| 5007 |
4938 void CodeGenerator::EmitKeyedLoad(bool is_global) { | 5008 void CodeGenerator::EmitKeyedLoad(bool is_global) { |
4939 Comment cmnt(masm_, "[ Load from keyed Property"); | 5009 Comment cmnt(masm_, "[ Load from keyed Property"); |
4940 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 5010 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
4941 RelocInfo::Mode rmode = is_global | 5011 RelocInfo::Mode rmode = is_global |
4942 ? RelocInfo::CODE_TARGET_CONTEXT | 5012 ? RelocInfo::CODE_TARGET_CONTEXT |
4943 : RelocInfo::CODE_TARGET; | 5013 : RelocInfo::CODE_TARGET; |
4944 frame_->CallCodeObject(ic, rmode, 0); | 5014 frame_->CallCodeObject(ic, rmode, 0); |
4945 } | 5015 } |
4946 | 5016 |
4947 | 5017 |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4984 switch (type_) { | 5054 switch (type_) { |
4985 case SLOT: { | 5055 case SLOT: { |
4986 Comment cmnt(masm, "[ Load from Slot"); | 5056 Comment cmnt(masm, "[ Load from Slot"); |
4987 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); | 5057 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); |
4988 ASSERT(slot != NULL); | 5058 ASSERT(slot != NULL); |
4989 cgen_->LoadFromSlot(slot, NOT_INSIDE_TYPEOF); | 5059 cgen_->LoadFromSlot(slot, NOT_INSIDE_TYPEOF); |
4990 break; | 5060 break; |
4991 } | 5061 } |
4992 | 5062 |
4993 case NAMED: { | 5063 case NAMED: { |
4994 VirtualFrame* frame = cgen_->frame(); | |
4995 Comment cmnt(masm, "[ Load from named Property"); | |
4996 Handle<String> name(GetName()); | |
4997 Variable* var = expression_->AsVariableProxy()->AsVariable(); | 5064 Variable* var = expression_->AsVariableProxy()->AsVariable(); |
4998 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 5065 bool is_global = var != NULL; |
4999 // Setup the name register. | 5066 ASSERT(!is_global || var->is_global()); |
5000 __ mov(r2, Operand(name)); | 5067 cgen_->EmitNamedLoad(GetName(), is_global); |
5001 ASSERT(var == NULL || var->is_global()); | 5068 cgen_->frame()->EmitPush(r0); |
5002 RelocInfo::Mode rmode = (var == NULL) | |
5003 ? RelocInfo::CODE_TARGET | |
5004 : RelocInfo::CODE_TARGET_CONTEXT; | |
5005 frame->CallCodeObject(ic, rmode, 0); | |
5006 frame->EmitPush(r0); | |
5007 break; | 5069 break; |
5008 } | 5070 } |
5009 | 5071 |
5010 case KEYED: { | 5072 case KEYED: { |
5011 // TODO(181): Implement inlined version of array indexing once | 5073 // TODO(181): Implement inlined version of array indexing once |
5012 // loop nesting is properly tracked on ARM. | 5074 // loop nesting is properly tracked on ARM. |
5013 ASSERT(property != NULL); | 5075 ASSERT(property != NULL); |
5014 Variable* var = expression_->AsVariableProxy()->AsVariable(); | 5076 Variable* var = expression_->AsVariableProxy()->AsVariable(); |
5015 ASSERT(var == NULL || var->is_global()); | 5077 ASSERT(var == NULL || var->is_global()); |
5016 cgen_->EmitKeyedLoad(var != NULL); | 5078 cgen_->EmitKeyedLoad(var != NULL); |
(...skipping 3938 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
8955 | 9017 |
8956 // Just jump to runtime to add the two strings. | 9018 // Just jump to runtime to add the two strings. |
8957 __ bind(&string_add_runtime); | 9019 __ bind(&string_add_runtime); |
8958 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 9020 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
8959 } | 9021 } |
8960 | 9022 |
8961 | 9023 |
8962 #undef __ | 9024 #undef __ |
8963 | 9025 |
8964 } } // namespace v8::internal | 9026 } } // namespace v8::internal |
OLD | NEW |