Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 9126 matching lines...) | |
| 9137 } | 9137 } |
| 9138 #endif | 9138 #endif |
| 9139 | 9139 |
| 9140 | 9140 |
| 9141 // Emit a LoadIC call to get the value from receiver and leave it in | 9141 // Emit a LoadIC call to get the value from receiver and leave it in |
| 9142 // dst. | 9142 // dst. |
| 9143 class DeferredReferenceGetNamedValue: public DeferredCode { | 9143 class DeferredReferenceGetNamedValue: public DeferredCode { |
| 9144 public: | 9144 public: |
| 9145 DeferredReferenceGetNamedValue(Register dst, | 9145 DeferredReferenceGetNamedValue(Register dst, |
| 9146 Register receiver, | 9146 Register receiver, |
| 9147 Handle<String> name) | 9147 Handle<String> name, |
| 9148 : dst_(dst), receiver_(receiver), name_(name) { | 9148 bool is_contextual) |
| 9149 set_comment("[ DeferredReferenceGetNamedValue"); | 9149 : dst_(dst), |
| 9150 receiver_(receiver), | |
| 9151 name_(name), | |
| 9152 is_contextual_(is_contextual) { | |
| 9153 set_comment(is_contextual | |
| 9154 ? "[ DeferredReferenceGetNamedValue (contextual)" | |
| 9155 : "[ DeferredReferenceGetNamedValue"); | |
| 9150 } | 9156 } |
| 9151 | 9157 |
| 9152 virtual void Generate(); | 9158 virtual void Generate(); |
| 9153 | 9159 |
| 9154 Label* patch_site() { return &patch_site_; } | 9160 Label* patch_site() { return &patch_site_; } |
| 9155 | 9161 |
| 9156 private: | 9162 private: |
| 9157 Label patch_site_; | 9163 Label patch_site_; |
| 9158 Register dst_; | 9164 Register dst_; |
| 9159 Register receiver_; | 9165 Register receiver_; |
| 9160 Handle<String> name_; | 9166 Handle<String> name_; |
| 9167 bool is_contextual_; | |
| 9161 }; | 9168 }; |
| 9162 | 9169 |
| 9163 | 9170 |
| 9164 void DeferredReferenceGetNamedValue::Generate() { | 9171 void DeferredReferenceGetNamedValue::Generate() { |
| 9165 if (!receiver_.is(eax)) { | 9172 if (!receiver_.is(eax)) { |
| 9166 __ mov(eax, receiver_); | 9173 __ mov(eax, receiver_); |
| 9167 } | 9174 } |
| 9168 __ Set(ecx, Immediate(name_)); | 9175 __ Set(ecx, Immediate(name_)); |
| 9169 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 9176 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 9170 __ call(ic, RelocInfo::CODE_TARGET); | 9177 RelocInfo::Mode mode = is_contextual_ |
| 9171 // The call must be followed by a test eax instruction to indicate | 9178 ? RelocInfo::CODE_TARGET_CONTEXT |
| 9172 // that the inobject property case was inlined. | 9179 : RelocInfo::CODE_TARGET; |
| 9180 __ call(ic, mode); | |
| 9181 // The call must be followed by: | |
| 9182 // - a test eax instruction to indicate that the inobject property | |
| 9183 // case was inlined. | |
| 9184 // - a mov ecx instruction to indicate that the contextual property | |
| 9185 // load was inlined. | |
| 9173 // | 9186 // |
| 9174 // Store the delta to the map check instruction here in the test | 9187 // Store the delta to the map check instruction here in the test |
| 9175 // instruction. Use masm_-> instead of the __ macro since the | 9188 // instruction. Use masm_-> instead of the __ macro since the |
| 9176 // latter can't return a value. | 9189 // latter can't return a value. |
| 9177 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); | 9190 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); |
| 9178 // Here we use masm_-> instead of the __ macro because this is the | 9191 // Here we use masm_-> instead of the __ macro because this is the |
| 9179 // instruction that gets patched and coverage code gets in the way. | 9192 // instruction that gets patched and coverage code gets in the way. |
| 9180 masm_->test(eax, Immediate(-delta_to_patch_site)); | 9193 if (is_contextual_) { |
| 9181 __ IncrementCounter(&Counters::named_load_inline_miss, 1); | 9194 masm_->mov(ecx, -delta_to_patch_site); |
| 9195 __ IncrementCounter(&Counters::named_load_global_inline_miss, 1); | |
| 9196 } else { | |
| 9197 masm_->test(eax, Immediate(-delta_to_patch_site)); | |
| 9198 __ IncrementCounter(&Counters::named_load_inline_miss, 1); | |
| 9199 } | |
| 9182 | 9200 |
| 9183 if (!dst_.is(eax)) __ mov(dst_, eax); | 9201 if (!dst_.is(eax)) __ mov(dst_, eax); |
| 9184 } | 9202 } |
| 9185 | 9203 |
| 9186 | 9204 |
| 9187 class DeferredReferenceGetKeyedValue: public DeferredCode { | 9205 class DeferredReferenceGetKeyedValue: public DeferredCode { |
| 9188 public: | 9206 public: |
| 9189 explicit DeferredReferenceGetKeyedValue(Register dst, | 9207 explicit DeferredReferenceGetKeyedValue(Register dst, |
| 9190 Register receiver, | 9208 Register receiver, |
| 9191 Register key) | 9209 Register key) |
| (...skipping 150 matching lines...) | |
| 9342 masm_->test(eax, Immediate(-delta_to_patch_site)); | 9360 masm_->test(eax, Immediate(-delta_to_patch_site)); |
| 9343 // Restore value (returned from store IC) register. | 9361 // Restore value (returned from store IC) register. |
| 9344 if (!old_value.is(eax)) __ mov(old_value, eax); | 9362 if (!old_value.is(eax)) __ mov(old_value, eax); |
| 9345 } | 9363 } |
| 9346 | 9364 |
| 9347 | 9365 |
| 9348 Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { | 9366 Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { |
| 9349 #ifdef DEBUG | 9367 #ifdef DEBUG |
| 9350 int original_height = frame()->height(); | 9368 int original_height = frame()->height(); |
| 9351 #endif | 9369 #endif |
| 9370 | |
| 9371 bool contextual_load_in_builtin = | |
| 9372 is_contextual && | |
| 9373 (Bootstrapper::IsActive() || | |
| 9374 (!info_->closure().is_null() && info_->closure()->IsBuiltin())); | |
| 9375 | |
| 9352 Result result; | 9376 Result result; |
| 9353 // Do not inline the inobject property case for loads from the global | 9377 // Do not inline in the global code or when not in loop. |
| 9354 // object. Also do not inline for unoptimized code. This saves time in | 9378 if (scope()->is_global_scope() || |
| 9355 // the code generator. Unoptimized code is toplevel code or code that is | 9379 loop_nesting() == 0 || |
| 9356 // not in a loop. | 9380 contextual_load_in_builtin) { |
| 9357 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { | |
| 9358 Comment cmnt(masm(), "[ Load from named Property"); | 9381 Comment cmnt(masm(), "[ Load from named Property"); |
| 9359 frame()->Push(name); | 9382 frame()->Push(name); |
| 9360 | 9383 |
| 9361 RelocInfo::Mode mode = is_contextual | 9384 RelocInfo::Mode mode = is_contextual |
| 9362 ? RelocInfo::CODE_TARGET_CONTEXT | 9385 ? RelocInfo::CODE_TARGET_CONTEXT |
| 9363 : RelocInfo::CODE_TARGET; | 9386 : RelocInfo::CODE_TARGET; |
| 9364 result = frame()->CallLoadIC(mode); | 9387 result = frame()->CallLoadIC(mode); |
| 9365 // A test eax instruction following the call signals that the inobject | 9388 // A test eax instruction following the call signals that the inobject |
| 9366 // property case was inlined. Ensure that there is not a test eax | 9389 // property case was inlined. Ensure that there is not a test eax |
| 9367 // instruction here. | 9390 // instruction here. |
| 9368 __ nop(); | 9391 __ nop(); |
| 9369 } else { | 9392 } else { |
| 9370 // Inline the inobject property case. | 9393 // Inline the property load. |
| 9371 Comment cmnt(masm(), "[ Inlined named property load"); | 9394 Comment cmnt(masm(), is_contextual |
| 9395 ? "[ Inlined contextual property load" | |
| 9396 : "[ Inlined named property load"); | |
| 9372 Result receiver = frame()->Pop(); | 9397 Result receiver = frame()->Pop(); |
| 9373 receiver.ToRegister(); | 9398 receiver.ToRegister(); |
| 9374 | 9399 |
| 9375 result = allocator()->Allocate(); | 9400 result = allocator()->Allocate(); |
| 9376 ASSERT(result.is_valid()); | 9401 ASSERT(result.is_valid()); |
| 9377 DeferredReferenceGetNamedValue* deferred = | 9402 DeferredReferenceGetNamedValue* deferred = |
| 9378 new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name); | 9403 new DeferredReferenceGetNamedValue(result.reg(), |
| 9404 receiver.reg(), | |
| 9405 name, | |
| 9406 is_contextual); | |
| 9379 | 9407 |
| 9380 // Check that the receiver is a heap object. | 9408 if (!is_contextual) { |
| 9381 __ test(receiver.reg(), Immediate(kSmiTagMask)); | 9409 // Check that the receiver is a heap object. |
| 9382 deferred->Branch(zero); | 9410 __ test(receiver.reg(), Immediate(kSmiTagMask)); |
| 9411 deferred->Branch(zero); | |
| 9412 } | |
| 9383 | 9413 |
| 9384 __ bind(deferred->patch_site()); | 9414 __ bind(deferred->patch_site()); |
| 9385 // This is the map check instruction that will be patched (so we can't | 9415 // This is the map check instruction that will be patched (so we can't |
| 9386 // use the double underscore macro that may insert instructions). | 9416 // use the double underscore macro that may insert instructions). |
| 9387 // Initially use an invalid map to force a failure. | 9417 // Initially use an invalid map to force a failure. |
| 9388 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), | 9418 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), |
| 9389 Immediate(Factory::null_value())); | 9419 Immediate(Factory::null_value())); |
| 9390 // This branch is always a forwards branch so it's always a fixed size | 9420 // This branch is always a forwards branch so it's always a fixed size |
| 9391 // which allows the assert below to succeed and patching to work. | 9421 // which allows the assert below to succeed and patching to work. |
| 9392 deferred->Branch(not_equal); | 9422 deferred->Branch(not_equal); |
| 9393 | 9423 |
| 9394 // The delta from the patch label to the load offset must be statically | 9424 // The delta from the patch label to the actual load must be |
| 9395 // known. | 9425 // statically known. |
| 9396 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) == | 9426 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) == |
| 9397 LoadIC::kOffsetToLoadInstruction); | 9427 LoadIC::kOffsetToLoadInstruction); |
| 9398 // The initial (invalid) offset has to be large enough to force a 32-bit | |
| 9399 // instruction encoding to allow patching with an arbitrary offset. Use | |
| 9400 // kMaxInt (minus kHeapObjectTag). | |
| 9401 int offset = kMaxInt; | |
| 9402 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset)); | |
| 9403 | 9428 |
| 9404 __ IncrementCounter(&Counters::named_load_inline, 1); | 9429 if (is_contextual) { |
| 9430 // Load the (initially invalid) cell and get its value. | |
| 9431 masm()->mov(result.reg(), Factory::null_value()); | *Søren Thygesen Gjesse (2010/09/20 06:50:08): "How about having a --debug-code guarded assert her…" (see the sketch after the diff table)* |
| 9432 __ mov(result.reg(), | |
| 9433 FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset)); | |
| 9434 __ cmp(result.reg(), Factory::the_hole_value()); | |
| 9435 deferred->Branch(equal); | |
| 9436 __ IncrementCounter(&Counters::named_load_global_inline, 1); | |
| 9437 } else { | |
| 9438 // The initial (invalid) offset has to be large enough to force a 32-bit | |
| 9439 // instruction encoding to allow patching with an arbitrary offset. Use | |
| 9440 // kMaxInt (minus kHeapObjectTag). | |
| 9441 int offset = kMaxInt; | |
| 9442 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset)); | |
| 9443 __ IncrementCounter(&Counters::named_load_inline, 1); | |
| 9444 } | |
| 9445 | |
| 9405 deferred->BindExit(); | 9446 deferred->BindExit(); |
| 9406 } | 9447 } |
| 9407 ASSERT(frame()->height() == original_height - 1); | 9448 ASSERT(frame()->height() == original_height - 1); |
| 9408 return result; | 9449 return result; |
| 9409 } | 9450 } |
| 9410 | 9451 |
| 9411 | 9452 |
| 9412 Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) { | 9453 Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) { |
| 9413 #ifdef DEBUG | 9454 #ifdef DEBUG |
| 9414 int expected_height = frame()->height() - (is_contextual ? 1 : 2); | 9455 int expected_height = frame()->height() - (is_contextual ? 1 : 2); |
| (...skipping 640 matching lines...) | |
| 10055 masm.GetCode(&desc); | 10096 masm.GetCode(&desc); |
| 10056 // Call the function from C++. | 10097 // Call the function from C++. |
| 10057 return FUNCTION_CAST<MemCopyFunction>(buffer); | 10098 return FUNCTION_CAST<MemCopyFunction>(buffer); |
| 10058 } | 10099 } |
| 10059 | 10100 |
| 10060 #undef __ | 10101 #undef __ |
| 10061 | 10102 |
| 10062 } } // namespace v8::internal | 10103 } } // namespace v8::internal |
| 10063 | 10104 |
| 10064 #endif // V8_TARGET_ARCH_IA32 | 10105 #endif // V8_TARGET_ARCH_IA32 |
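
The inline comment from Søren Thygesen Gjesse suggests a `--debug-code` guarded assert at the point where the placeholder cell is loaded. A minimal sketch of what such a check could look like, assuming a `FLAG_debug_code` flag and a `MacroAssembler::Check(cond, msg)` helper (both assumptions, not shown in this diff); the condition and the message string are purely illustrative:

```cpp
// Hypothetical sketch, not part of the reviewed patch.
// Placed right after the placeholder load:
//   masm()->mov(result.reg(), Factory::null_value());
// By the time the inlined contextual load actually executes, the null
// placeholder must have been patched to a real JSGlobalPropertyCell,
// so a debug-only check can catch an unpatched site.
if (FLAG_debug_code) {                      // assumed flag name
  __ cmp(result.reg(), Factory::null_value());
  // Check() is assumed to abort when the given condition does not hold.
  __ Check(not_equal, "Inlined contextual load was not patched");
}
```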
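
The deferred code in this patch relies on a small protocol: the instruction that follows the IC call (a `test eax, imm32` for the in-object case, a `mov ecx, imm32` for the contextual case) carries the negated distance back to the map-check instruction in its 32-bit immediate, which is how the IC later finds the site to patch. A standalone sketch of just that address arithmetic, using made-up addresses and sizes rather than V8 internals:

```cpp
// Standalone illustration of the delta-in-the-marker-immediate trick
// described in the deferred-code comments; the addresses and sizes are
// invented for the example, and this is not V8 code.
#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t patch_site = 0x1000;  // address of the map-check cmp
  const int32_t delta = 42;             // bytes emitted since the patch site
  const uintptr_t marker = patch_site + delta;  // start of the test/mov marker
  const int32_t immediate = -delta;     // value the code generator stores

  // The IC walks from the marker instruction back to the patch site by
  // adding the (negative) immediate it reads out of the instruction.
  const uintptr_t recovered = marker + immediate;
  std::printf("patch site: %#zx, recovered: %#zx\n",
              static_cast<size_t>(patch_site),
              static_cast<size_t>(recovered));
  return recovered == patch_site ? 0 : 1;
}
```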