| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 21 matching lines...) |
| 32 #include "ic-inl.h" | 32 #include "ic-inl.h" |
| 33 #include "codegen-inl.h" | 33 #include "codegen-inl.h" |
| 34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 #define __ ACCESS_MASM(masm) | 39 #define __ ACCESS_MASM(masm) |
| 40 | 40 |
| 41 | 41 |
| 42 static void ProbeTable(MacroAssembler* masm, | 42 static void ProbeTable(Isolate* isolate, |
| | 43 MacroAssembler* masm, |
| 43 Code::Flags flags, | 44 Code::Flags flags, |
| 44 StubCache::Table table, | 45 StubCache::Table table, |
| 45 Register name, | 46 Register name, |
| 46 Register offset, | 47 Register offset, |
| 47 Register scratch, | 48 Register scratch, |
| 48 Register scratch2) { | 49 Register scratch2) { |
| 49 ExternalReference key_offset(SCTableReference::keyReference(table)); | 50 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
| 50 ExternalReference value_offset(SCTableReference::valueReference(table)); | 51 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
| 51 | 52 |
| 52 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); | 53 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); |
| 53 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); | 54 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); |
| 54 | 55 |
| 55 // Check the relative positions of the address fields. | 56 // Check the relative positions of the address fields. |
| 56 ASSERT(value_off_addr > key_off_addr); | 57 ASSERT(value_off_addr > key_off_addr); |
| 57 ASSERT((value_off_addr - key_off_addr) % 4 == 0); | 58 ASSERT((value_off_addr - key_off_addr) % 4 == 0); |
| 58 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); | 59 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); |
| 59 | 60 |
| 60 Label miss; | 61 Label miss; |
| (...skipping 33 matching lines...) |
| 94 // must always call a backup property check that is complete. | 95 // must always call a backup property check that is complete. |
| 95 // This function is safe to call if the receiver has fast properties. | 96 // This function is safe to call if the receiver has fast properties. |
| 96 // Name must be a symbol and receiver must be a heap object. | 97 // Name must be a symbol and receiver must be a heap object. |
| 97 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, | 98 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, |
| 98 Label* miss_label, | 99 Label* miss_label, |
| 99 Register receiver, | 100 Register receiver, |
| 100 String* name, | 101 String* name, |
| 101 Register scratch0, | 102 Register scratch0, |
| 102 Register scratch1) { | 103 Register scratch1) { |
| 103 ASSERT(name->IsSymbol()); | 104 ASSERT(name->IsSymbol()); |
| 104 __ IncrementCounter(&Counters::negative_lookups, 1, scratch0, scratch1); | 105 __ IncrementCounter(COUNTERS->negative_lookups(), 1, scratch0, scratch1); |
| 105 __ IncrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1); | 106 __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1); |
| 106 | 107 |
| 107 Label done; | 108 Label done; |
| 108 | 109 |
| 109 const int kInterceptorOrAccessCheckNeededMask = | 110 const int kInterceptorOrAccessCheckNeededMask = |
| 110 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); | 111 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); |
| 111 | 112 |
| 112 // Bail out if the receiver has a named interceptor or requires access checks. | 113 // Bail out if the receiver has a named interceptor or requires access checks. |
| 113 Register map = scratch1; | 114 Register map = scratch1; |
| 114 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 115 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 115 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); | 116 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); |
| (...skipping 75 matching lines...) |
| 191 | 192 |
| 192 // Restore the properties. | 193 // Restore the properties. |
| 193 __ ldr(properties, | 194 __ ldr(properties, |
| 194 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 195 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 195 } else { | 196 } else { |
| 196 // Give up probing if still not found the undefined value. | 197 // Give up probing if still not found the undefined value. |
| 197 __ b(ne, miss_label); | 198 __ b(ne, miss_label); |
| 198 } | 199 } |
| 199 } | 200 } |
| 200 __ bind(&done); | 201 __ bind(&done); |
| 201 __ DecrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1); | 202 __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1); |
| 202 } | 203 } |
| 203 | 204 |
| 204 | 205 |
| 205 void StubCache::GenerateProbe(MacroAssembler* masm, | 206 void StubCache::GenerateProbe(MacroAssembler* masm, |
| 206 Code::Flags flags, | 207 Code::Flags flags, |
| 207 Register receiver, | 208 Register receiver, |
| 208 Register name, | 209 Register name, |
| 209 Register scratch, | 210 Register scratch, |
| 210 Register extra, | 211 Register extra, |
| 211 Register extra2) { | 212 Register extra2) { |
| | 213 Isolate* isolate = Isolate::Current(); |
| 212 Label miss; | 214 Label miss; |
| 213 | 215 |
| 214 // Make sure that code is valid. The shifting code relies on the | 216 // Make sure that code is valid. The shifting code relies on the |
| 215 // entry size being 8. | 217 // entry size being 8. |
| 216 ASSERT(sizeof(Entry) == 8); | 218 ASSERT(sizeof(Entry) == 8); |
| 217 | 219 |
| 218 // Make sure the flags does not name a specific type. | 220 // Make sure the flags does not name a specific type. |
| 219 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); | 221 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); |
| 220 | 222 |
| 221 // Make sure that there are no register conflicts. | 223 // Make sure that there are no register conflicts. |
| (...skipping 19 matching lines...) |
| 241 // Get the map of the receiver and compute the hash. | 243 // Get the map of the receiver and compute the hash. |
| 242 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); | 244 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); |
| 243 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 245 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 244 __ add(scratch, scratch, Operand(ip)); | 246 __ add(scratch, scratch, Operand(ip)); |
| 245 __ eor(scratch, scratch, Operand(flags)); | 247 __ eor(scratch, scratch, Operand(flags)); |
| 246 __ and_(scratch, | 248 __ and_(scratch, |
| 247 scratch, | 249 scratch, |
| 248 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 250 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); |
| 249 | 251 |
| 250 // Probe the primary table. | 252 // Probe the primary table. |
| 251 ProbeTable(masm, flags, kPrimary, name, scratch, extra, extra2); | 253 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2); |
| 252 | 254 |
| 253 // Primary miss: Compute hash for secondary probe. | 255 // Primary miss: Compute hash for secondary probe. |
| 254 __ sub(scratch, scratch, Operand(name)); | 256 __ sub(scratch, scratch, Operand(name)); |
| 255 __ add(scratch, scratch, Operand(flags)); | 257 __ add(scratch, scratch, Operand(flags)); |
| 256 __ and_(scratch, | 258 __ and_(scratch, |
| 257 scratch, | 259 scratch, |
| 258 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); | 260 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); |
| 259 | 261 |
| 260 // Probe the secondary table. | 262 // Probe the secondary table. |
| 261 ProbeTable(masm, flags, kSecondary, name, scratch, extra, extra2); | 263 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2); |
| 262 | 264 |
| 263 // Cache miss: Fall-through and let caller handle the miss by | 265 // Cache miss: Fall-through and let caller handle the miss by |
| 264 // entering the runtime system. | 266 // entering the runtime system. |
| 265 __ bind(&miss); | 267 __ bind(&miss); |
| 266 } | 268 } |
| 267 | 269 |
| 268 | 270 |
| 269 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, | 271 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, |
| 270 int index, | 272 int index, |
| 271 Register prototype) { | 273 Register prototype) { |
| 272 // Load the global or builtins object from the current context. | 274 // Load the global or builtins object from the current context. |
| 273 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 275 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 274 // Load the global context from the global or builtins object. | 276 // Load the global context from the global or builtins object. |
| 275 __ ldr(prototype, | 277 __ ldr(prototype, |
| 276 FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset)); | 278 FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset)); |
| 277 // Load the function from the global context. | 279 // Load the function from the global context. |
| 278 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index))); | 280 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index))); |
| 279 // Load the initial map. The global functions all have initial maps. | 281 // Load the initial map. The global functions all have initial maps. |
| 280 __ ldr(prototype, | 282 __ ldr(prototype, |
| 281 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); | 283 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); |
| 282 // Load the prototype from the initial map. | 284 // Load the prototype from the initial map. |
| 283 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); | 285 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 284 } | 286 } |
| 285 | 287 |
| 286 | 288 |
| 287 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( | 289 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( |
| 288 MacroAssembler* masm, int index, Register prototype, Label* miss) { | 290 MacroAssembler* masm, int index, Register prototype, Label* miss) { |
| 289 // Check we're still in the same context. | 291 // Check we're still in the same context. |
| 290 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 292 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 291 __ Move(ip, Top::global()); | 293 __ Move(ip, Isolate::Current()->global()); |
| 292 __ cmp(prototype, ip); | 294 __ cmp(prototype, ip); |
| 293 __ b(ne, miss); | 295 __ b(ne, miss); |
| 294 // Get the global function with the given index. | 296 // Get the global function with the given index. |
| 295 JSFunction* function = JSFunction::cast(Top::global_context()->get(index)); | 297 JSFunction* function = JSFunction::cast( |
| | 298 Isolate::Current()->global_context()->get(index)); |
| 296 // Load its initial map. The global functions all have initial maps. | 299 // Load its initial map. The global functions all have initial maps. |
| 297 __ Move(prototype, Handle<Map>(function->initial_map())); | 300 __ Move(prototype, Handle<Map>(function->initial_map())); |
| 298 // Load the prototype from the initial map. | 301 // Load the prototype from the initial map. |
| 299 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); | 302 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 300 } | 303 } |
| 301 | 304 |
| 302 | 305 |
| 303 // Load a fast property out of a holder object (src). In-object properties | 306 // Load a fast property out of a holder object (src). In-object properties |
| 304 // are loaded directly otherwise the property is loaded from the properties | 307 // are loaded directly otherwise the property is loaded from the properties |
| 305 // fixed array. | 308 // fixed array. |
| (...skipping 196 matching lines...) |
| 502 // Return the value (register r0). | 505 // Return the value (register r0). |
| 503 __ bind(&exit); | 506 __ bind(&exit); |
| 504 __ Ret(); | 507 __ Ret(); |
| 505 } | 508 } |
| 506 | 509 |
| 507 | 510 |
| 508 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { | 511 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { |
| 509 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); | 512 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); |
| 510 Code* code = NULL; | 513 Code* code = NULL; |
| 511 if (kind == Code::LOAD_IC) { | 514 if (kind == Code::LOAD_IC) { |
| 512 code = Builtins::builtin(Builtins::LoadIC_Miss); | 515 code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss); |
| 513 } else { | 516 } else { |
| 514 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss); | 517 code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss); |
| 515 } | 518 } |
| 516 | 519 |
| 517 Handle<Code> ic(code); | 520 Handle<Code> ic(code); |
| 518 __ Jump(ic, RelocInfo::CODE_TARGET); | 521 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 519 } | 522 } |
| 520 | 523 |
| 521 | 524 |
| 522 static void GenerateCallFunction(MacroAssembler* masm, | 525 static void GenerateCallFunction(MacroAssembler* masm, |
| 523 Object* object, | 526 Object* object, |
| 524 const ParameterCount& arguments, | 527 const ParameterCount& arguments, |
| (...skipping 20 matching lines...) |
| 545 } | 548 } |
| 546 | 549 |
| 547 | 550 |
| 548 static void PushInterceptorArguments(MacroAssembler* masm, | 551 static void PushInterceptorArguments(MacroAssembler* masm, |
| 549 Register receiver, | 552 Register receiver, |
| 550 Register holder, | 553 Register holder, |
| 551 Register name, | 554 Register name, |
| 552 JSObject* holder_obj) { | 555 JSObject* holder_obj) { |
| 553 __ push(name); | 556 __ push(name); |
| 554 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor(); | 557 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor(); |
| 555 ASSERT(!Heap::InNewSpace(interceptor)); | 558 ASSERT(!HEAP->InNewSpace(interceptor)); |
| 556 Register scratch = name; | 559 Register scratch = name; |
| 557 __ mov(scratch, Operand(Handle<Object>(interceptor))); | 560 __ mov(scratch, Operand(Handle<Object>(interceptor))); |
| 558 __ push(scratch); | 561 __ push(scratch); |
| 559 __ push(receiver); | 562 __ push(receiver); |
| 560 __ push(holder); | 563 __ push(holder); |
| 561 __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset)); | 564 __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset)); |
| 562 __ push(scratch); | 565 __ push(scratch); |
| 563 } | 566 } |
| 564 | 567 |
| 565 | 568 |
| (...skipping 47 matching lines...) |
| 613 // -- sp[(argc + 4) * 4] : receiver | 616 // -- sp[(argc + 4) * 4] : receiver |
| 614 // ----------------------------------- | 617 // ----------------------------------- |
| 615 // Get the function and setup the context. | 618 // Get the function and setup the context. |
| 616 JSFunction* function = optimization.constant_function(); | 619 JSFunction* function = optimization.constant_function(); |
| 617 __ mov(r5, Operand(Handle<JSFunction>(function))); | 620 __ mov(r5, Operand(Handle<JSFunction>(function))); |
| 618 __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset)); | 621 __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset)); |
| 619 | 622 |
| 620 // Pass the additional arguments FastHandleApiCall expects. | 623 // Pass the additional arguments FastHandleApiCall expects. |
| 621 Object* call_data = optimization.api_call_info()->data(); | 624 Object* call_data = optimization.api_call_info()->data(); |
| 622 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info()); | 625 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info()); |
| 623 if (Heap::InNewSpace(call_data)) { | 626 if (HEAP->InNewSpace(call_data)) { |
| 624 __ Move(r0, api_call_info_handle); | 627 __ Move(r0, api_call_info_handle); |
| 625 __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset)); | 628 __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset)); |
| 626 } else { | 629 } else { |
| 627 __ Move(r6, Handle<Object>(call_data)); | 630 __ Move(r6, Handle<Object>(call_data)); |
| 628 } | 631 } |
| 629 // Store js function and call data. | 632 // Store js function and call data. |
| 630 __ stm(ib, sp, r5.bit() | r6.bit()); | 633 __ stm(ib, sp, r5.bit() | r6.bit()); |
| 631 | 634 |
| 632 // r2 points to call data as expected by Arguments | 635 // r2 points to call data as expected by Arguments |
| 633 // (refer to layout above). | 636 // (refer to layout above). |
| (...skipping 73 matching lines...) |
| 707 } else { | 710 } else { |
| 708 CompileRegular(masm, | 711 CompileRegular(masm, |
| 709 object, | 712 object, |
| 710 receiver, | 713 receiver, |
| 711 scratch1, | 714 scratch1, |
| 712 scratch2, | 715 scratch2, |
| 713 scratch3, | 716 scratch3, |
| 714 name, | 717 name, |
| 715 holder, | 718 holder, |
| 716 miss); | 719 miss); |
| 717 return Heap::undefined_value(); | 720 return HEAP->undefined_value(); |
| 718 } | 721 } |
| 719 } | 722 } |
| 720 | 723 |
| 721 private: | 724 private: |
| 722 MaybeObject* CompileCacheable(MacroAssembler* masm, | 725 MaybeObject* CompileCacheable(MacroAssembler* masm, |
| 723 JSObject* object, | 726 JSObject* object, |
| 724 Register receiver, | 727 Register receiver, |
| 725 Register scratch1, | 728 Register scratch1, |
| 726 Register scratch2, | 729 Register scratch2, |
| 727 Register scratch3, | 730 Register scratch3, |
| (...skipping 15 matching lines...) |
| 743 interceptor_holder); | 746 interceptor_holder); |
| 744 if (depth1 == kInvalidProtoDepth) { | 747 if (depth1 == kInvalidProtoDepth) { |
| 745 depth2 = | 748 depth2 = |
| 746 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder, | 749 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder, |
| 747 lookup->holder()); | 750 lookup->holder()); |
| 748 } | 751 } |
| 749 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) || | 752 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) || |
| 750 (depth2 != kInvalidProtoDepth); | 753 (depth2 != kInvalidProtoDepth); |
| 751 } | 754 } |
| 752 | 755 |
| 753 __ IncrementCounter(&Counters::call_const_interceptor, 1, | 756 __ IncrementCounter(COUNTERS->call_const_interceptor(), 1, |
| 754 scratch1, scratch2); | 757 scratch1, scratch2); |
| 755 | 758 |
| 756 if (can_do_fast_api_call) { | 759 if (can_do_fast_api_call) { |
| 757 __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1, | 760 __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1, |
| 758 scratch1, scratch2); | 761 scratch1, scratch2); |
| 759 ReserveSpaceForFastApiCall(masm, scratch1); | 762 ReserveSpaceForFastApiCall(masm, scratch1); |
| 760 } | 763 } |
| 761 | 764 |
| 762 // Check that the maps from receiver to interceptor's holder | 765 // Check that the maps from receiver to interceptor's holder |
| 763 // haven't changed and thus we can invoke interceptor. | 766 // haven't changed and thus we can invoke interceptor. |
| 764 Label miss_cleanup; | 767 Label miss_cleanup; |
| 765 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; | 768 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; |
| 766 Register holder = | 769 Register holder = |
| 767 stub_compiler_->CheckPrototypes(object, receiver, | 770 stub_compiler_->CheckPrototypes(object, receiver, |
| (...skipping 40 matching lines...) |
| 808 FreeSpaceForFastApiCall(masm); | 811 FreeSpaceForFastApiCall(masm); |
| 809 __ b(miss_label); | 812 __ b(miss_label); |
| 810 } | 813 } |
| 811 | 814 |
| 812 // Invoke a regular function. | 815 // Invoke a regular function. |
| 813 __ bind(®ular_invoke); | 816 __ bind(®ular_invoke); |
| 814 if (can_do_fast_api_call) { | 817 if (can_do_fast_api_call) { |
| 815 FreeSpaceForFastApiCall(masm); | 818 FreeSpaceForFastApiCall(masm); |
| 816 } | 819 } |
| 817 | 820 |
| 818 return Heap::undefined_value(); | 821 return HEAP->undefined_value(); |
| 819 } | 822 } |
| 820 | 823 |
| 821 void CompileRegular(MacroAssembler* masm, | 824 void CompileRegular(MacroAssembler* masm, |
| 822 JSObject* object, | 825 JSObject* object, |
| 823 Register receiver, | 826 Register receiver, |
| 824 Register scratch1, | 827 Register scratch1, |
| 825 Register scratch2, | 828 Register scratch2, |
| 826 Register scratch3, | 829 Register scratch3, |
| 827 String* name, | 830 String* name, |
| 828 JSObject* interceptor_holder, | 831 JSObject* interceptor_holder, |
| (...skipping 110 matching lines...) |
| 939 // Convert and store int passed in register ival to IEEE 754 single precision | 942 // Convert and store int passed in register ival to IEEE 754 single precision |
| 940 // floating point value at memory location (dst + 4 * wordoffset) | 943 // floating point value at memory location (dst + 4 * wordoffset) |
| 941 // If VFP3 is available use it for conversion. | 944 // If VFP3 is available use it for conversion. |
| 942 static void StoreIntAsFloat(MacroAssembler* masm, | 945 static void StoreIntAsFloat(MacroAssembler* masm, |
| 943 Register dst, | 946 Register dst, |
| 944 Register wordoffset, | 947 Register wordoffset, |
| 945 Register ival, | 948 Register ival, |
| 946 Register fval, | 949 Register fval, |
| 947 Register scratch1, | 950 Register scratch1, |
| 948 Register scratch2) { | 951 Register scratch2) { |
| 949 if (CpuFeatures::IsSupported(VFP3)) { | 952 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 950 CpuFeatures::Scope scope(VFP3); | 953 CpuFeatures::Scope scope(VFP3); |
| 951 __ vmov(s0, ival); | 954 __ vmov(s0, ival); |
| 952 __ add(scratch1, dst, Operand(wordoffset, LSL, 2)); | 955 __ add(scratch1, dst, Operand(wordoffset, LSL, 2)); |
| 953 __ vcvt_f32_s32(s0, s0); | 956 __ vcvt_f32_s32(s0, s0); |
| 954 __ vstr(s0, scratch1, 0); | 957 __ vstr(s0, scratch1, 0); |
| 955 } else { | 958 } else { |
| 956 Label not_special, done; | 959 Label not_special, done; |
| 957 // Move sign bit from source to destination. This works because the sign | 960 // Move sign bit from source to destination. This works because the sign |
| 958 // bit in the exponent word of the double has the same position and polarity | 961 // bit in the exponent word of the double has the same position and polarity |
| 959 // as the 2's complement sign bit in a Smi. | 962 // as the 2's complement sign bit in a Smi. |
| (...skipping 117 matching lines...) |
| 1077 // Only global objects and objects that do not require access | 1080 // Only global objects and objects that do not require access |
| 1078 // checks are allowed in stubs. | 1081 // checks are allowed in stubs. |
| 1079 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); | 1082 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); |
| 1080 | 1083 |
| 1081 ASSERT(current->GetPrototype()->IsJSObject()); | 1084 ASSERT(current->GetPrototype()->IsJSObject()); |
| 1082 JSObject* prototype = JSObject::cast(current->GetPrototype()); | 1085 JSObject* prototype = JSObject::cast(current->GetPrototype()); |
| 1083 if (!current->HasFastProperties() && | 1086 if (!current->HasFastProperties() && |
| 1084 !current->IsJSGlobalObject() && | 1087 !current->IsJSGlobalObject() && |
| 1085 !current->IsJSGlobalProxy()) { | 1088 !current->IsJSGlobalProxy()) { |
| 1086 if (!name->IsSymbol()) { | 1089 if (!name->IsSymbol()) { |
| 1087 MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name); | 1090 MaybeObject* maybe_lookup_result = HEAP->LookupSymbol(name); |
| 1088 Object* lookup_result = NULL; // Initialization to please compiler. | 1091 Object* lookup_result = NULL; // Initialization to please compiler. |
| 1089 if (!maybe_lookup_result->ToObject(&lookup_result)) { | 1092 if (!maybe_lookup_result->ToObject(&lookup_result)) { |
| 1090 set_failure(Failure::cast(maybe_lookup_result)); | 1093 set_failure(Failure::cast(maybe_lookup_result)); |
| 1091 return reg; | 1094 return reg; |
| 1092 } | 1095 } |
| 1093 name = String::cast(lookup_result); | 1096 name = String::cast(lookup_result); |
| 1094 } | 1097 } |
| 1095 ASSERT(current->property_dictionary()->FindEntry(name) == | 1098 ASSERT(current->property_dictionary()->FindEntry(name) == |
| 1096 StringDictionary::kNotFound); | 1099 StringDictionary::kNotFound); |
| 1097 | 1100 |
| 1098 GenerateDictionaryNegativeLookup(masm(), | 1101 GenerateDictionaryNegativeLookup(masm(), |
| 1099 miss, | 1102 miss, |
| 1100 reg, | 1103 reg, |
| 1101 name, | 1104 name, |
| 1102 scratch1, | 1105 scratch1, |
| 1103 scratch2); | 1106 scratch2); |
| 1104 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1107 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 1105 reg = holder_reg; // from now the object is in holder_reg | 1108 reg = holder_reg; // from now the object is in holder_reg |
| 1106 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); | 1109 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); |
| 1107 } else if (Heap::InNewSpace(prototype)) { | 1110 } else if (HEAP->InNewSpace(prototype)) { |
| 1108 // Get the map of the current object. | 1111 // Get the map of the current object. |
| 1109 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1112 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 1110 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); | 1113 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); |
| 1111 | 1114 |
| 1112 // Branch on the result of the map check. | 1115 // Branch on the result of the map check. |
| 1113 __ b(ne, miss); | 1116 __ b(ne, miss); |
| 1114 | 1117 |
| 1115 // Check access rights to the global object. This has to happen | 1118 // Check access rights to the global object. This has to happen |
| 1116 // after the map check so that we know that the object is | 1119 // after the map check so that we know that the object is |
| 1117 // actually a global object. | 1120 // actually a global object. |
| (...skipping 33 matching lines...) |
| 1151 // Go to the next object in the prototype chain. | 1154 // Go to the next object in the prototype chain. |
| 1152 current = prototype; | 1155 current = prototype; |
| 1153 } | 1156 } |
| 1154 | 1157 |
| 1155 // Check the holder map. | 1158 // Check the holder map. |
| 1156 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1159 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 1157 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); | 1160 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); |
| 1158 __ b(ne, miss); | 1161 __ b(ne, miss); |
| 1159 | 1162 |
| 1160 // Log the check depth. | 1163 // Log the check depth. |
| 1161 LOG(IntEvent("check-maps-depth", depth + 1)); | 1164 LOG(Isolate::Current(), IntEvent("check-maps-depth", depth + 1)); |
| 1162 | 1165 |
| 1163 // Perform security check for access to the global object. | 1166 // Perform security check for access to the global object. |
| 1164 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); | 1167 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); |
| 1165 if (holder->IsJSGlobalProxy()) { | 1168 if (holder->IsJSGlobalProxy()) { |
| 1166 __ CheckAccessGlobalProxy(reg, scratch1, miss); | 1169 __ CheckAccessGlobalProxy(reg, scratch1, miss); |
| 1167 }; | 1170 }; |
| 1168 | 1171 |
| 1169 // If we've skipped any global objects, it's not enough to verify | 1172 // If we've skipped any global objects, it's not enough to verify |
| 1170 // that their maps haven't changed. We also need to check that the | 1173 // that their maps haven't changed. We also need to check that the |
| 1171 // property cell for the property is still empty. | 1174 // property cell for the property is still empty. |
| (...skipping 73 matching lines...) |
| 1245 // Check that the maps haven't changed. | 1248 // Check that the maps haven't changed. |
| 1246 Register reg = | 1249 Register reg = |
| 1247 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, | 1250 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, |
| 1248 name, miss); | 1251 name, miss); |
| 1249 | 1252 |
| 1250 // Build AccessorInfo::args_ list on the stack and push property name below | 1253 // Build AccessorInfo::args_ list on the stack and push property name below |
| 1251 // the exit frame to make GC aware of them and store pointers to them. | 1254 // the exit frame to make GC aware of them and store pointers to them. |
| 1252 __ push(receiver); | 1255 __ push(receiver); |
| 1253 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_ | 1256 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_ |
| 1254 Handle<AccessorInfo> callback_handle(callback); | 1257 Handle<AccessorInfo> callback_handle(callback); |
| 1255 if (Heap::InNewSpace(callback_handle->data())) { | 1258 if (HEAP->InNewSpace(callback_handle->data())) { |
| 1256 __ Move(scratch3, callback_handle); | 1259 __ Move(scratch3, callback_handle); |
| 1257 __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset)); | 1260 __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset)); |
| 1258 } else { | 1261 } else { |
| 1259 __ Move(scratch3, Handle<Object>(callback_handle->data())); | 1262 __ Move(scratch3, Handle<Object>(callback_handle->data())); |
| 1260 } | 1263 } |
| 1261 __ Push(reg, scratch3, name_reg); | 1264 __ Push(reg, scratch3, name_reg); |
| 1262 __ mov(r0, sp); // r0 = Handle<String> | 1265 __ mov(r0, sp); // r0 = Handle<String> |
| 1263 | 1266 |
| 1264 Address getter_address = v8::ToCData<Address>(callback->getter()); | 1267 Address getter_address = v8::ToCData<Address>(callback->getter()); |
| 1265 ApiFunction fun(getter_address); | 1268 ApiFunction fun(getter_address); |
| (...skipping 193 matching lines...) |
| 1459 | 1462 |
| 1460 | 1463 |
| 1461 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, | 1464 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, |
| 1462 JSFunction* function, | 1465 JSFunction* function, |
| 1463 Label* miss) { | 1466 Label* miss) { |
| 1464 // Get the value from the cell. | 1467 // Get the value from the cell. |
| 1465 __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); | 1468 __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 1466 __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); | 1469 __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); |
| 1467 | 1470 |
| 1468 // Check that the cell contains the same function. | 1471 // Check that the cell contains the same function. |
| 1469 if (Heap::InNewSpace(function)) { | 1472 if (HEAP->InNewSpace(function)) { |
| 1470 // We can't embed a pointer to a function in new space so we have | 1473 // We can't embed a pointer to a function in new space so we have |
| 1471 // to verify that the shared function info is unchanged. This has | 1474 // to verify that the shared function info is unchanged. This has |
| 1472 // the nice side effect that multiple closures based on the same | 1475 // the nice side effect that multiple closures based on the same |
| 1473 // function can all use this call IC. Before we load through the | 1476 // function can all use this call IC. Before we load through the |
| 1474 // function, we have to verify that it still is a function. | 1477 // function, we have to verify that it still is a function. |
| 1475 __ tst(r1, Operand(kSmiTagMask)); | 1478 __ tst(r1, Operand(kSmiTagMask)); |
| 1476 __ b(eq, miss); | 1479 __ b(eq, miss); |
| 1477 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); | 1480 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); |
| 1478 __ b(ne, miss); | 1481 __ b(ne, miss); |
| 1479 | 1482 |
| 1480 // Check the shared function info. Make sure it hasn't changed. | 1483 // Check the shared function info. Make sure it hasn't changed. |
| 1481 __ Move(r3, Handle<SharedFunctionInfo>(function->shared())); | 1484 __ Move(r3, Handle<SharedFunctionInfo>(function->shared())); |
| 1482 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 1485 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 1483 __ cmp(r4, r3); | 1486 __ cmp(r4, r3); |
| 1484 __ b(ne, miss); | 1487 __ b(ne, miss); |
| 1485 } else { | 1488 } else { |
| 1486 __ cmp(r1, Operand(Handle<JSFunction>(function))); | 1489 __ cmp(r1, Operand(Handle<JSFunction>(function))); |
| 1487 __ b(ne, miss); | 1490 __ b(ne, miss); |
| 1488 } | 1491 } |
| 1489 } | 1492 } |
| 1490 | 1493 |
| 1491 | 1494 |
| 1492 MaybeObject* CallStubCompiler::GenerateMissBranch() { | 1495 MaybeObject* CallStubCompiler::GenerateMissBranch() { |
| 1493 MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(), | 1496 MaybeObject* maybe_obj = Isolate::Current()->stub_cache()->ComputeCallMiss( |
| 1494 kind_); | 1497 arguments().immediate(), kind_); |
| 1495 Object* obj; | 1498 Object* obj; |
| 1496 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 1499 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 1497 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); | 1500 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); |
| 1498 return obj; | 1501 return obj; |
| 1499 } | 1502 } |
| 1500 | 1503 |
| 1501 | 1504 |
| 1502 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, | 1505 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, |
| 1503 JSObject* holder, | 1506 JSObject* holder, |
| 1504 int index, | 1507 int index, |
| (...skipping 39 matching lines...) |
| 1544 String* name) { | 1547 String* name) { |
| 1545 // ----------- S t a t e ------------- | 1548 // ----------- S t a t e ------------- |
| 1546 // -- r2 : name | 1549 // -- r2 : name |
| 1547 // -- lr : return address | 1550 // -- lr : return address |
| 1548 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1551 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1549 // -- ... | 1552 // -- ... |
| 1550 // -- sp[argc * 4] : receiver | 1553 // -- sp[argc * 4] : receiver |
| 1551 // ----------------------------------- | 1554 // ----------------------------------- |
| 1552 | 1555 |
| 1553 // If object is not an array, bail out to regular call. | 1556 // If object is not an array, bail out to regular call. |
| 1554 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); | 1557 if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value(); |
| 1555 | 1558 |
| 1556 Label miss; | 1559 Label miss; |
| 1557 | 1560 |
| 1558 GenerateNameCheck(name, &miss); | 1561 GenerateNameCheck(name, &miss); |
| 1559 | 1562 |
| 1560 Register receiver = r1; | 1563 Register receiver = r1; |
| 1561 | 1564 |
| 1562 // Get the receiver from the stack | 1565 // Get the receiver from the stack |
| 1563 const int argc = arguments().immediate(); | 1566 const int argc = arguments().immediate(); |
| 1564 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); | 1567 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); |
| (...skipping 146 matching lines...) |
| 1711 String* name) { | 1714 String* name) { |
| 1712 // ----------- S t a t e ------------- | 1715 // ----------- S t a t e ------------- |
| 1713 // -- r2 : name | 1716 // -- r2 : name |
| 1714 // -- lr : return address | 1717 // -- lr : return address |
| 1715 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1718 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1716 // -- ... | 1719 // -- ... |
| 1717 // -- sp[argc * 4] : receiver | 1720 // -- sp[argc * 4] : receiver |
| 1718 // ----------------------------------- | 1721 // ----------------------------------- |
| 1719 | 1722 |
| 1720 // If object is not an array, bail out to regular call. | 1723 // If object is not an array, bail out to regular call. |
| 1721 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); | 1724 if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value(); |
| 1722 | 1725 |
| 1723 Label miss, return_undefined, call_builtin; | 1726 Label miss, return_undefined, call_builtin; |
| 1724 | 1727 |
| 1725 Register receiver = r1; | 1728 Register receiver = r1; |
| 1726 Register elements = r3; | 1729 Register elements = r3; |
| 1727 | 1730 |
| 1728 GenerateNameCheck(name, &miss); | 1731 GenerateNameCheck(name, &miss); |
| 1729 | 1732 |
| 1730 // Get the receiver from the stack | 1733 // Get the receiver from the stack |
| 1731 const int argc = arguments().immediate(); | 1734 const int argc = arguments().immediate(); |
| (...skipping 66 matching lines...) |
| 1798 String* name) { | 1801 String* name) { |
| 1799 // ----------- S t a t e ------------- | 1802 // ----------- S t a t e ------------- |
| 1800 // -- r2 : function name | 1803 // -- r2 : function name |
| 1801 // -- lr : return address | 1804 // -- lr : return address |
| 1802 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1805 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1803 // -- ... | 1806 // -- ... |
| 1804 // -- sp[argc * 4] : receiver | 1807 // -- sp[argc * 4] : receiver |
| 1805 // ----------------------------------- | 1808 // ----------------------------------- |
| 1806 | 1809 |
| 1807 // If object is not a string, bail out to regular call. | 1810 // If object is not a string, bail out to regular call. |
| 1808 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); | 1811 if (!object->IsString() || cell != NULL) return HEAP->undefined_value(); |
| 1809 | 1812 |
| 1810 const int argc = arguments().immediate(); | 1813 const int argc = arguments().immediate(); |
| 1811 | 1814 |
| 1812 Label miss; | 1815 Label miss; |
| 1813 Label name_miss; | 1816 Label name_miss; |
| 1814 Label index_out_of_range; | 1817 Label index_out_of_range; |
| 1815 Label* index_out_of_range_label = &index_out_of_range; | 1818 Label* index_out_of_range_label = &index_out_of_range; |
| 1816 | 1819 |
| 1817 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { | 1820 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { |
| 1818 index_out_of_range_label = &miss; | 1821 index_out_of_range_label = &miss; |
| (...skipping 65 matching lines...) |
| 1884 String* name) { | 1887 String* name) { |
| 1885 // ----------- S t a t e ------------- | 1888 // ----------- S t a t e ------------- |
| 1886 // -- r2 : function name | 1889 // -- r2 : function name |
| 1887 // -- lr : return address | 1890 // -- lr : return address |
| 1888 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1891 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1889 // -- ... | 1892 // -- ... |
| 1890 // -- sp[argc * 4] : receiver | 1893 // -- sp[argc * 4] : receiver |
| 1891 // ----------------------------------- | 1894 // ----------------------------------- |
| 1892 | 1895 |
| 1893 // If object is not a string, bail out to regular call. | 1896 // If object is not a string, bail out to regular call. |
| 1894 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); | 1897 if (!object->IsString() || cell != NULL) return HEAP->undefined_value(); |
| 1895 | 1898 |
| 1896 const int argc = arguments().immediate(); | 1899 const int argc = arguments().immediate(); |
| 1897 | 1900 |
| 1898 Label miss; | 1901 Label miss; |
| 1899 Label name_miss; | 1902 Label name_miss; |
| 1900 Label index_out_of_range; | 1903 Label index_out_of_range; |
| 1901 Label* index_out_of_range_label = &index_out_of_range; | 1904 Label* index_out_of_range_label = &index_out_of_range; |
| 1902 | 1905 |
| 1903 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { | 1906 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { |
| 1904 index_out_of_range_label = &miss; | 1907 index_out_of_range_label = &miss; |
| (...skipping 70 matching lines...) |
| 1975 // -- lr : return address | 1978 // -- lr : return address |
| 1976 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1979 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1977 // -- ... | 1980 // -- ... |
| 1978 // -- sp[argc * 4] : receiver | 1981 // -- sp[argc * 4] : receiver |
| 1979 // ----------------------------------- | 1982 // ----------------------------------- |
| 1980 | 1983 |
| 1981 const int argc = arguments().immediate(); | 1984 const int argc = arguments().immediate(); |
| 1982 | 1985 |
| 1983 // If the object is not a JSObject or we got an unexpected number of | 1986 // If the object is not a JSObject or we got an unexpected number of |
| 1984 // arguments, bail out to the regular call. | 1987 // arguments, bail out to the regular call. |
| 1985 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); | 1988 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value(); |
| 1986 | 1989 |
| 1987 Label miss; | 1990 Label miss; |
| 1988 GenerateNameCheck(name, &miss); | 1991 GenerateNameCheck(name, &miss); |
| 1989 | 1992 |
| 1990 if (cell == NULL) { | 1993 if (cell == NULL) { |
| 1991 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); | 1994 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); |
| 1992 | 1995 |
| 1993 STATIC_ASSERT(kSmiTag == 0); | 1996 STATIC_ASSERT(kSmiTag == 0); |
| 1994 __ tst(r1, Operand(kSmiTagMask)); | 1997 __ tst(r1, Operand(kSmiTagMask)); |
| 1995 __ b(eq, &miss); | 1998 __ b(eq, &miss); |
| (...skipping 50 matching lines...) |
| 2046 JSFunction* function, | 2049 JSFunction* function, |
| 2047 String* name) { | 2050 String* name) { |
| 2048 // ----------- S t a t e ------------- | 2051 // ----------- S t a t e ------------- |
| 2049 // -- r2 : function name | 2052 // -- r2 : function name |
| 2050 // -- lr : return address | 2053 // -- lr : return address |
| 2051 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 2054 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 2052 // -- ... | 2055 // -- ... |
| 2053 // -- sp[argc * 4] : receiver | 2056 // -- sp[argc * 4] : receiver |
| 2054 // ----------------------------------- | 2057 // ----------------------------------- |
| 2055 | 2058 |
| 2056 if (!CpuFeatures::IsSupported(VFP3)) return Heap::undefined_value(); | 2059 if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) |
| | 2060 return HEAP->undefined_value(); |
| | 2061 |
| 2057 CpuFeatures::Scope scope_vfp3(VFP3); | 2062 CpuFeatures::Scope scope_vfp3(VFP3); |
| 2058 | 2063 |
| 2059 const int argc = arguments().immediate(); | 2064 const int argc = arguments().immediate(); |
| 2060 | 2065 |
| 2061 // If the object is not a JSObject or we got an unexpected number of | 2066 // If the object is not a JSObject or we got an unexpected number of |
| 2062 // arguments, bail out to the regular call. | 2067 // arguments, bail out to the regular call. |
| 2063 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); | 2068 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value(); |
| 2064 | 2069 |
| 2065 Label miss, slow; | 2070 Label miss, slow; |
| 2066 GenerateNameCheck(name, &miss); | 2071 GenerateNameCheck(name, &miss); |
| 2067 | 2072 |
| 2068 if (cell == NULL) { | 2073 if (cell == NULL) { |
| 2069 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); | 2074 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); |
| 2070 | 2075 |
| 2071 STATIC_ASSERT(kSmiTag == 0); | 2076 STATIC_ASSERT(kSmiTag == 0); |
| 2072 __ JumpIfSmi(r1, &miss); | 2077 __ JumpIfSmi(r1, &miss); |
| 2073 | 2078 |
| (...skipping 121 matching lines...) |
| 2195 // -- lr : return address | 2200 // -- lr : return address |
| 2196 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 2201 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 2197 // -- ... | 2202 // -- ... |
| 2198 // -- sp[argc * 4] : receiver | 2203 // -- sp[argc * 4] : receiver |
| 2199 // ----------------------------------- | 2204 // ----------------------------------- |
| 2200 | 2205 |
| 2201 const int argc = arguments().immediate(); | 2206 const int argc = arguments().immediate(); |
| 2202 | 2207 |
| 2203 // If the object is not a JSObject or we got an unexpected number of | 2208 // If the object is not a JSObject or we got an unexpected number of |
| 2204 // arguments, bail out to the regular call. | 2209 // arguments, bail out to the regular call. |
| 2205 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); | 2210 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value(); |
| 2206 | 2211 |
| 2207 Label miss; | 2212 Label miss; |
| 2208 GenerateNameCheck(name, &miss); | 2213 GenerateNameCheck(name, &miss); |
| 2209 | 2214 |
| 2210 if (cell == NULL) { | 2215 if (cell == NULL) { |
| 2211 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); | 2216 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); |
| 2212 | 2217 |
| 2213 STATIC_ASSERT(kSmiTag == 0); | 2218 STATIC_ASSERT(kSmiTag == 0); |
| 2214 __ tst(r1, Operand(kSmiTagMask)); | 2219 __ tst(r1, Operand(kSmiTagMask)); |
| 2215 __ b(eq, &miss); | 2220 __ b(eq, &miss); |
| (...skipping 76 matching lines...) |
| 2292 MaybeObject* CallStubCompiler::CompileFastApiCall( | 2297 MaybeObject* CallStubCompiler::CompileFastApiCall( |
| 2293 const CallOptimization& optimization, | 2298 const CallOptimization& optimization, |
| 2294 Object* object, | 2299 Object* object, |
| 2295 JSObject* holder, | 2300 JSObject* holder, |
| 2296 JSGlobalPropertyCell* cell, | 2301 JSGlobalPropertyCell* cell, |
| 2297 JSFunction* function, | 2302 JSFunction* function, |
| 2298 String* name) { | 2303 String* name) { |
| 2299 ASSERT(optimization.is_simple_api_call()); | 2304 ASSERT(optimization.is_simple_api_call()); |
| 2300 // Bail out if object is a global object as we don't want to | 2305 // Bail out if object is a global object as we don't want to |
| 2301 // repatch it to global receiver. | 2306 // repatch it to global receiver. |
| 2302 if (object->IsGlobalObject()) return Heap::undefined_value(); | 2307 if (object->IsGlobalObject()) return HEAP->undefined_value(); |
| 2303 if (cell != NULL) return Heap::undefined_value(); | 2308 if (cell != NULL) return HEAP->undefined_value(); |
| 2304 int depth = optimization.GetPrototypeDepthOfExpectedType( | 2309 int depth = optimization.GetPrototypeDepthOfExpectedType( |
| 2305 JSObject::cast(object), holder); | 2310 JSObject::cast(object), holder); |
| 2306 if (depth == kInvalidProtoDepth) return Heap::undefined_value(); | 2311 if (depth == kInvalidProtoDepth) return HEAP->undefined_value(); |
| 2307 | 2312 |
| 2308 Label miss, miss_before_stack_reserved; | 2313 Label miss, miss_before_stack_reserved; |
| 2309 | 2314 |
| 2310 GenerateNameCheck(name, &miss_before_stack_reserved); | 2315 GenerateNameCheck(name, &miss_before_stack_reserved); |
| 2311 | 2316 |
| 2312 // Get the receiver from the stack. | 2317 // Get the receiver from the stack. |
| 2313 const int argc = arguments().immediate(); | 2318 const int argc = arguments().immediate(); |
| 2314 __ ldr(r1, MemOperand(sp, argc * kPointerSize)); | 2319 __ ldr(r1, MemOperand(sp, argc * kPointerSize)); |
| 2315 | 2320 |
| 2316 // Check that the receiver isn't a smi. | 2321 // Check that the receiver isn't a smi. |
| 2317 __ tst(r1, Operand(kSmiTagMask)); | 2322 __ tst(r1, Operand(kSmiTagMask)); |
| 2318 __ b(eq, &miss_before_stack_reserved); | 2323 __ b(eq, &miss_before_stack_reserved); |
| 2319 | 2324 |
| 2320 __ IncrementCounter(&Counters::call_const, 1, r0, r3); | 2325 __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3); |
| 2321 __ IncrementCounter(&Counters::call_const_fast_api, 1, r0, r3); | 2326 __ IncrementCounter(COUNTERS->call_const_fast_api(), 1, r0, r3); |
| 2322 | 2327 |
| 2323 ReserveSpaceForFastApiCall(masm(), r0); | 2328 ReserveSpaceForFastApiCall(masm(), r0); |
| 2324 | 2329 |
| 2325 // Check that the maps haven't changed and find a Holder as a side effect. | 2330 // Check that the maps haven't changed and find a Holder as a side effect. |
| 2326 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, | 2331 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, |
| 2327 depth, &miss); | 2332 depth, &miss); |
| 2328 | 2333 |
| 2329 MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc); | 2334 MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc); |
| 2330 if (result->IsFailure()) return result; | 2335 if (result->IsFailure()) return result; |
| 2331 | 2336 |
| (...skipping 43 matching lines...) |
| 2375 __ b(eq, &miss); | 2380 __ b(eq, &miss); |
| 2376 } | 2381 } |
| 2377 | 2382 |
| 2378 // Make sure that it's okay not to patch the on stack receiver | 2383 // Make sure that it's okay not to patch the on stack receiver |
| 2379 // unless we're doing a receiver map check. | 2384 // unless we're doing a receiver map check. |
| 2380 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); | 2385 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); |
| 2381 | 2386 |
| 2382 SharedFunctionInfo* function_info = function->shared(); | 2387 SharedFunctionInfo* function_info = function->shared(); |
| 2383 switch (check) { | 2388 switch (check) { |
| 2384 case RECEIVER_MAP_CHECK: | 2389 case RECEIVER_MAP_CHECK: |
| 2385 __ IncrementCounter(&Counters::call_const, 1, r0, r3); | 2390 __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3); |
| 2386 | 2391 |
| 2387 // Check that the maps haven't changed. | 2392 // Check that the maps haven't changed. |
| 2388 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, | 2393 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, |
| 2389 &miss); | 2394 &miss); |
| 2390 | 2395 |
| 2391 // Patch the receiver on the stack with the global proxy if | 2396 // Patch the receiver on the stack with the global proxy if |
| 2392 // necessary. | 2397 // necessary. |
| 2393 if (object->IsGlobalObject()) { | 2398 if (object->IsGlobalObject()) { |
| 2394 __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); | 2399 __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); |
| 2395 __ str(r3, MemOperand(sp, argc * kPointerSize)); | 2400 __ str(r3, MemOperand(sp, argc * kPointerSize)); |
| (...skipping 170 matching lines...) |
| 2566 // necessary. | 2571 // necessary. |
| 2567 if (object->IsGlobalObject()) { | 2572 if (object->IsGlobalObject()) { |
| 2568 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); | 2573 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); |
| 2569 __ str(r3, MemOperand(sp, argc * kPointerSize)); | 2574 __ str(r3, MemOperand(sp, argc * kPointerSize)); |
| 2570 } | 2575 } |
| 2571 | 2576 |
| 2572 // Setup the context (function already in r1). | 2577 // Setup the context (function already in r1). |
| 2573 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 2578 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 2574 | 2579 |
| 2575 // Jump to the cached code (tail call). | 2580 // Jump to the cached code (tail call). |
| 2576 __ IncrementCounter(&Counters::call_global_inline, 1, r3, r4); | 2581 __ IncrementCounter(COUNTERS->call_global_inline(), 1, r3, r4); |
| 2577 ASSERT(function->is_compiled()); | 2582 ASSERT(function->is_compiled()); |
| 2578 Handle<Code> code(function->code()); | 2583 Handle<Code> code(function->code()); |
| 2579 ParameterCount expected(function->shared()->formal_parameter_count()); | 2584 ParameterCount expected(function->shared()->formal_parameter_count()); |
| 2580 if (V8::UseCrankshaft()) { | 2585 if (V8::UseCrankshaft()) { |
| 2581 // TODO(kasperl): For now, we always call indirectly through the | 2586 // TODO(kasperl): For now, we always call indirectly through the |
| 2582 // code field in the function to allow recompilation to take effect | 2587 // code field in the function to allow recompilation to take effect |
| 2583 // without changing any of the call sites. | 2588 // without changing any of the call sites. |
| 2584 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 2589 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); |
| 2585 __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION); | 2590 __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION); |
| 2586 } else { | 2591 } else { |
| 2587 __ InvokeCode(code, expected, arguments(), | 2592 __ InvokeCode(code, expected, arguments(), |
| 2588 RelocInfo::CODE_TARGET, JUMP_FUNCTION); | 2593 RelocInfo::CODE_TARGET, JUMP_FUNCTION); |
| 2589 } | 2594 } |
| 2590 | 2595 |
| 2591 // Handle call cache miss. | 2596 // Handle call cache miss. |
| 2592 __ bind(&miss); | 2597 __ bind(&miss); |
| 2593 __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3); | 2598 __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1, r1, r3); |
| 2594 Object* obj; | 2599 Object* obj; |
| 2595 { MaybeObject* maybe_obj = GenerateMissBranch(); | 2600 { MaybeObject* maybe_obj = GenerateMissBranch(); |
| 2596 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 2601 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 2597 } | 2602 } |
| 2598 | 2603 |
| 2599 // Return the generated code. | 2604 // Return the generated code. |
| 2600 return GetCode(NORMAL, name); | 2605 return GetCode(NORMAL, name); |
| 2601 } | 2606 } |
| 2602 | 2607 |
| 2603 | 2608 |
| 2604 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, | 2609 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, |
| 2605 int index, | 2610 int index, |
| 2606 Map* transition, | 2611 Map* transition, |
| 2607 String* name) { | 2612 String* name) { |
| 2608 // ----------- S t a t e ------------- | 2613 // ----------- S t a t e ------------- |
| 2609 // -- r0 : value | 2614 // -- r0 : value |
| 2610 // -- r1 : receiver | 2615 // -- r1 : receiver |
| 2611 // -- r2 : name | 2616 // -- r2 : name |
| 2612 // -- lr : return address | 2617 // -- lr : return address |
| 2613 // ----------------------------------- | 2618 // ----------------------------------- |
| 2614 Label miss; | 2619 Label miss; |
| 2615 | 2620 |
| 2616 GenerateStoreField(masm(), | 2621 GenerateStoreField(masm(), |
| 2617 object, | 2622 object, |
| 2618 index, | 2623 index, |
| 2619 transition, | 2624 transition, |
| 2620 r1, r2, r3, | 2625 r1, r2, r3, |
| 2621 &miss); | 2626 &miss); |
| 2622 __ bind(&miss); | 2627 __ bind(&miss); |
| 2623 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2628 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2629 Builtins::StoreIC_Miss)); |
| 2624 __ Jump(ic, RelocInfo::CODE_TARGET); | 2630 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2625 | 2631 |
| 2626 // Return the generated code. | 2632 // Return the generated code. |
| 2627 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); | 2633 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); |
| 2628 } | 2634 } |
| 2629 | 2635 |
| 2630 | 2636 |
| 2631 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, | 2637 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, |
| 2632 AccessorInfo* callback, | 2638 AccessorInfo* callback, |
| 2633 String* name) { | 2639 String* name) { |
| (...skipping 27 matching lines...) |
| 2661 __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback info | 2667 __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback info |
| 2662 __ Push(ip, r2, r0); | 2668 __ Push(ip, r2, r0); |
| 2663 | 2669 |
| 2664 // Do tail-call to the runtime system. | 2670 // Do tail-call to the runtime system. |
| 2665 ExternalReference store_callback_property = | 2671 ExternalReference store_callback_property = |
| 2666 ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); | 2672 ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); |
| 2667 __ TailCallExternalReference(store_callback_property, 4, 1); | 2673 __ TailCallExternalReference(store_callback_property, 4, 1); |
| 2668 | 2674 |
| 2669 // Handle store cache miss. | 2675 // Handle store cache miss. |
| 2670 __ bind(&miss); | 2676 __ bind(&miss); |
| 2671 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2677 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2678 Builtins::StoreIC_Miss)); |
| 2672 __ Jump(ic, RelocInfo::CODE_TARGET); | 2679 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2673 | 2680 |
| 2674 // Return the generated code. | 2681 // Return the generated code. |
| 2675 return GetCode(CALLBACKS, name); | 2682 return GetCode(CALLBACKS, name); |
| 2676 } | 2683 } |
| 2677 | 2684 |
| 2678 | 2685 |
| 2679 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, | 2686 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, |
| 2680 String* name) { | 2687 String* name) { |
| 2681 // ----------- S t a t e ------------- | 2688 // ----------- S t a t e ------------- |
| (...skipping 27 matching lines...) |
| 2709 __ mov(r0, Operand(Smi::FromInt(strict_mode_))); | 2716 __ mov(r0, Operand(Smi::FromInt(strict_mode_))); |
| 2710 __ push(r0); // strict mode | 2717 __ push(r0); // strict mode |
| 2711 | 2718 |
| 2712 // Do tail-call to the runtime system. | 2719 // Do tail-call to the runtime system. |
| 2713 ExternalReference store_ic_property = | 2720 ExternalReference store_ic_property = |
| 2714 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); | 2721 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); |
| 2715 __ TailCallExternalReference(store_ic_property, 4, 1); | 2722 __ TailCallExternalReference(store_ic_property, 4, 1); |
| 2716 | 2723 |
| 2717 // Handle store cache miss. | 2724 // Handle store cache miss. |
| 2718 __ bind(&miss); | 2725 __ bind(&miss); |
| 2719 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2726 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2727 Builtins::StoreIC_Miss)); |
| 2720 __ Jump(ic, RelocInfo::CODE_TARGET); | 2728 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2721 | 2729 |
| 2722 // Return the generated code. | 2730 // Return the generated code. |
| 2723 return GetCode(INTERCEPTOR, name); | 2731 return GetCode(INTERCEPTOR, name); |
| 2724 } | 2732 } |
| 2725 | 2733 |
| 2726 | 2734 |
| 2727 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, | 2735 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, |
| 2728 JSGlobalPropertyCell* cell, | 2736 JSGlobalPropertyCell* cell, |
| 2729 String* name) { | 2737 String* name) { |
| (...skipping 16 matching lines...) |
| 2746 // global object. We bail out to the runtime system to do that. | 2754 // global object. We bail out to the runtime system to do that. |
| 2747 __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell))); | 2755 __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 2748 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex); | 2756 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2749 __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); | 2757 __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); |
| 2750 __ cmp(r5, r6); | 2758 __ cmp(r5, r6); |
| 2751 __ b(eq, &miss); | 2759 __ b(eq, &miss); |
| 2752 | 2760 |
| 2753 // Store the value in the cell. | 2761 // Store the value in the cell. |
| 2754 __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); | 2762 __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); |
| 2755 | 2763 |
| 2756 __ IncrementCounter(&Counters::named_store_global_inline, 1, r4, r3); | 2764 __ IncrementCounter(COUNTERS->named_store_global_inline(), 1, r4, r3); |
| 2757 __ Ret(); | 2765 __ Ret(); |
| 2758 | 2766 |
| 2759 // Handle store cache miss. | 2767 // Handle store cache miss. |
| 2760 __ bind(&miss); | 2768 __ bind(&miss); |
| 2761 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r4, r3); | 2769 __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1, r4, r3); |
| 2762 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2770 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2771 Builtins::StoreIC_Miss)); |
| 2763 __ Jump(ic, RelocInfo::CODE_TARGET); | 2772 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2764 | 2773 |
| 2765 // Return the generated code. | 2774 // Return the generated code. |
| 2766 return GetCode(NORMAL, name); | 2775 return GetCode(NORMAL, name); |
| 2767 } | 2776 } |
| 2768 | 2777 |
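The fast path generated by CompileStoreGlobal above boils down to: load the global property cell, bail out to the IC miss handler if the cell holds the hole sentinel (the property was deleted, so the stub must not quietly recreate it), otherwise overwrite the cell's value in place and return. A toy C++ sketch of that logic; PropertyCell and the use of nullopt as the hole are illustrative, not V8's real representation.

    #include <cassert>
    #include <optional>
    #include <string>

    // Toy model of a global property cell; V8's real cell is a heap object
    // whose value slot can hold a distinguished "the hole" sentinel.
    struct PropertyCell {
      std::optional<std::string> value;  // nullopt plays the role of the hole
    };

    enum class StoreResult { kStored, kMiss };

    StoreResult StoreGlobal(PropertyCell& cell, const std::string& v) {
      if (!cell.value.has_value()) {
        // Cell holds the hole: jump to the miss handler and let the runtime
        // decide how to reintroduce the property.
        return StoreResult::kMiss;
      }
      cell.value = v;  // fast path: a single store into the cell
      return StoreResult::kStored;
    }

    int main() {
      PropertyCell cell{std::string("old")};
      assert(StoreGlobal(cell, "new") == StoreResult::kStored);
      cell.value.reset();  // simulate a deleted property
      assert(StoreGlobal(cell, "again") == StoreResult::kMiss);
      return 0;
    }

In the stub itself the hole test is the LoadRoot/cmp/b(eq, &miss) sequence, and the store is the single str into JSGlobalPropertyCell::kValueOffset.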
| 2769 | 2778 |
| 2770 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, | 2779 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, |
| 2771 JSObject* object, | 2780 JSObject* object, |
| 2772 JSObject* last) { | 2781 JSObject* last) { |
| (...skipping 26 matching lines...) |
| 2799 | 2808 |
| 2800 // Return undefined if maps of the full prototype chain are still the | 2809 // Return undefined if maps of the full prototype chain are still the |
| 2801 // same and no global property with this name contains a value. | 2810 // same and no global property with this name contains a value. |
| 2802 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 2811 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 2803 __ Ret(); | 2812 __ Ret(); |
| 2804 | 2813 |
| 2805 __ bind(&miss); | 2814 __ bind(&miss); |
| 2806 GenerateLoadMiss(masm(), Code::LOAD_IC); | 2815 GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 2807 | 2816 |
| 2808 // Return the generated code. | 2817 // Return the generated code. |
| 2809 return GetCode(NONEXISTENT, Heap::empty_string()); | 2818 return GetCode(NONEXISTENT, HEAP->empty_string()); |
| 2810 } | 2819 } |
| 2811 | 2820 |
| 2812 | 2821 |
| 2813 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, | 2822 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, |
| 2814 JSObject* holder, | 2823 JSObject* holder, |
| 2815 int index, | 2824 int index, |
| 2816 String* name) { | 2825 String* name) { |
| 2817 // ----------- S t a t e ------------- | 2826 // ----------- S t a t e ------------- |
| 2818 // -- r0 : receiver | 2827 // -- r0 : receiver |
| 2819 // -- r2 : name | 2828 // -- r2 : name |
| (...skipping 114 matching lines...) |
| 2934 __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); | 2943 __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); |
| 2935 | 2944 |
| 2936 // Check for deleted property if property can actually be deleted. | 2945 // Check for deleted property if property can actually be deleted. |
| 2937 if (!is_dont_delete) { | 2946 if (!is_dont_delete) { |
| 2938 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 2947 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 2939 __ cmp(r4, ip); | 2948 __ cmp(r4, ip); |
| 2940 __ b(eq, &miss); | 2949 __ b(eq, &miss); |
| 2941 } | 2950 } |
| 2942 | 2951 |
| 2943 __ mov(r0, r4); | 2952 __ mov(r0, r4); |
| 2944 __ IncrementCounter(&Counters::named_load_global_stub, 1, r1, r3); | 2953 __ IncrementCounter(COUNTERS->named_load_global_stub(), 1, r1, r3); |
| 2945 __ Ret(); | 2954 __ Ret(); |
| 2946 | 2955 |
| 2947 __ bind(&miss); | 2956 __ bind(&miss); |
| 2948 __ IncrementCounter(&Counters::named_load_global_stub_miss, 1, r1, r3); | 2957 __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1, r1, r3); |
| 2949 GenerateLoadMiss(masm(), Code::LOAD_IC); | 2958 GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 2950 | 2959 |
| 2951 // Return the generated code. | 2960 // Return the generated code. |
| 2952 return GetCode(NORMAL, name); | 2961 return GetCode(NORMAL, name); |
| 2953 } | 2962 } |
| 2954 | 2963 |
| 2955 | 2964 |
| 2956 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, | 2965 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, |
| 2957 JSObject* receiver, | 2966 JSObject* receiver, |
| 2958 JSObject* holder, | 2967 JSObject* holder, |
| (...skipping 124 matching lines...) |
| 3083 } | 3092 } |
| 3084 | 3093 |
| 3085 | 3094 |
| 3086 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { | 3095 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { |
| 3087 // ----------- S t a t e ------------- | 3096 // ----------- S t a t e ------------- |
| 3088 // -- lr : return address | 3097 // -- lr : return address |
| 3089 // -- r0 : key | 3098 // -- r0 : key |
| 3090 // -- r1 : receiver | 3099 // -- r1 : receiver |
| 3091 // ----------------------------------- | 3100 // ----------------------------------- |
| 3092 Label miss; | 3101 Label miss; |
| 3093 __ IncrementCounter(&Counters::keyed_load_string_length, 1, r2, r3); | 3102 __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3); |
| 3094 | 3103 |
| 3095 // Check the key is the cached one. | 3104 // Check the key is the cached one. |
| 3096 __ cmp(r0, Operand(Handle<String>(name))); | 3105 __ cmp(r0, Operand(Handle<String>(name))); |
| 3097 __ b(ne, &miss); | 3106 __ b(ne, &miss); |
| 3098 | 3107 |
| 3099 GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true); | 3108 GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true); |
| 3100 __ bind(&miss); | 3109 __ bind(&miss); |
| 3101 __ DecrementCounter(&Counters::keyed_load_string_length, 1, r2, r3); | 3110 __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3); |
| 3102 | 3111 |
| 3103 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); | 3112 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 3104 | 3113 |
| 3105 return GetCode(CALLBACKS, name); | 3114 return GetCode(CALLBACKS, name); |
| 3106 } | 3115 } |
| 3107 | 3116 |
| 3108 | 3117 |
| 3109 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { | 3118 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { |
| 3110 // ----------- S t a t e ------------- | 3119 // ----------- S t a t e ------------- |
| 3111 // -- lr : return address | 3120 // -- lr : return address |
| 3112 // -- r0 : key | 3121 // -- r0 : key |
| 3113 // -- r1 : receiver | 3122 // -- r1 : receiver |
| 3114 // ----------------------------------- | 3123 // ----------------------------------- |
| 3115 Label miss; | 3124 Label miss; |
| 3116 | 3125 |
| 3117 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3); | 3126 __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3); |
| 3118 | 3127 |
| 3119 // Check the name hasn't changed. | 3128 // Check the name hasn't changed. |
| 3120 __ cmp(r0, Operand(Handle<String>(name))); | 3129 __ cmp(r0, Operand(Handle<String>(name))); |
| 3121 __ b(ne, &miss); | 3130 __ b(ne, &miss); |
| 3122 | 3131 |
| 3123 GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss); | 3132 GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss); |
| 3124 __ bind(&miss); | 3133 __ bind(&miss); |
| 3125 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3); | 3134 __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3); |
| 3126 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); | 3135 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 3127 | 3136 |
| 3128 return GetCode(CALLBACKS, name); | 3137 return GetCode(CALLBACKS, name); |
| 3129 } | 3138 } |
| 3130 | 3139 |
| 3131 | 3140 |
| 3132 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { | 3141 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { |
| 3133 // ----------- S t a t e ------------- | 3142 // ----------- S t a t e ------------- |
| 3134 // -- lr : return address | 3143 // -- lr : return address |
| 3135 // -- r0 : key | 3144 // -- r0 : key |
| (...skipping 47 matching lines...) |
| 3183 Map* transition, | 3192 Map* transition, |
| 3184 String* name) { | 3193 String* name) { |
| 3185 // ----------- S t a t e ------------- | 3194 // ----------- S t a t e ------------- |
| 3186 // -- r0 : value | 3195 // -- r0 : value |
| 3187 // -- r1 : name | 3196 // -- r1 : name |
| 3188 // -- r2 : receiver | 3197 // -- r2 : receiver |
| 3189 // -- lr : return address | 3198 // -- lr : return address |
| 3190 // ----------------------------------- | 3199 // ----------------------------------- |
| 3191 Label miss; | 3200 Label miss; |
| 3192 | 3201 |
| 3193 __ IncrementCounter(&Counters::keyed_store_field, 1, r3, r4); | 3202 __ IncrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4); |
| 3194 | 3203 |
| 3195 // Check that the name has not changed. | 3204 // Check that the name has not changed. |
| 3196 __ cmp(r1, Operand(Handle<String>(name))); | 3205 __ cmp(r1, Operand(Handle<String>(name))); |
| 3197 __ b(ne, &miss); | 3206 __ b(ne, &miss); |
| 3198 | 3207 |
| 3199 // r3 is used as scratch register. r1 and r2 keep their values if a jump to | 3208 // r3 is used as scratch register. r1 and r2 keep their values if a jump to |
| 3200 // the miss label is generated. | 3209 // the miss label is generated. |
| 3201 GenerateStoreField(masm(), | 3210 GenerateStoreField(masm(), |
| 3202 object, | 3211 object, |
| 3203 index, | 3212 index, |
| 3204 transition, | 3213 transition, |
| 3205 r2, r1, r3, | 3214 r2, r1, r3, |
| 3206 &miss); | 3215 &miss); |
| 3207 __ bind(&miss); | 3216 __ bind(&miss); |
| 3208 | 3217 |
| 3209 __ DecrementCounter(&Counters::keyed_store_field, 1, r3, r4); | 3218 __ DecrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4); |
| 3210 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); | 3219 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 3220 Builtins::KeyedStoreIC_Miss)); |
| 3211 | 3221 |
| 3212 __ Jump(ic, RelocInfo::CODE_TARGET); | 3222 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 3213 | 3223 |
| 3214 // Return the generated code. | 3224 // Return the generated code. |
| 3215 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); | 3225 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); |
| 3216 } | 3226 } |
| 3217 | 3227 |
| 3218 | 3228 |
| 3219 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( | 3229 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( |
| 3220 JSObject* receiver) { | 3230 JSObject* receiver) { |
| (...skipping 23 matching lines...) |
| 3244 __ b(ne, &miss); | 3254 __ b(ne, &miss); |
| 3245 | 3255 |
| 3246 // Check that the key is a smi. | 3256 // Check that the key is a smi. |
| 3247 __ tst(key_reg, Operand(kSmiTagMask)); | 3257 __ tst(key_reg, Operand(kSmiTagMask)); |
| 3248 __ b(ne, &miss); | 3258 __ b(ne, &miss); |
| 3249 | 3259 |
| 3250 // Get the elements array and make sure it is a fast element array, not 'cow'. | 3260 // Get the elements array and make sure it is a fast element array, not 'cow'. |
| 3251 __ ldr(elements_reg, | 3261 __ ldr(elements_reg, |
| 3252 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3262 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 3253 __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset)); | 3263 __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset)); |
| 3254 __ cmp(scratch, Operand(Handle<Map>(Factory::fixed_array_map()))); | 3264 __ cmp(scratch, Operand(Handle<Map>(FACTORY->fixed_array_map()))); |
| 3255 __ b(ne, &miss); | 3265 __ b(ne, &miss); |
| 3256 | 3266 |
| 3257 // Check that the key is within bounds. | 3267 // Check that the key is within bounds. |
| 3258 if (receiver->IsJSArray()) { | 3268 if (receiver->IsJSArray()) { |
| 3259 __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); | 3269 __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
| 3260 } else { | 3270 } else { |
| 3261 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); | 3271 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); |
| 3262 } | 3272 } |
| 3263 // Compare smis. | 3273 // Compare smis. |
| 3264 __ cmp(key_reg, scratch); | 3274 __ cmp(key_reg, scratch); |
| 3265 __ b(hs, &miss); | 3275 __ b(hs, &miss); |
| 3266 | 3276 |
| 3267 __ add(scratch, | 3277 __ add(scratch, |
| 3268 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3278 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3269 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 3279 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
| 3270 __ str(value_reg, | 3280 __ str(value_reg, |
| 3271 MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); | 3281 MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 3272 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER | 3282 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER |
| 3273 __ RecordWrite(scratch, | 3283 __ RecordWrite(scratch, |
| 3274 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize), | 3284 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize), |
| 3275 receiver_reg , elements_reg); | 3285 receiver_reg , elements_reg); |
| 3276 #endif | 3286 #endif |
| 3277 | 3287 |
| 3278 // value_reg (r0) is preserved. | 3288 // value_reg (r0) is preserved. |
| 3279 // Done. | 3289 // Done. |
| 3280 __ Ret(); | 3290 __ Ret(); |
| 3281 | 3291 |
| 3282 __ bind(&miss); | 3292 __ bind(&miss); |
| 3283 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); | 3293 Handle<Code> ic( |
| 3294 Isolate::Current()->builtins()->builtin(Builtins::KeyedStoreIC_Miss)); |
| 3284 __ Jump(ic, RelocInfo::CODE_TARGET); | 3295 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 3285 | 3296 |
| 3286 // Return the generated code. | 3297 // Return the generated code. |
| 3287 return GetCode(NORMAL, NULL); | 3298 return GetCode(NORMAL, NULL); |
| 3288 } | 3299 } |
| 3289 | 3300 |
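A note on the address arithmetic in CompileStoreSpecialized above: the key register still holds a smi, i.e. the element index already shifted left by kSmiTagSize, so the stub scales it by kPointerSizeLog2 - kSmiTagSize instead of untagging first. A small self-contained check of that bookkeeping, with the usual 32-bit ARM constants assumed (the FixedArray header size below is illustrative).

    #include <cassert>
    #include <cstdint>

    // 32-bit ARM V8 constants as used by the stub above.
    constexpr int kSmiTagSize = 1;            // smi == index << 1, low bit clear
    constexpr int kPointerSize = 4;
    constexpr int kPointerSizeLog2 = 2;
    constexpr int kHeapObjectTag = 1;         // heap pointers have the low bit set
    constexpr int kFixedArrayHeaderSize = 8;  // map + length words (illustrative)

    // Address of element `index` inside a FixedArray, computed the way the
    // generated code does it: strip the heap-object tag, skip the header, then
    // scale the *smi* key by one less than the pointer-size shift.
    uint32_t ElementAddress(uint32_t tagged_elements, uint32_t smi_key) {
      uint32_t scratch = tagged_elements + kFixedArrayHeaderSize - kHeapObjectTag;
      return scratch + (smi_key << (kPointerSizeLog2 - kSmiTagSize));
    }

    int main() {
      uint32_t elements = 0x1000 + kHeapObjectTag;  // some tagged pointer
      uint32_t index = 3;
      uint32_t smi_key = index << kSmiTagSize;
      // Same answer as untagging first and scaling by the full pointer size.
      assert(ElementAddress(elements, smi_key) ==
             0x1000 + kFixedArrayHeaderSize + index * kPointerSize);
      return 0;
    }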
| 3290 | 3301 |
| 3291 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { | 3302 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { |
| 3292 // ----------- S t a t e ------------- | 3303 // ----------- S t a t e ------------- |
| 3293 // -- r0 : argc | 3304 // -- r0 : argc |
| (...skipping 115 matching lines...) |
| 3409 // Move argc to r1 and the JSObject to return to r0 and tag it. | 3420 // Move argc to r1 and the JSObject to return to r0 and tag it. |
| 3410 __ mov(r1, r0); | 3421 __ mov(r1, r0); |
| 3411 __ mov(r0, r4); | 3422 __ mov(r0, r4); |
| 3412 __ orr(r0, r0, Operand(kHeapObjectTag)); | 3423 __ orr(r0, r0, Operand(kHeapObjectTag)); |
| 3413 | 3424 |
| 3414 // r0: JSObject | 3425 // r0: JSObject |
| 3415 // r1: argc | 3426 // r1: argc |
| 3416 // Remove caller arguments and receiver from the stack and return. | 3427 // Remove caller arguments and receiver from the stack and return. |
| 3417 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2)); | 3428 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2)); |
| 3418 __ add(sp, sp, Operand(kPointerSize)); | 3429 __ add(sp, sp, Operand(kPointerSize)); |
| 3419 __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2); | 3430 __ IncrementCounter(COUNTERS->constructed_objects(), 1, r1, r2); |
| 3420 __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2); | 3431 __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1, r1, r2); |
| 3421 __ Jump(lr); | 3432 __ Jump(lr); |
| 3422 | 3433 |
| 3423 // Jump to the generic stub in case the specialized code cannot handle the | 3434 // Jump to the generic stub in case the specialized code cannot handle the |
| 3424 // construction. | 3435 // construction. |
| 3425 __ bind(&generic_stub_call); | 3436 __ bind(&generic_stub_call); |
| 3426 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric); | 3437 Code* code = Isolate::Current()->builtins()->builtin( |
| 3438 Builtins::JSConstructStubGeneric); |
| 3427 Handle<Code> generic_construct_stub(code); | 3439 Handle<Code> generic_construct_stub(code); |
| 3428 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | 3440 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); |
| 3429 | 3441 |
| 3430 // Return the generated code. | 3442 // Return the generated code. |
| 3431 return GetCode(); | 3443 return GetCode(); |
| 3432 } | 3444 } |
| 3433 | 3445 |
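Two small pieces of arithmetic close out CompileConstructStub above: the raw allocation address is turned into a tagged JSObject reference by OR-ing in kHeapObjectTag, and the caller's argc arguments plus the receiver are popped by bumping sp. A hedged sketch, assuming the usual 32-bit constants.

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kHeapObjectTag = 1;   // low bit set marks a heap pointer
    constexpr uint32_t kPointerSize = 4;
    constexpr uint32_t kPointerSizeLog2 = 2;

    // orr r0, r0, #kHeapObjectTag: tag the freshly allocated object address.
    uint32_t TagJSObject(uint32_t raw_address) {
      assert((raw_address & kHeapObjectTag) == 0);  // allocations are aligned
      return raw_address | kHeapObjectTag;
    }

    // add sp, sp, argc << kPointerSizeLog2; add sp, sp, #kPointerSize:
    // drop argc arguments plus the implicit receiver from the stack.
    uint32_t PopArgumentsAndReceiver(uint32_t sp, uint32_t argc) {
      sp += argc << kPointerSizeLog2;  // the explicit arguments
      sp += kPointerSize;              // the receiver slot
      return sp;
    }

    int main() {
      assert(TagJSObject(0x2000) == 0x2001);
      assert(PopArgumentsAndReceiver(0x7f00, 2) == 0x7f00 + 3 * kPointerSize);
      return 0;
    }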
| 3434 | 3446 |
| 3435 static bool IsElementTypeSigned(ExternalArrayType array_type) { | 3447 static bool IsElementTypeSigned(ExternalArrayType array_type) { |
| 3436 switch (array_type) { | 3448 switch (array_type) { |
| (...skipping 68 matching lines...) |
| 3505 __ ldrsh(value, MemOperand(r3, key, LSL, 0)); | 3517 __ ldrsh(value, MemOperand(r3, key, LSL, 0)); |
| 3506 break; | 3518 break; |
| 3507 case kExternalUnsignedShortArray: | 3519 case kExternalUnsignedShortArray: |
| 3508 __ ldrh(value, MemOperand(r3, key, LSL, 0)); | 3520 __ ldrh(value, MemOperand(r3, key, LSL, 0)); |
| 3509 break; | 3521 break; |
| 3510 case kExternalIntArray: | 3522 case kExternalIntArray: |
| 3511 case kExternalUnsignedIntArray: | 3523 case kExternalUnsignedIntArray: |
| 3512 __ ldr(value, MemOperand(r3, key, LSL, 1)); | 3524 __ ldr(value, MemOperand(r3, key, LSL, 1)); |
| 3513 break; | 3525 break; |
| 3514 case kExternalFloatArray: | 3526 case kExternalFloatArray: |
| 3515 if (CpuFeatures::IsSupported(VFP3)) { | 3527 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3516 CpuFeatures::Scope scope(VFP3); | 3528 CpuFeatures::Scope scope(VFP3); |
| 3517 __ add(r2, r3, Operand(key, LSL, 1)); | 3529 __ add(r2, r3, Operand(key, LSL, 1)); |
| 3518 __ vldr(s0, r2, 0); | 3530 __ vldr(s0, r2, 0); |
| 3519 } else { | 3531 } else { |
| 3520 __ ldr(value, MemOperand(r3, key, LSL, 1)); | 3532 __ ldr(value, MemOperand(r3, key, LSL, 1)); |
| 3521 } | 3533 } |
| 3522 break; | 3534 break; |
| 3523 default: | 3535 default: |
| 3524 UNREACHABLE(); | 3536 UNREACHABLE(); |
| 3525 break; | 3537 break; |
| (...skipping 18 matching lines...) |
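The shift amounts in the external-array loads above look one smaller than the element sizes only because the key register still holds a smi (the index already multiplied by two), so a halfword load uses LSL #0 and a word load uses LSL #1. A quick check of that arithmetic, with kSmiTagSize assumed to be 1 as on 32-bit V8.

    #include <cassert>
    #include <cstdint>

    constexpr int kSmiTagSize = 1;  // smi == index << 1 on 32-bit

    // Byte offset into the external array's backing store for element `index`
    // of size 2^log2_element_size bytes, computed the way the stub does it:
    // shift the smi key by log2(element_size) - kSmiTagSize.
    uint32_t BackingStoreOffset(uint32_t index, int log2_element_size) {
      uint32_t smi_key = index << kSmiTagSize;
      return smi_key << (log2_element_size - kSmiTagSize);
    }

    int main() {
      // 16-bit elements (kExternalShortArray): LSL #0 on the smi key.
      assert(BackingStoreOffset(5, 1) == 5 * 2);
      // 32-bit elements (kExternalIntArray / kExternalFloatArray): LSL #1.
      assert(BackingStoreOffset(5, 2) == 5 * 4);
      return 0;
    }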
| 3544 | 3556 |
| 3545 __ bind(&box_int); | 3557 __ bind(&box_int); |
| 3546 // Allocate a HeapNumber for the result and perform int-to-double | 3558 // Allocate a HeapNumber for the result and perform int-to-double |
| 3547 // conversion. Don't touch r0 or r1 as they are needed if allocation | 3559 // conversion. Don't touch r0 or r1 as they are needed if allocation |
| 3548 // fails. | 3560 // fails. |
| 3549 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); | 3561 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); |
| 3550 __ AllocateHeapNumber(r5, r3, r4, r6, &slow); | 3562 __ AllocateHeapNumber(r5, r3, r4, r6, &slow); |
| 3551 // Now we can use r0 for the result as key is not needed any more. | 3563 // Now we can use r0 for the result as key is not needed any more. |
| 3552 __ mov(r0, r5); | 3564 __ mov(r0, r5); |
| 3553 | 3565 |
| 3554 if (CpuFeatures::IsSupported(VFP3)) { | 3566 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3555 CpuFeatures::Scope scope(VFP3); | 3567 CpuFeatures::Scope scope(VFP3); |
| 3556 __ vmov(s0, value); | 3568 __ vmov(s0, value); |
| 3557 __ vcvt_f64_s32(d0, s0); | 3569 __ vcvt_f64_s32(d0, s0); |
| 3558 __ sub(r3, r0, Operand(kHeapObjectTag)); | 3570 __ sub(r3, r0, Operand(kHeapObjectTag)); |
| 3559 __ vstr(d0, r3, HeapNumber::kValueOffset); | 3571 __ vstr(d0, r3, HeapNumber::kValueOffset); |
| 3560 __ Ret(); | 3572 __ Ret(); |
| 3561 } else { | 3573 } else { |
| 3562 WriteInt32ToHeapNumberStub stub(value, r0, r3); | 3574 WriteInt32ToHeapNumberStub stub(value, r0, r3); |
| 3563 __ TailCallStub(&stub); | 3575 __ TailCallStub(&stub); |
| 3564 } | 3576 } |
| 3565 } else if (array_type == kExternalUnsignedIntArray) { | 3577 } else if (array_type == kExternalUnsignedIntArray) { |
| 3566 // The test is different for unsigned int values. Since we need | 3578 // The test is different for unsigned int values. Since we need |
| 3567 // the value to be in the range of a positive smi, we can't | 3579 // the value to be in the range of a positive smi, we can't |
| 3568 // handle either of the top two bits being set in the value. | 3580 // handle either of the top two bits being set in the value. |
| 3569 if (CpuFeatures::IsSupported(VFP3)) { | 3581 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3570 CpuFeatures::Scope scope(VFP3); | 3582 CpuFeatures::Scope scope(VFP3); |
| 3571 Label box_int, done; | 3583 Label box_int, done; |
| 3572 __ tst(value, Operand(0xC0000000)); | 3584 __ tst(value, Operand(0xC0000000)); |
| 3573 __ b(ne, &box_int); | 3585 __ b(ne, &box_int); |
| 3574 // Tag integer as smi and return it. | 3586 // Tag integer as smi and return it. |
| 3575 __ mov(r0, Operand(value, LSL, kSmiTagSize)); | 3587 __ mov(r0, Operand(value, LSL, kSmiTagSize)); |
| 3576 __ Ret(); | 3588 __ Ret(); |
| 3577 | 3589 |
| 3578 __ bind(&box_int); | 3590 __ bind(&box_int); |
| 3579 __ vmov(s0, value); | 3591 __ vmov(s0, value); |
| (...skipping 43 matching lines...) |
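The 0xC0000000 mask used just above is the "fits in a non-negative smi" test: a 32-bit smi carries a 31-bit signed payload, so the largest non-negative smi is 2^30 - 1 and any unsigned value with either of its top two bits set must be boxed in a HeapNumber instead. A small check of that equivalence.

    #include <cassert>
    #include <cstdint>
    #include <initializer_list>

    // On 32-bit V8 a smi stores a 31-bit signed integer, so the largest
    // non-negative smi payload is 2^30 - 1.
    constexpr uint32_t kMaxNonNegativeSmi = (1u << 30) - 1;

    bool FitsNonNegativeSmi(uint32_t value) { return value <= kMaxNonNegativeSmi; }

    // The stub's test: take the HeapNumber-boxing path if either of the two
    // most significant bits is set.
    bool TopTwoBitsSet(uint32_t value) { return (value & 0xC0000000u) != 0; }

    int main() {
      for (uint32_t v : {0u, 1u, kMaxNonNegativeSmi, kMaxNonNegativeSmi + 1,
                         0x80000000u, 0xFFFFFFFFu}) {
        // The mask test and the range test agree for every value.
        assert(TopTwoBitsSet(v) == !FitsNonNegativeSmi(v));
      }
      return 0;
    }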
| 3623 | 3635 |
| 3624 __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset)); | 3636 __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset)); |
| 3625 __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset)); | 3637 __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset)); |
| 3626 | 3638 |
| 3627 __ mov(r0, r4); | 3639 __ mov(r0, r4); |
| 3628 __ Ret(); | 3640 __ Ret(); |
| 3629 } | 3641 } |
| 3630 } else if (array_type == kExternalFloatArray) { | 3642 } else if (array_type == kExternalFloatArray) { |
| 3631 // For the floating-point array type, we need to always allocate a | 3643 // For the floating-point array type, we need to always allocate a |
| 3632 // HeapNumber. | 3644 // HeapNumber. |
| 3633 if (CpuFeatures::IsSupported(VFP3)) { | 3645 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3634 CpuFeatures::Scope scope(VFP3); | 3646 CpuFeatures::Scope scope(VFP3); |
| 3635 // Allocate a HeapNumber for the result. Don't use r0 and r1 as | 3647 // Allocate a HeapNumber for the result. Don't use r0 and r1 as |
| 3636 // AllocateHeapNumber clobbers all registers - also when jumping due to | 3648 // AllocateHeapNumber clobbers all registers - also when jumping due to |
| 3637 // exhausted young space. | 3649 // exhausted young space. |
| 3638 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); | 3650 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); |
| 3639 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); | 3651 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); |
| 3640 __ vcvt_f64_f32(d0, s0); | 3652 __ vcvt_f64_f32(d0, s0); |
| 3641 __ sub(r1, r2, Operand(kHeapObjectTag)); | 3653 __ sub(r1, r2, Operand(kHeapObjectTag)); |
| 3642 __ vstr(d0, r1, HeapNumber::kValueOffset); | 3654 __ vstr(d0, r1, HeapNumber::kValueOffset); |
| 3643 | 3655 |
| (...skipping 55 matching lines...) |
| 3699 } | 3711 } |
| 3700 | 3712 |
| 3701 } else { | 3713 } else { |
| 3702 // Tag integer as smi and return it. | 3714 // Tag integer as smi and return it. |
| 3703 __ mov(r0, Operand(value, LSL, kSmiTagSize)); | 3715 __ mov(r0, Operand(value, LSL, kSmiTagSize)); |
| 3704 __ Ret(); | 3716 __ Ret(); |
| 3705 } | 3717 } |
| 3706 | 3718 |
| 3707 // Slow case, key and receiver still in r0 and r1. | 3719 // Slow case, key and receiver still in r0 and r1. |
| 3708 __ bind(&slow); | 3720 __ bind(&slow); |
| 3709 __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1, r2, r3); | 3721 __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1, r2, r3); |
| 3710 | 3722 |
| 3711 // ---------- S t a t e -------------- | 3723 // ---------- S t a t e -------------- |
| 3712 // -- lr : return address | 3724 // -- lr : return address |
| 3713 // -- r0 : key | 3725 // -- r0 : key |
| 3714 // -- r1 : receiver | 3726 // -- r1 : receiver |
| 3715 // ----------------------------------- | 3727 // ----------------------------------- |
| 3716 | 3728 |
| 3717 __ Push(r1, r0); | 3729 __ Push(r1, r0); |
| 3718 | 3730 |
| 3719 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 3731 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
| (...skipping 94 matching lines...) |
| 3814 __ b(ne, &slow); | 3826 __ b(ne, &slow); |
| 3815 | 3827 |
| 3816 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); | 3828 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); |
| 3817 | 3829 |
| 3818 // r3: base pointer of external storage. | 3830 // r3: base pointer of external storage. |
| 3819 // r4: key (integer). | 3831 // r4: key (integer). |
| 3820 | 3832 |
| 3821 // The WebGL specification leaves the behavior of storing NaN and | 3833 // The WebGL specification leaves the behavior of storing NaN and |
| 3822 // +/-Infinity into integer arrays basically undefined. For more | 3834 // +/-Infinity into integer arrays basically undefined. For more |
| 3823 // reproducible behavior, convert these to zero. | 3835 // reproducible behavior, convert these to zero. |
| 3824 if (CpuFeatures::IsSupported(VFP3)) { | 3836 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3825 CpuFeatures::Scope scope(VFP3); | 3837 CpuFeatures::Scope scope(VFP3); |
| 3826 | 3838 |
| 3827 | 3839 |
| 3828 if (array_type == kExternalFloatArray) { | 3840 if (array_type == kExternalFloatArray) { |
| 3829 // vldr requires offset to be a multiple of 4 so we can not | 3841 // vldr requires offset to be a multiple of 4 so we can not |
| 3830 // include -kHeapObjectTag into it. | 3842 // include -kHeapObjectTag into it. |
| 3831 __ sub(r5, r0, Operand(kHeapObjectTag)); | 3843 __ sub(r5, r0, Operand(kHeapObjectTag)); |
| 3832 __ vldr(d0, r5, HeapNumber::kValueOffset); | 3844 __ vldr(d0, r5, HeapNumber::kValueOffset); |
| 3833 __ add(r5, r3, Operand(r4, LSL, 2)); | 3845 __ add(r5, r3, Operand(r4, LSL, 2)); |
| 3834 __ vcvt_f32_f64(s0, d0); | 3846 __ vcvt_f32_f64(s0, d0); |
| (...skipping 192 matching lines...) |
| 4027 | 4039 |
| 4028 return GetCode(flags); | 4040 return GetCode(flags); |
| 4029 } | 4041 } |
| 4030 | 4042 |
| 4031 | 4043 |
| 4032 #undef __ | 4044 #undef __ |
| 4033 | 4045 |
| 4034 } } // namespace v8::internal | 4046 } } // namespace v8::internal |
| 4035 | 4047 |
| 4036 #endif // V8_TARGET_ARCH_ARM | 4048 #endif // V8_TARGET_ARCH_ARM |