| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 21 matching lines...) |
| 32 #include "ic-inl.h" | 32 #include "ic-inl.h" |
| 33 #include "codegen-inl.h" | 33 #include "codegen-inl.h" |
| 34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 #define __ ACCESS_MASM(masm) | 39 #define __ ACCESS_MASM(masm) |
| 40 | 40 |
| 41 | 41 |
| 42 static void ProbeTable(MacroAssembler* masm, | 42 static void ProbeTable(Isolate* isolate, |
| | 43 MacroAssembler* masm, |
| 43 Code::Flags flags, | 44 Code::Flags flags, |
| 44 StubCache::Table table, | 45 StubCache::Table table, |
| 45 Register name, | 46 Register name, |
| 46 Register offset, | 47 Register offset, |
| 47 Register scratch, | 48 Register scratch, |
| 48 Register scratch2) { | 49 Register scratch2) { |
| 49 ExternalReference key_offset(SCTableReference::keyReference(table)); | 50 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
| 50 ExternalReference value_offset(SCTableReference::valueReference(table)); | 51 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
| 51 | 52 |
| 52 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); | 53 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); |
| 53 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); | 54 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); |
| 54 | 55 |
| 55 // Check the relative positions of the address fields. | 56 // Check the relative positions of the address fields. |
| 56 ASSERT(value_off_addr > key_off_addr); | 57 ASSERT(value_off_addr > key_off_addr); |
| 57 ASSERT((value_off_addr - key_off_addr) % 4 == 0); | 58 ASSERT((value_off_addr - key_off_addr) % 4 == 0); |
| 58 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); | 59 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); |
| 59 | 60 |
| 60 Label miss; | 61 Label miss; |
| (...skipping 33 matching lines...) |
| 94 // must always call a backup property check that is complete. | 95 // must always call a backup property check that is complete. |
| 95 // This function is safe to call if the receiver has fast properties. | 96 // This function is safe to call if the receiver has fast properties. |
| 96 // Name must be a symbol and receiver must be a heap object. | 97 // Name must be a symbol and receiver must be a heap object. |
| 97 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, | 98 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, |
| 98 Label* miss_label, | 99 Label* miss_label, |
| 99 Register receiver, | 100 Register receiver, |
| 100 String* name, | 101 String* name, |
| 101 Register scratch0, | 102 Register scratch0, |
| 102 Register scratch1) { | 103 Register scratch1) { |
| 103 ASSERT(name->IsSymbol()); | 104 ASSERT(name->IsSymbol()); |
| 104 __ IncrementCounter(&Counters::negative_lookups, 1, scratch0, scratch1); | 105 __ IncrementCounter(COUNTERS->negative_lookups(), 1, scratch0, scratch1); |
| 105 __ IncrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1); | 106 __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1); |
| 106 | 107 |
| 107 Label done; | 108 Label done; |
| 108 | 109 |
| 109 const int kInterceptorOrAccessCheckNeededMask = | 110 const int kInterceptorOrAccessCheckNeededMask = |
| 110 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); | 111 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); |
| 111 | 112 |
| 112 // Bail out if the receiver has a named interceptor or requires access checks. | 113 // Bail out if the receiver has a named interceptor or requires access checks. |
| 113 Register map = scratch1; | 114 Register map = scratch1; |
| 114 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 115 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 115 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); | 116 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); |
| (...skipping 75 matching lines...) |
| 191 | 192 |
| 192 // Restore the properties. | 193 // Restore the properties. |
| 193 __ ldr(properties, | 194 __ ldr(properties, |
| 194 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 195 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 195 } else { | 196 } else { |
| 196 // Give up probing if still not found the undefined value. | 197 // Give up probing if still not found the undefined value. |
| 197 __ b(ne, miss_label); | 198 __ b(ne, miss_label); |
| 198 } | 199 } |
| 199 } | 200 } |
| 200 __ bind(&done); | 201 __ bind(&done); |
| 201 __ DecrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1); | 202 __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1); |
| 202 } | 203 } |
| 203 | 204 |
| 204 | 205 |
| 205 void StubCache::GenerateProbe(MacroAssembler* masm, | 206 void StubCache::GenerateProbe(MacroAssembler* masm, |
| 206 Code::Flags flags, | 207 Code::Flags flags, |
| 207 Register receiver, | 208 Register receiver, |
| 208 Register name, | 209 Register name, |
| 209 Register scratch, | 210 Register scratch, |
| 210 Register extra, | 211 Register extra, |
| 211 Register extra2) { | 212 Register extra2) { |
| | 213 Isolate* isolate = Isolate::Current(); |
| 212 Label miss; | 214 Label miss; |
| 213 | 215 |
| 214 // Make sure that code is valid. The shifting code relies on the | 216 // Make sure that code is valid. The shifting code relies on the |
| 215 // entry size being 8. | 217 // entry size being 8. |
| 216 ASSERT(sizeof(Entry) == 8); | 218 ASSERT(sizeof(Entry) == 8); |
| 217 | 219 |
| 218 // Make sure the flags does not name a specific type. | 220 // Make sure the flags does not name a specific type. |
| 219 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); | 221 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); |
| 220 | 222 |
| 221 // Make sure that there are no register conflicts. | 223 // Make sure that there are no register conflicts. |
| (...skipping 19 matching lines...) |
| 241 // Get the map of the receiver and compute the hash. | 243 // Get the map of the receiver and compute the hash. |
| 242 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); | 244 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); |
| 243 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 245 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 244 __ add(scratch, scratch, Operand(ip)); | 246 __ add(scratch, scratch, Operand(ip)); |
| 245 __ eor(scratch, scratch, Operand(flags)); | 247 __ eor(scratch, scratch, Operand(flags)); |
| 246 __ and_(scratch, | 248 __ and_(scratch, |
| 247 scratch, | 249 scratch, |
| 248 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 250 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); |
| 249 | 251 |
| 250 // Probe the primary table. | 252 // Probe the primary table. |
| 251 ProbeTable(masm, flags, kPrimary, name, scratch, extra, extra2); | 253 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2); |
| 252 | 254 |
| 253 // Primary miss: Compute hash for secondary probe. | 255 // Primary miss: Compute hash for secondary probe. |
| 254 __ sub(scratch, scratch, Operand(name)); | 256 __ sub(scratch, scratch, Operand(name)); |
| 255 __ add(scratch, scratch, Operand(flags)); | 257 __ add(scratch, scratch, Operand(flags)); |
| 256 __ and_(scratch, | 258 __ and_(scratch, |
| 257 scratch, | 259 scratch, |
| 258 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); | 260 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); |
| 259 | 261 |
| 260 // Probe the secondary table. | 262 // Probe the secondary table. |
| 261 ProbeTable(masm, flags, kSecondary, name, scratch, extra, extra2); | 263 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2); |
| 262 | 264 |
| 263 // Cache miss: Fall-through and let caller handle the miss by | 265 // Cache miss: Fall-through and let caller handle the miss by |
| 264 // entering the runtime system. | 266 // entering the runtime system. |
| 265 __ bind(&miss); | 267 __ bind(&miss); |
| 266 } | 268 } |
| 267 | 269 |
| 268 | 270 |
| 269 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, | 271 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, |
| 270 int index, | 272 int index, |
| 271 Register prototype) { | 273 Register prototype) { |
| 272 // Load the global or builtins object from the current context. | 274 // Load the global or builtins object from the current context. |
| 273 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 275 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 274 // Load the global context from the global or builtins object. | 276 // Load the global context from the global or builtins object. |
| 275 __ ldr(prototype, | 277 __ ldr(prototype, |
| 276 FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset)); | 278 FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset)); |
| 277 // Load the function from the global context. | 279 // Load the function from the global context. |
| 278 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index))); | 280 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index))); |
| 279 // Load the initial map. The global functions all have initial maps. | 281 // Load the initial map. The global functions all have initial maps. |
| 280 __ ldr(prototype, | 282 __ ldr(prototype, |
| 281 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); | 283 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); |
| 282 // Load the prototype from the initial map. | 284 // Load the prototype from the initial map. |
| 283 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); | 285 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 284 } | 286 } |
| 285 | 287 |
| 286 | 288 |
| 287 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( | 289 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( |
| 288 MacroAssembler* masm, int index, Register prototype, Label* miss) { | 290 MacroAssembler* masm, int index, Register prototype, Label* miss) { |
| 289 // Check we're still in the same context. | 291 // Check we're still in the same context. |
| 290 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 292 __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 291 __ Move(ip, Top::global()); | 293 __ Move(ip, Isolate::Current()->global()); |
| 292 __ cmp(prototype, ip); | 294 __ cmp(prototype, ip); |
| 293 __ b(ne, miss); | 295 __ b(ne, miss); |
| 294 // Get the global function with the given index. | 296 // Get the global function with the given index. |
| 295 JSFunction* function = JSFunction::cast(Top::global_context()->get(index)); | 297 JSFunction* function = JSFunction::cast( |
| | 298 Isolate::Current()->global_context()->get(index)); |
| 296 // Load its initial map. The global functions all have initial maps. | 299 // Load its initial map. The global functions all have initial maps. |
| 297 __ Move(prototype, Handle<Map>(function->initial_map())); | 300 __ Move(prototype, Handle<Map>(function->initial_map())); |
| 298 // Load the prototype from the initial map. | 301 // Load the prototype from the initial map. |
| 299 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); | 302 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 300 } | 303 } |
| 301 | 304 |
| 302 | 305 |
| 303 // Load a fast property out of a holder object (src). In-object properties | 306 // Load a fast property out of a holder object (src). In-object properties |
| 304 // are loaded directly otherwise the property is loaded from the properties | 307 // are loaded directly otherwise the property is loaded from the properties |
| 305 // fixed array. | 308 // fixed array. |
| (...skipping 192 matching lines...) |
| 498 // Return the value (register r0). | 501 // Return the value (register r0). |
| 499 __ bind(&exit); | 502 __ bind(&exit); |
| 500 __ Ret(); | 503 __ Ret(); |
| 501 } | 504 } |
| 502 | 505 |
| 503 | 506 |
| 504 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { | 507 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { |
| 505 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); | 508 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); |
| 506 Code* code = NULL; | 509 Code* code = NULL; |
| 507 if (kind == Code::LOAD_IC) { | 510 if (kind == Code::LOAD_IC) { |
| 508 code = Builtins::builtin(Builtins::LoadIC_Miss); | 511 code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss); |
| 509 } else { | 512 } else { |
| 510 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss); | 513 code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss); |
| 511 } | 514 } |
| 512 | 515 |
| 513 Handle<Code> ic(code); | 516 Handle<Code> ic(code); |
| 514 __ Jump(ic, RelocInfo::CODE_TARGET); | 517 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 515 } | 518 } |
| 516 | 519 |
| 517 | 520 |
| 518 static void GenerateCallFunction(MacroAssembler* masm, | 521 static void GenerateCallFunction(MacroAssembler* masm, |
| 519 Object* object, | 522 Object* object, |
| 520 const ParameterCount& arguments, | 523 const ParameterCount& arguments, |
| (...skipping 20 matching lines...) |
| 541 } | 544 } |
| 542 | 545 |
| 543 | 546 |
| 544 static void PushInterceptorArguments(MacroAssembler* masm, | 547 static void PushInterceptorArguments(MacroAssembler* masm, |
| 545 Register receiver, | 548 Register receiver, |
| 546 Register holder, | 549 Register holder, |
| 547 Register name, | 550 Register name, |
| 548 JSObject* holder_obj) { | 551 JSObject* holder_obj) { |
| 549 __ push(name); | 552 __ push(name); |
| 550 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor(); | 553 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor(); |
| 551 ASSERT(!Heap::InNewSpace(interceptor)); | 554 ASSERT(!HEAP->InNewSpace(interceptor)); |
| 552 Register scratch = name; | 555 Register scratch = name; |
| 553 __ mov(scratch, Operand(Handle<Object>(interceptor))); | 556 __ mov(scratch, Operand(Handle<Object>(interceptor))); |
| 554 __ push(scratch); | 557 __ push(scratch); |
| 555 __ push(receiver); | 558 __ push(receiver); |
| 556 __ push(holder); | 559 __ push(holder); |
| 557 __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset)); | 560 __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset)); |
| 558 __ push(scratch); | 561 __ push(scratch); |
| 559 } | 562 } |
| 560 | 563 |
| 561 | 564 |
| (...skipping 47 matching lines...) |
| 609 // -- sp[(argc + 4) * 4] : receiver | 612 // -- sp[(argc + 4) * 4] : receiver |
| 610 // ----------------------------------- | 613 // ----------------------------------- |
| 611 // Get the function and setup the context. | 614 // Get the function and setup the context. |
| 612 JSFunction* function = optimization.constant_function(); | 615 JSFunction* function = optimization.constant_function(); |
| 613 __ mov(r5, Operand(Handle<JSFunction>(function))); | 616 __ mov(r5, Operand(Handle<JSFunction>(function))); |
| 614 __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset)); | 617 __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset)); |
| 615 | 618 |
| 616 // Pass the additional arguments FastHandleApiCall expects. | 619 // Pass the additional arguments FastHandleApiCall expects. |
| 617 Object* call_data = optimization.api_call_info()->data(); | 620 Object* call_data = optimization.api_call_info()->data(); |
| 618 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info()); | 621 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info()); |
| 619 if (Heap::InNewSpace(call_data)) { | 622 if (HEAP->InNewSpace(call_data)) { |
| 620 __ Move(r0, api_call_info_handle); | 623 __ Move(r0, api_call_info_handle); |
| 621 __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset)); | 624 __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset)); |
| 622 } else { | 625 } else { |
| 623 __ Move(r6, Handle<Object>(call_data)); | 626 __ Move(r6, Handle<Object>(call_data)); |
| 624 } | 627 } |
| 625 // Store js function and call data. | 628 // Store js function and call data. |
| 626 __ stm(ib, sp, r5.bit() | r6.bit()); | 629 __ stm(ib, sp, r5.bit() | r6.bit()); |
| 627 | 630 |
| 628 // r2 points to call data as expected by Arguments | 631 // r2 points to call data as expected by Arguments |
| 629 // (refer to layout above). | 632 // (refer to layout above). |
| (...skipping 73 matching lines...) |
| 703 } else { | 706 } else { |
| 704 CompileRegular(masm, | 707 CompileRegular(masm, |
| 705 object, | 708 object, |
| 706 receiver, | 709 receiver, |
| 707 scratch1, | 710 scratch1, |
| 708 scratch2, | 711 scratch2, |
| 709 scratch3, | 712 scratch3, |
| 710 name, | 713 name, |
| 711 holder, | 714 holder, |
| 712 miss); | 715 miss); |
| 713 return Heap::undefined_value(); | 716 return HEAP->undefined_value(); |
| 714 } | 717 } |
| 715 } | 718 } |
| 716 | 719 |
| 717 private: | 720 private: |
| 718 MaybeObject* CompileCacheable(MacroAssembler* masm, | 721 MaybeObject* CompileCacheable(MacroAssembler* masm, |
| 719 JSObject* object, | 722 JSObject* object, |
| 720 Register receiver, | 723 Register receiver, |
| 721 Register scratch1, | 724 Register scratch1, |
| 722 Register scratch2, | 725 Register scratch2, |
| 723 Register scratch3, | 726 Register scratch3, |
| (...skipping 15 matching lines...) |
| 739 interceptor_holder); | 742 interceptor_holder); |
| 740 if (depth1 == kInvalidProtoDepth) { | 743 if (depth1 == kInvalidProtoDepth) { |
| 741 depth2 = | 744 depth2 = |
| 742 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder, | 745 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder, |
| 743 lookup->holder()); | 746 lookup->holder()); |
| 744 } | 747 } |
| 745 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) || | 748 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) || |
| 746 (depth2 != kInvalidProtoDepth); | 749 (depth2 != kInvalidProtoDepth); |
| 747 } | 750 } |
| 748 | 751 |
| 749 __ IncrementCounter(&Counters::call_const_interceptor, 1, | 752 __ IncrementCounter(COUNTERS->call_const_interceptor(), 1, |
| 750 scratch1, scratch2); | 753 scratch1, scratch2); |
| 751 | 754 |
| 752 if (can_do_fast_api_call) { | 755 if (can_do_fast_api_call) { |
| 753 __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1, | 756 __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1, |
| 754 scratch1, scratch2); | 757 scratch1, scratch2); |
| 755 ReserveSpaceForFastApiCall(masm, scratch1); | 758 ReserveSpaceForFastApiCall(masm, scratch1); |
| 756 } | 759 } |
| 757 | 760 |
| 758 // Check that the maps from receiver to interceptor's holder | 761 // Check that the maps from receiver to interceptor's holder |
| 759 // haven't changed and thus we can invoke interceptor. | 762 // haven't changed and thus we can invoke interceptor. |
| 760 Label miss_cleanup; | 763 Label miss_cleanup; |
| 761 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; | 764 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; |
| 762 Register holder = | 765 Register holder = |
| 763 stub_compiler_->CheckPrototypes(object, receiver, | 766 stub_compiler_->CheckPrototypes(object, receiver, |
| (...skipping 40 matching lines...) |
| 804 FreeSpaceForFastApiCall(masm); | 807 FreeSpaceForFastApiCall(masm); |
| 805 __ b(miss_label); | 808 __ b(miss_label); |
| 806 } | 809 } |
| 807 | 810 |
| 808 // Invoke a regular function. | 811 // Invoke a regular function. |
| 809 __ bind(®ular_invoke); | 812 __ bind(®ular_invoke); |
| 810 if (can_do_fast_api_call) { | 813 if (can_do_fast_api_call) { |
| 811 FreeSpaceForFastApiCall(masm); | 814 FreeSpaceForFastApiCall(masm); |
| 812 } | 815 } |
| 813 | 816 |
| 814 return Heap::undefined_value(); | 817 return HEAP->undefined_value(); |
| 815 } | 818 } |
| 816 | 819 |
| 817 void CompileRegular(MacroAssembler* masm, | 820 void CompileRegular(MacroAssembler* masm, |
| 818 JSObject* object, | 821 JSObject* object, |
| 819 Register receiver, | 822 Register receiver, |
| 820 Register scratch1, | 823 Register scratch1, |
| 821 Register scratch2, | 824 Register scratch2, |
| 822 Register scratch3, | 825 Register scratch3, |
| 823 String* name, | 826 String* name, |
| 824 JSObject* interceptor_holder, | 827 JSObject* interceptor_holder, |
| (...skipping 110 matching lines...) |
| 935 // Convert and store int passed in register ival to IEEE 754 single precision | 938 // Convert and store int passed in register ival to IEEE 754 single precision |
| 936 // floating point value at memory location (dst + 4 * wordoffset) | 939 // floating point value at memory location (dst + 4 * wordoffset) |
| 937 // If VFP3 is available use it for conversion. | 940 // If VFP3 is available use it for conversion. |
| 938 static void StoreIntAsFloat(MacroAssembler* masm, | 941 static void StoreIntAsFloat(MacroAssembler* masm, |
| 939 Register dst, | 942 Register dst, |
| 940 Register wordoffset, | 943 Register wordoffset, |
| 941 Register ival, | 944 Register ival, |
| 942 Register fval, | 945 Register fval, |
| 943 Register scratch1, | 946 Register scratch1, |
| 944 Register scratch2) { | 947 Register scratch2) { |
| 945 if (CpuFeatures::IsSupported(VFP3)) { | 948 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 946 CpuFeatures::Scope scope(VFP3); | 949 CpuFeatures::Scope scope(VFP3); |
| 947 __ vmov(s0, ival); | 950 __ vmov(s0, ival); |
| 948 __ add(scratch1, dst, Operand(wordoffset, LSL, 2)); | 951 __ add(scratch1, dst, Operand(wordoffset, LSL, 2)); |
| 949 __ vcvt_f32_s32(s0, s0); | 952 __ vcvt_f32_s32(s0, s0); |
| 950 __ vstr(s0, scratch1, 0); | 953 __ vstr(s0, scratch1, 0); |
| 951 } else { | 954 } else { |
| 952 Label not_special, done; | 955 Label not_special, done; |
| 953 // Move sign bit from source to destination. This works because the sign | 956 // Move sign bit from source to destination. This works because the sign |
| 954 // bit in the exponent word of the double has the same position and polarity | 957 // bit in the exponent word of the double has the same position and polarity |
| 955 // as the 2's complement sign bit in a Smi. | 958 // as the 2's complement sign bit in a Smi. |
| (...skipping 117 matching lines...) |
| 1073 // Only global objects and objects that do not require access | 1076 // Only global objects and objects that do not require access |
| 1074 // checks are allowed in stubs. | 1077 // checks are allowed in stubs. |
| 1075 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); | 1078 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); |
| 1076 | 1079 |
| 1077 ASSERT(current->GetPrototype()->IsJSObject()); | 1080 ASSERT(current->GetPrototype()->IsJSObject()); |
| 1078 JSObject* prototype = JSObject::cast(current->GetPrototype()); | 1081 JSObject* prototype = JSObject::cast(current->GetPrototype()); |
| 1079 if (!current->HasFastProperties() && | 1082 if (!current->HasFastProperties() && |
| 1080 !current->IsJSGlobalObject() && | 1083 !current->IsJSGlobalObject() && |
| 1081 !current->IsJSGlobalProxy()) { | 1084 !current->IsJSGlobalProxy()) { |
| 1082 if (!name->IsSymbol()) { | 1085 if (!name->IsSymbol()) { |
| 1083 MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name); | 1086 MaybeObject* maybe_lookup_result = HEAP->LookupSymbol(name); |
| 1084 Object* lookup_result = NULL; // Initialization to please compiler. | 1087 Object* lookup_result = NULL; // Initialization to please compiler. |
| 1085 if (!maybe_lookup_result->ToObject(&lookup_result)) { | 1088 if (!maybe_lookup_result->ToObject(&lookup_result)) { |
| 1086 set_failure(Failure::cast(maybe_lookup_result)); | 1089 set_failure(Failure::cast(maybe_lookup_result)); |
| 1087 return reg; | 1090 return reg; |
| 1088 } | 1091 } |
| 1089 name = String::cast(lookup_result); | 1092 name = String::cast(lookup_result); |
| 1090 } | 1093 } |
| 1091 ASSERT(current->property_dictionary()->FindEntry(name) == | 1094 ASSERT(current->property_dictionary()->FindEntry(name) == |
| 1092 StringDictionary::kNotFound); | 1095 StringDictionary::kNotFound); |
| 1093 | 1096 |
| 1094 GenerateDictionaryNegativeLookup(masm(), | 1097 GenerateDictionaryNegativeLookup(masm(), |
| 1095 miss, | 1098 miss, |
| 1096 reg, | 1099 reg, |
| 1097 name, | 1100 name, |
| 1098 scratch1, | 1101 scratch1, |
| 1099 scratch2); | 1102 scratch2); |
| 1100 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1103 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 1101 reg = holder_reg; // from now the object is in holder_reg | 1104 reg = holder_reg; // from now the object is in holder_reg |
| 1102 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); | 1105 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); |
| 1103 } else if (Heap::InNewSpace(prototype)) { | 1106 } else if (HEAP->InNewSpace(prototype)) { |
| 1104 // Get the map of the current object. | 1107 // Get the map of the current object. |
| 1105 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1108 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 1106 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); | 1109 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); |
| 1107 | 1110 |
| 1108 // Branch on the result of the map check. | 1111 // Branch on the result of the map check. |
| 1109 __ b(ne, miss); | 1112 __ b(ne, miss); |
| 1110 | 1113 |
| 1111 // Check access rights to the global object. This has to happen | 1114 // Check access rights to the global object. This has to happen |
| 1112 // after the map check so that we know that the object is | 1115 // after the map check so that we know that the object is |
| 1113 // actually a global object. | 1116 // actually a global object. |
| (...skipping 33 matching lines...) |
| 1147 // Go to the next object in the prototype chain. | 1150 // Go to the next object in the prototype chain. |
| 1148 current = prototype; | 1151 current = prototype; |
| 1149 } | 1152 } |
| 1150 | 1153 |
| 1151 // Check the holder map. | 1154 // Check the holder map. |
| 1152 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1155 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 1153 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); | 1156 __ cmp(scratch1, Operand(Handle<Map>(current->map()))); |
| 1154 __ b(ne, miss); | 1157 __ b(ne, miss); |
| 1155 | 1158 |
| 1156 // Log the check depth. | 1159 // Log the check depth. |
| 1157 LOG(IntEvent("check-maps-depth", depth + 1)); | 1160 LOG(Isolate::Current(), IntEvent("check-maps-depth", depth + 1)); |
| 1158 | 1161 |
| 1159 // Perform security check for access to the global object. | 1162 // Perform security check for access to the global object. |
| 1160 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); | 1163 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); |
| 1161 if (holder->IsJSGlobalProxy()) { | 1164 if (holder->IsJSGlobalProxy()) { |
| 1162 __ CheckAccessGlobalProxy(reg, scratch1, miss); | 1165 __ CheckAccessGlobalProxy(reg, scratch1, miss); |
| 1163 }; | 1166 }; |
| 1164 | 1167 |
| 1165 // If we've skipped any global objects, it's not enough to verify | 1168 // If we've skipped any global objects, it's not enough to verify |
| 1166 // that their maps haven't changed. We also need to check that the | 1169 // that their maps haven't changed. We also need to check that the |
| 1167 // property cell for the property is still empty. | 1170 // property cell for the property is still empty. |
| (...skipping 73 matching lines...) |
| 1241 // Check that the maps haven't changed. | 1244 // Check that the maps haven't changed. |
| 1242 Register reg = | 1245 Register reg = |
| 1243 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, | 1246 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, |
| 1244 name, miss); | 1247 name, miss); |
| 1245 | 1248 |
| 1246 // Build AccessorInfo::args_ list on the stack and push property name below | 1249 // Build AccessorInfo::args_ list on the stack and push property name below |
| 1247 // the exit frame to make GC aware of them and store pointers to them. | 1250 // the exit frame to make GC aware of them and store pointers to them. |
| 1248 __ push(receiver); | 1251 __ push(receiver); |
| 1249 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_ | 1252 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_ |
| 1250 Handle<AccessorInfo> callback_handle(callback); | 1253 Handle<AccessorInfo> callback_handle(callback); |
| 1251 if (Heap::InNewSpace(callback_handle->data())) { | 1254 if (HEAP->InNewSpace(callback_handle->data())) { |
| 1252 __ Move(scratch3, callback_handle); | 1255 __ Move(scratch3, callback_handle); |
| 1253 __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset)); | 1256 __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset)); |
| 1254 } else { | 1257 } else { |
| 1255 __ Move(scratch3, Handle<Object>(callback_handle->data())); | 1258 __ Move(scratch3, Handle<Object>(callback_handle->data())); |
| 1256 } | 1259 } |
| 1257 __ Push(reg, scratch3, name_reg); | 1260 __ Push(reg, scratch3, name_reg); |
| 1258 __ mov(r0, sp); // r0 = Handle<String> | 1261 __ mov(r0, sp); // r0 = Handle<String> |
| 1259 | 1262 |
| 1260 Address getter_address = v8::ToCData<Address>(callback->getter()); | 1263 Address getter_address = v8::ToCData<Address>(callback->getter()); |
| 1261 ApiFunction fun(getter_address); | 1264 ApiFunction fun(getter_address); |
| (...skipping 193 matching lines...) |
| 1455 | 1458 |
| 1456 | 1459 |
| 1457 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, | 1460 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, |
| 1458 JSFunction* function, | 1461 JSFunction* function, |
| 1459 Label* miss) { | 1462 Label* miss) { |
| 1460 // Get the value from the cell. | 1463 // Get the value from the cell. |
| 1461 __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); | 1464 __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 1462 __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); | 1465 __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); |
| 1463 | 1466 |
| 1464 // Check that the cell contains the same function. | 1467 // Check that the cell contains the same function. |
| 1465 if (Heap::InNewSpace(function)) { | 1468 if (HEAP->InNewSpace(function)) { |
| 1466 // We can't embed a pointer to a function in new space so we have | 1469 // We can't embed a pointer to a function in new space so we have |
| 1467 // to verify that the shared function info is unchanged. This has | 1470 // to verify that the shared function info is unchanged. This has |
| 1468 // the nice side effect that multiple closures based on the same | 1471 // the nice side effect that multiple closures based on the same |
| 1469 // function can all use this call IC. Before we load through the | 1472 // function can all use this call IC. Before we load through the |
| 1470 // function, we have to verify that it still is a function. | 1473 // function, we have to verify that it still is a function. |
| 1471 __ tst(r1, Operand(kSmiTagMask)); | 1474 __ tst(r1, Operand(kSmiTagMask)); |
| 1472 __ b(eq, miss); | 1475 __ b(eq, miss); |
| 1473 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); | 1476 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); |
| 1474 __ b(ne, miss); | 1477 __ b(ne, miss); |
| 1475 | 1478 |
| 1476 // Check the shared function info. Make sure it hasn't changed. | 1479 // Check the shared function info. Make sure it hasn't changed. |
| 1477 __ Move(r3, Handle<SharedFunctionInfo>(function->shared())); | 1480 __ Move(r3, Handle<SharedFunctionInfo>(function->shared())); |
| 1478 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 1481 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 1479 __ cmp(r4, r3); | 1482 __ cmp(r4, r3); |
| 1480 __ b(ne, miss); | 1483 __ b(ne, miss); |
| 1481 } else { | 1484 } else { |
| 1482 __ cmp(r1, Operand(Handle<JSFunction>(function))); | 1485 __ cmp(r1, Operand(Handle<JSFunction>(function))); |
| 1483 __ b(ne, miss); | 1486 __ b(ne, miss); |
| 1484 } | 1487 } |
| 1485 } | 1488 } |
| 1486 | 1489 |
| 1487 | 1490 |
| 1488 MaybeObject* CallStubCompiler::GenerateMissBranch() { | 1491 MaybeObject* CallStubCompiler::GenerateMissBranch() { |
| 1489 MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(), | 1492 MaybeObject* maybe_obj = Isolate::Current()->stub_cache()->ComputeCallMiss( |
| 1490 kind_); | 1493 arguments().immediate(), kind_); |
| 1491 Object* obj; | 1494 Object* obj; |
| 1492 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 1495 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 1493 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); | 1496 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); |
| 1494 return obj; | 1497 return obj; |
| 1495 } | 1498 } |
| 1496 | 1499 |
| 1497 | 1500 |
| 1498 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, | 1501 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, |
| 1499 JSObject* holder, | 1502 JSObject* holder, |
| 1500 int index, | 1503 int index, |
| (...skipping 39 matching lines...) |
| 1540 String* name) { | 1543 String* name) { |
| 1541 // ----------- S t a t e ------------- | 1544 // ----------- S t a t e ------------- |
| 1542 // -- r2 : name | 1545 // -- r2 : name |
| 1543 // -- lr : return address | 1546 // -- lr : return address |
| 1544 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1547 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1545 // -- ... | 1548 // -- ... |
| 1546 // -- sp[argc * 4] : receiver | 1549 // -- sp[argc * 4] : receiver |
| 1547 // ----------------------------------- | 1550 // ----------------------------------- |
| 1548 | 1551 |
| 1549 // If object is not an array, bail out to regular call. | 1552 // If object is not an array, bail out to regular call. |
| 1550 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); | 1553 if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value(); |
| 1551 | 1554 |
| 1552 Label miss; | 1555 Label miss; |
| 1553 | 1556 |
| 1554 GenerateNameCheck(name, &miss); | 1557 GenerateNameCheck(name, &miss); |
| 1555 | 1558 |
| 1556 Register receiver = r1; | 1559 Register receiver = r1; |
| 1557 | 1560 |
| 1558 // Get the receiver from the stack | 1561 // Get the receiver from the stack |
| 1559 const int argc = arguments().immediate(); | 1562 const int argc = arguments().immediate(); |
| 1560 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); | 1563 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); |
| (...skipping 139 matching lines...) |
| 1700 String* name) { | 1703 String* name) { |
| 1701 // ----------- S t a t e ------------- | 1704 // ----------- S t a t e ------------- |
| 1702 // -- r2 : name | 1705 // -- r2 : name |
| 1703 // -- lr : return address | 1706 // -- lr : return address |
| 1704 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1707 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1705 // -- ... | 1708 // -- ... |
| 1706 // -- sp[argc * 4] : receiver | 1709 // -- sp[argc * 4] : receiver |
| 1707 // ----------------------------------- | 1710 // ----------------------------------- |
| 1708 | 1711 |
| 1709 // If object is not an array, bail out to regular call. | 1712 // If object is not an array, bail out to regular call. |
| 1710 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); | 1713 if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value(); |
| 1711 | 1714 |
| 1712 Label miss, return_undefined, call_builtin; | 1715 Label miss, return_undefined, call_builtin; |
| 1713 | 1716 |
| 1714 Register receiver = r1; | 1717 Register receiver = r1; |
| 1715 Register elements = r3; | 1718 Register elements = r3; |
| 1716 | 1719 |
| 1717 GenerateNameCheck(name, &miss); | 1720 GenerateNameCheck(name, &miss); |
| 1718 | 1721 |
| 1719 // Get the receiver from the stack | 1722 // Get the receiver from the stack |
| 1720 const int argc = arguments().immediate(); | 1723 const int argc = arguments().immediate(); |
| (...skipping 66 matching lines...) |
| 1787 String* name) { | 1790 String* name) { |
| 1788 // ----------- S t a t e ------------- | 1791 // ----------- S t a t e ------------- |
| 1789 // -- r2 : function name | 1792 // -- r2 : function name |
| 1790 // -- lr : return address | 1793 // -- lr : return address |
| 1791 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1794 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1792 // -- ... | 1795 // -- ... |
| 1793 // -- sp[argc * 4] : receiver | 1796 // -- sp[argc * 4] : receiver |
| 1794 // ----------------------------------- | 1797 // ----------------------------------- |
| 1795 | 1798 |
| 1796 // If object is not a string, bail out to regular call. | 1799 // If object is not a string, bail out to regular call. |
| 1797 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); | 1800 if (!object->IsString() || cell != NULL) return HEAP->undefined_value(); |
| 1798 | 1801 |
| 1799 const int argc = arguments().immediate(); | 1802 const int argc = arguments().immediate(); |
| 1800 | 1803 |
| 1801 Label miss; | 1804 Label miss; |
| 1802 Label name_miss; | 1805 Label name_miss; |
| 1803 Label index_out_of_range; | 1806 Label index_out_of_range; |
| 1804 Label* index_out_of_range_label = &index_out_of_range; | 1807 Label* index_out_of_range_label = &index_out_of_range; |
| 1805 | 1808 |
| 1806 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { | 1809 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { |
| 1807 index_out_of_range_label = &miss; | 1810 index_out_of_range_label = &miss; |
| (...skipping 65 matching lines...) |
| 1873 String* name) { | 1876 String* name) { |
| 1874 // ----------- S t a t e ------------- | 1877 // ----------- S t a t e ------------- |
| 1875 // -- r2 : function name | 1878 // -- r2 : function name |
| 1876 // -- lr : return address | 1879 // -- lr : return address |
| 1877 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1880 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1878 // -- ... | 1881 // -- ... |
| 1879 // -- sp[argc * 4] : receiver | 1882 // -- sp[argc * 4] : receiver |
| 1880 // ----------------------------------- | 1883 // ----------------------------------- |
| 1881 | 1884 |
| 1882 // If object is not a string, bail out to regular call. | 1885 // If object is not a string, bail out to regular call. |
| 1883 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); | 1886 if (!object->IsString() || cell != NULL) return HEAP->undefined_value(); |
| 1884 | 1887 |
| 1885 const int argc = arguments().immediate(); | 1888 const int argc = arguments().immediate(); |
| 1886 | 1889 |
| 1887 Label miss; | 1890 Label miss; |
| 1888 Label name_miss; | 1891 Label name_miss; |
| 1889 Label index_out_of_range; | 1892 Label index_out_of_range; |
| 1890 Label* index_out_of_range_label = &index_out_of_range; | 1893 Label* index_out_of_range_label = &index_out_of_range; |
| 1891 | 1894 |
| 1892 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { | 1895 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { |
| 1893 index_out_of_range_label = &miss; | 1896 index_out_of_range_label = &miss; |
| (...skipping 70 matching lines...) |
| 1964 // -- lr : return address | 1967 // -- lr : return address |
| 1965 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 1968 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 1966 // -- ... | 1969 // -- ... |
| 1967 // -- sp[argc * 4] : receiver | 1970 // -- sp[argc * 4] : receiver |
| 1968 // ----------------------------------- | 1971 // ----------------------------------- |
| 1969 | 1972 |
| 1970 const int argc = arguments().immediate(); | 1973 const int argc = arguments().immediate(); |
| 1971 | 1974 |
| 1972 // If the object is not a JSObject or we got an unexpected number of | 1975 // If the object is not a JSObject or we got an unexpected number of |
| 1973 // arguments, bail out to the regular call. | 1976 // arguments, bail out to the regular call. |
| 1974 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); | 1977 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value(); |
| 1975 | 1978 |
| 1976 Label miss; | 1979 Label miss; |
| 1977 GenerateNameCheck(name, &miss); | 1980 GenerateNameCheck(name, &miss); |
| 1978 | 1981 |
| 1979 if (cell == NULL) { | 1982 if (cell == NULL) { |
| 1980 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); | 1983 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); |
| 1981 | 1984 |
| 1982 STATIC_ASSERT(kSmiTag == 0); | 1985 STATIC_ASSERT(kSmiTag == 0); |
| 1983 __ tst(r1, Operand(kSmiTagMask)); | 1986 __ tst(r1, Operand(kSmiTagMask)); |
| 1984 __ b(eq, &miss); | 1987 __ b(eq, &miss); |
| (...skipping 50 matching lines...) |
| 2035 JSFunction* function, | 2038 JSFunction* function, |
| 2036 String* name) { | 2039 String* name) { |
| 2037 // ----------- S t a t e ------------- | 2040 // ----------- S t a t e ------------- |
| 2038 // -- r2 : function name | 2041 // -- r2 : function name |
| 2039 // -- lr : return address | 2042 // -- lr : return address |
| 2040 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 2043 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 2041 // -- ... | 2044 // -- ... |
| 2042 // -- sp[argc * 4] : receiver | 2045 // -- sp[argc * 4] : receiver |
| 2043 // ----------------------------------- | 2046 // ----------------------------------- |
| 2044 | 2047 |
| 2045 if (!CpuFeatures::IsSupported(VFP3)) return Heap::undefined_value(); | 2048 if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) |
| | 2049 return HEAP->undefined_value(); |
| | 2050 |
| 2046 CpuFeatures::Scope scope_vfp3(VFP3); | 2051 CpuFeatures::Scope scope_vfp3(VFP3); |
| 2047 | 2052 |
| 2048 const int argc = arguments().immediate(); | 2053 const int argc = arguments().immediate(); |
| 2049 | 2054 |
| 2050 // If the object is not a JSObject or we got an unexpected number of | 2055 // If the object is not a JSObject or we got an unexpected number of |
| 2051 // arguments, bail out to the regular call. | 2056 // arguments, bail out to the regular call. |
| 2052 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); | 2057 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value(); |
| 2053 | 2058 |
| 2054 Label miss, slow; | 2059 Label miss, slow; |
| 2055 GenerateNameCheck(name, &miss); | 2060 GenerateNameCheck(name, &miss); |
| 2056 | 2061 |
| 2057 if (cell == NULL) { | 2062 if (cell == NULL) { |
| 2058 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); | 2063 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); |
| 2059 | 2064 |
| 2060 STATIC_ASSERT(kSmiTag == 0); | 2065 STATIC_ASSERT(kSmiTag == 0); |
| 2061 __ JumpIfSmi(r1, &miss); | 2066 __ JumpIfSmi(r1, &miss); |
| 2062 | 2067 |
| (...skipping 121 matching lines...) |
| 2184 // -- lr : return address | 2189 // -- lr : return address |
| 2185 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) | 2190 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) |
| 2186 // -- ... | 2191 // -- ... |
| 2187 // -- sp[argc * 4] : receiver | 2192 // -- sp[argc * 4] : receiver |
| 2188 // ----------------------------------- | 2193 // ----------------------------------- |
| 2189 | 2194 |
| 2190 const int argc = arguments().immediate(); | 2195 const int argc = arguments().immediate(); |
| 2191 | 2196 |
| 2192 // If the object is not a JSObject or we got an unexpected number of | 2197 // If the object is not a JSObject or we got an unexpected number of |
| 2193 // arguments, bail out to the regular call. | 2198 // arguments, bail out to the regular call. |
| 2194 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); | 2199 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value(); |
| 2195 | 2200 |
| 2196 Label miss; | 2201 Label miss; |
| 2197 GenerateNameCheck(name, &miss); | 2202 GenerateNameCheck(name, &miss); |
| 2198 | 2203 |
| 2199 if (cell == NULL) { | 2204 if (cell == NULL) { |
| 2200 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); | 2205 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); |
| 2201 | 2206 |
| 2202 STATIC_ASSERT(kSmiTag == 0); | 2207 STATIC_ASSERT(kSmiTag == 0); |
| 2203 __ tst(r1, Operand(kSmiTagMask)); | 2208 __ tst(r1, Operand(kSmiTagMask)); |
| 2204 __ b(eq, &miss); | 2209 __ b(eq, &miss); |
| (...skipping 76 matching lines...) |
| 2281 MaybeObject* CallStubCompiler::CompileFastApiCall( | 2286 MaybeObject* CallStubCompiler::CompileFastApiCall( |
| 2282 const CallOptimization& optimization, | 2287 const CallOptimization& optimization, |
| 2283 Object* object, | 2288 Object* object, |
| 2284 JSObject* holder, | 2289 JSObject* holder, |
| 2285 JSGlobalPropertyCell* cell, | 2290 JSGlobalPropertyCell* cell, |
| 2286 JSFunction* function, | 2291 JSFunction* function, |
| 2287 String* name) { | 2292 String* name) { |
| 2288 ASSERT(optimization.is_simple_api_call()); | 2293 ASSERT(optimization.is_simple_api_call()); |
| 2289 // Bail out if object is a global object as we don't want to | 2294 // Bail out if object is a global object as we don't want to |
| 2290 // repatch it to global receiver. | 2295 // repatch it to global receiver. |
| 2291 if (object->IsGlobalObject()) return Heap::undefined_value(); | 2296 if (object->IsGlobalObject()) return HEAP->undefined_value(); |
| 2292 if (cell != NULL) return Heap::undefined_value(); | 2297 if (cell != NULL) return HEAP->undefined_value(); |
| 2293 int depth = optimization.GetPrototypeDepthOfExpectedType( | 2298 int depth = optimization.GetPrototypeDepthOfExpectedType( |
| 2294 JSObject::cast(object), holder); | 2299 JSObject::cast(object), holder); |
| 2295 if (depth == kInvalidProtoDepth) return Heap::undefined_value(); | 2300 if (depth == kInvalidProtoDepth) return HEAP->undefined_value(); |
| 2296 | 2301 |
| 2297 Label miss, miss_before_stack_reserved; | 2302 Label miss, miss_before_stack_reserved; |
| 2298 | 2303 |
| 2299 GenerateNameCheck(name, &miss_before_stack_reserved); | 2304 GenerateNameCheck(name, &miss_before_stack_reserved); |
| 2300 | 2305 |
| 2301 // Get the receiver from the stack. | 2306 // Get the receiver from the stack. |
| 2302 const int argc = arguments().immediate(); | 2307 const int argc = arguments().immediate(); |
| 2303 __ ldr(r1, MemOperand(sp, argc * kPointerSize)); | 2308 __ ldr(r1, MemOperand(sp, argc * kPointerSize)); |
| 2304 | 2309 |
| 2305 // Check that the receiver isn't a smi. | 2310 // Check that the receiver isn't a smi. |
| 2306 __ tst(r1, Operand(kSmiTagMask)); | 2311 __ tst(r1, Operand(kSmiTagMask)); |
| 2307 __ b(eq, &miss_before_stack_reserved); | 2312 __ b(eq, &miss_before_stack_reserved); |
| 2308 | 2313 |
| 2309 __ IncrementCounter(&Counters::call_const, 1, r0, r3); | 2314 __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3); |
| 2310 __ IncrementCounter(&Counters::call_const_fast_api, 1, r0, r3); | 2315 __ IncrementCounter(COUNTERS->call_const_fast_api(), 1, r0, r3); |
| 2311 | 2316 |
| 2312 ReserveSpaceForFastApiCall(masm(), r0); | 2317 ReserveSpaceForFastApiCall(masm(), r0); |
| 2313 | 2318 |
| 2314 // Check that the maps haven't changed and find a Holder as a side effect. | 2319 // Check that the maps haven't changed and find a Holder as a side effect. |
| 2315 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, | 2320 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, |
| 2316 depth, &miss); | 2321 depth, &miss); |
| 2317 | 2322 |
| 2318 MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc); | 2323 MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc); |
| 2319 if (result->IsFailure()) return result; | 2324 if (result->IsFailure()) return result; |
| 2320 | 2325 |
| (...skipping 43 matching lines...) |
| 2364 __ b(eq, &miss); | 2369 __ b(eq, &miss); |
| 2365 } | 2370 } |
| 2366 | 2371 |
| 2367 // Make sure that it's okay not to patch the on stack receiver | 2372 // Make sure that it's okay not to patch the on stack receiver |
| 2368 // unless we're doing a receiver map check. | 2373 // unless we're doing a receiver map check. |
| 2369 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); | 2374 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); |
| 2370 | 2375 |
| 2371 SharedFunctionInfo* function_info = function->shared(); | 2376 SharedFunctionInfo* function_info = function->shared(); |
| 2372 switch (check) { | 2377 switch (check) { |
| 2373 case RECEIVER_MAP_CHECK: | 2378 case RECEIVER_MAP_CHECK: |
| 2374 __ IncrementCounter(&Counters::call_const, 1, r0, r3); | 2379 __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3); |
| 2375 | 2380 |
| 2376 // Check that the maps haven't changed. | 2381 // Check that the maps haven't changed. |
| 2377 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, | 2382 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, |
| 2378 &miss); | 2383 &miss); |
| 2379 | 2384 |
| 2380 // Patch the receiver on the stack with the global proxy if | 2385 // Patch the receiver on the stack with the global proxy if |
| 2381 // necessary. | 2386 // necessary. |
| 2382 if (object->IsGlobalObject()) { | 2387 if (object->IsGlobalObject()) { |
| 2383 __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); | 2388 __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); |
| 2384 __ str(r3, MemOperand(sp, argc * kPointerSize)); | 2389 __ str(r3, MemOperand(sp, argc * kPointerSize)); |
| (...skipping 170 matching lines...) |
| 2555 // necessary. | 2560 // necessary. |
| 2556 if (object->IsGlobalObject()) { | 2561 if (object->IsGlobalObject()) { |
| 2557 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); | 2562 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); |
| 2558 __ str(r3, MemOperand(sp, argc * kPointerSize)); | 2563 __ str(r3, MemOperand(sp, argc * kPointerSize)); |
| 2559 } | 2564 } |
| 2560 | 2565 |
| 2561 // Setup the context (function already in r1). | 2566 // Setup the context (function already in r1). |
| 2562 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 2567 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 2563 | 2568 |
| 2564 // Jump to the cached code (tail call). | 2569 // Jump to the cached code (tail call). |
| 2565 __ IncrementCounter(&Counters::call_global_inline, 1, r3, r4); | 2570 __ IncrementCounter(COUNTERS->call_global_inline(), 1, r3, r4); |
| 2566 ASSERT(function->is_compiled()); | 2571 ASSERT(function->is_compiled()); |
| 2567 Handle<Code> code(function->code()); | 2572 Handle<Code> code(function->code()); |
| 2568 ParameterCount expected(function->shared()->formal_parameter_count()); | 2573 ParameterCount expected(function->shared()->formal_parameter_count()); |
| 2569 if (V8::UseCrankshaft()) { | 2574 if (V8::UseCrankshaft()) { |
| 2570 // TODO(kasperl): For now, we always call indirectly through the | 2575 // TODO(kasperl): For now, we always call indirectly through the |
| 2571 // code field in the function to allow recompilation to take effect | 2576 // code field in the function to allow recompilation to take effect |
| 2572 // without changing any of the call sites. | 2577 // without changing any of the call sites. |
| 2573 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 2578 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); |
| 2574 __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION); | 2579 __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION); |
| 2575 } else { | 2580 } else { |
| 2576 __ InvokeCode(code, expected, arguments(), | 2581 __ InvokeCode(code, expected, arguments(), |
| 2577 RelocInfo::CODE_TARGET, JUMP_FUNCTION); | 2582 RelocInfo::CODE_TARGET, JUMP_FUNCTION); |
| 2578 } | 2583 } |
| 2579 | 2584 |
| 2580 // Handle call cache miss. | 2585 // Handle call cache miss. |
| 2581 __ bind(&miss); | 2586 __ bind(&miss); |
| 2582 __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3); | 2587 __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1, r1, r3); |
| 2583 Object* obj; | 2588 Object* obj; |
| 2584 { MaybeObject* maybe_obj = GenerateMissBranch(); | 2589 { MaybeObject* maybe_obj = GenerateMissBranch(); |
| 2585 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 2590 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 2586 } | 2591 } |
| 2587 | 2592 |
| 2588 // Return the generated code. | 2593 // Return the generated code. |
| 2589 return GetCode(NORMAL, name); | 2594 return GetCode(NORMAL, name); |
| 2590 } | 2595 } |
| 2591 | 2596 |
| 2592 | 2597 |
| 2593 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, | 2598 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, |
| 2594 int index, | 2599 int index, |
| 2595 Map* transition, | 2600 Map* transition, |
| 2596 String* name) { | 2601 String* name) { |
| 2597 // ----------- S t a t e ------------- | 2602 // ----------- S t a t e ------------- |
| 2598 // -- r0 : value | 2603 // -- r0 : value |
| 2599 // -- r1 : receiver | 2604 // -- r1 : receiver |
| 2600 // -- r2 : name | 2605 // -- r2 : name |
| 2601 // -- lr : return address | 2606 // -- lr : return address |
| 2602 // ----------------------------------- | 2607 // ----------------------------------- |
| 2603 Label miss; | 2608 Label miss; |
| 2604 | 2609 |
| 2605 GenerateStoreField(masm(), | 2610 GenerateStoreField(masm(), |
| 2606 object, | 2611 object, |
| 2607 index, | 2612 index, |
| 2608 transition, | 2613 transition, |
| 2609 r1, r2, r3, | 2614 r1, r2, r3, |
| 2610 &miss); | 2615 &miss); |
| 2611 __ bind(&miss); | 2616 __ bind(&miss); |
| 2612 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2617 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2618 Builtins::StoreIC_Miss)); |
| 2613 __ Jump(ic, RelocInfo::CODE_TARGET); | 2619 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2614 | 2620 |
| 2615 // Return the generated code. | 2621 // Return the generated code. |
| 2616 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); | 2622 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); |
| 2617 } | 2623 } |
| 2618 | 2624 |
| 2619 | 2625 |
| 2620 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, | 2626 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, |
| 2621 AccessorInfo* callback, | 2627 AccessorInfo* callback, |
| 2622 String* name) { | 2628 String* name) { |
| (...skipping 27 matching lines...) |
| 2650 __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback info | 2656 __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback info |
| 2651 __ Push(ip, r2, r0); | 2657 __ Push(ip, r2, r0); |
| 2652 | 2658 |
| 2653 // Do tail-call to the runtime system. | 2659 // Do tail-call to the runtime system. |
| 2654 ExternalReference store_callback_property = | 2660 ExternalReference store_callback_property = |
| 2655 ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); | 2661 ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); |
| 2656 __ TailCallExternalReference(store_callback_property, 4, 1); | 2662 __ TailCallExternalReference(store_callback_property, 4, 1); |
| 2657 | 2663 |
| 2658 // Handle store cache miss. | 2664 // Handle store cache miss. |
| 2659 __ bind(&miss); | 2665 __ bind(&miss); |
| 2660 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2666 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2667 Builtins::StoreIC_Miss)); |
| 2661 __ Jump(ic, RelocInfo::CODE_TARGET); | 2668 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2662 | 2669 |
| 2663 // Return the generated code. | 2670 // Return the generated code. |
| 2664 return GetCode(CALLBACKS, name); | 2671 return GetCode(CALLBACKS, name); |
| 2665 } | 2672 } |
| 2666 | 2673 |
| 2667 | 2674 |
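The miss-handler lookup moves in the same direction: the StoreIC_Miss code object is no longer fetched from a static Builtins table but from the current isolate, as the new Isolate::Current()->builtins()->builtin(...) calls in the store stubs show. A hedged sketch of that lookup, again with stand-in types only:

    // Stand-in types; the point is the lookup path, not V8's real API.
    #include <array>
    #include <cassert>

    struct Code { int marker = 0; };

    struct Builtins {
      enum Name { StoreIC_Miss, KeyedStoreIC_Miss, kBuiltinCount };
      Code* builtin(Name name) { return &table_[name]; }
     private:
      std::array<Code, kBuiltinCount> table_{};
    };

    struct Isolate {
      Builtins* builtins() { return &builtins_; }
      static Isolate* Current() { static Isolate it; return &it; }
     private:
      Builtins builtins_;
    };

    int main() {
      // Old: Code* ic = Builtins::builtin(Builtins::StoreIC_Miss);  // static table
      Code* ic = Isolate::Current()->builtins()->builtin(Builtins::StoreIC_Miss);
      assert(ic != nullptr);  // the stub tail-jumps to this code object on a miss
      return 0;
    }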
| 2668 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, | 2675 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, |
| 2669 String* name) { | 2676 String* name) { |
| 2670 // ----------- S t a t e ------------- | 2677 // ----------- S t a t e ------------- |
| (...skipping 27 matching lines...) |
| 2698 __ mov(r0, Operand(Smi::FromInt(strict_mode_))); | 2705 __ mov(r0, Operand(Smi::FromInt(strict_mode_))); |
| 2699 __ push(r0); // strict mode | 2706 __ push(r0); // strict mode |
| 2700 | 2707 |
| 2701 // Do tail-call to the runtime system. | 2708 // Do tail-call to the runtime system. |
| 2702 ExternalReference store_ic_property = | 2709 ExternalReference store_ic_property = |
| 2703 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); | 2710 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); |
| 2704 __ TailCallExternalReference(store_ic_property, 4, 1); | 2711 __ TailCallExternalReference(store_ic_property, 4, 1); |
| 2705 | 2712 |
| 2706 // Handle store cache miss. | 2713 // Handle store cache miss. |
| 2707 __ bind(&miss); | 2714 __ bind(&miss); |
| 2708 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2715 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2716 Builtins::StoreIC_Miss)); |
| 2709 __ Jump(ic, RelocInfo::CODE_TARGET); | 2717 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2710 | 2718 |
| 2711 // Return the generated code. | 2719 // Return the generated code. |
| 2712 return GetCode(INTERCEPTOR, name); | 2720 return GetCode(INTERCEPTOR, name); |
| 2713 } | 2721 } |
| 2714 | 2722 |
| 2715 | 2723 |
| 2716 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, | 2724 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, |
| 2717 JSGlobalPropertyCell* cell, | 2725 JSGlobalPropertyCell* cell, |
| 2718 String* name) { | 2726 String* name) { |
| (...skipping 16 matching lines...) |
| 2735 // global object. We bail out to the runtime system to do that. | 2743 // global object. We bail out to the runtime system to do that. |
| 2736 __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell))); | 2744 __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 2737 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex); | 2745 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2738 __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); | 2746 __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); |
| 2739 __ cmp(r5, r6); | 2747 __ cmp(r5, r6); |
| 2740 __ b(eq, &miss); | 2748 __ b(eq, &miss); |
| 2741 | 2749 |
| 2742 // Store the value in the cell. | 2750 // Store the value in the cell. |
| 2743 __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); | 2751 __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset)); |
| 2744 | 2752 |
| 2745 __ IncrementCounter(&Counters::named_store_global_inline, 1, r4, r3); | 2753 __ IncrementCounter(COUNTERS->named_store_global_inline(), 1, r4, r3); |
| 2746 __ Ret(); | 2754 __ Ret(); |
| 2747 | 2755 |
| 2748 // Handle store cache miss. | 2756 // Handle store cache miss. |
| 2749 __ bind(&miss); | 2757 __ bind(&miss); |
| 2750 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r4, r3); | 2758 __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1, r4, r3); |
| 2751 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); | 2759 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 2760 Builtins::StoreIC_Miss)); |
| 2752 __ Jump(ic, RelocInfo::CODE_TARGET); | 2761 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 2753 | 2762 |
| 2754 // Return the generated code. | 2763 // Return the generated code. |
| 2755 return GetCode(NORMAL, name); | 2764 return GetCode(NORMAL, name); |
| 2756 } | 2765 } |
| 2757 | 2766 |
| 2758 | 2767 |
| 2759 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, | 2768 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, |
| 2760 JSObject* object, | 2769 JSObject* object, |
| 2761 JSObject* last) { | 2770 JSObject* last) { |
| (...skipping 26 matching lines...) |
| 2788 | 2797 |
| 2789 // Return undefined if maps of the full prototype chain are still the | 2798 // Return undefined if maps of the full prototype chain are still the |
| 2790 // same and no global property with this name contains a value. | 2799 // same and no global property with this name contains a value. |
| 2791 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 2800 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 2792 __ Ret(); | 2801 __ Ret(); |
| 2793 | 2802 |
| 2794 __ bind(&miss); | 2803 __ bind(&miss); |
| 2795 GenerateLoadMiss(masm(), Code::LOAD_IC); | 2804 GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 2796 | 2805 |
| 2797 // Return the generated code. | 2806 // Return the generated code. |
| 2798 return GetCode(NONEXISTENT, Heap::empty_string()); | 2807 return GetCode(NONEXISTENT, HEAP->empty_string()); |
| 2799 } | 2808 } |
| 2800 | 2809 |
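CompileLoadNonexistent above produces a stub that can answer a named load with undefined outright, as long as every map it recorded along the receiver's prototype chain is still in place (and, per the comment, no global property cell holds a value for the name); any change routes to the LOAD_IC miss handler. A small sketch of that validity test, with a hypothetical Map stand-in compared by identity:

    #include <vector>

    struct Map {};  // stand-in; real maps are compared by pointer identity

    // The stub may answer "undefined" only while every map it embedded at
    // compile time is still the map currently installed on the chain.
    bool CanReturnUndefined(const std::vector<const Map*>& recorded,
                            const std::vector<const Map*>& current) {
      return recorded == current;  // any difference falls through to the miss handler
    }

    int main() {
      Map receiver_map, proto_map;
      std::vector<const Map*> recorded{&receiver_map, &proto_map};
      std::vector<const Map*> current = recorded;  // nothing has changed yet
      return CanReturnUndefined(recorded, current) ? 0 : 1;
    }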
| 2801 | 2810 |
| 2802 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, | 2811 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, |
| 2803 JSObject* holder, | 2812 JSObject* holder, |
| 2804 int index, | 2813 int index, |
| 2805 String* name) { | 2814 String* name) { |
| 2806 // ----------- S t a t e ------------- | 2815 // ----------- S t a t e ------------- |
| 2807 // -- r0 : receiver | 2816 // -- r0 : receiver |
| 2808 // -- r2 : name | 2817 // -- r2 : name |
| (...skipping 114 matching lines...) |
| 2923 __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); | 2932 __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); |
| 2924 | 2933 |
| 2925 // Check for deleted property if property can actually be deleted. | 2934 // Check for deleted property if property can actually be deleted. |
| 2926 if (!is_dont_delete) { | 2935 if (!is_dont_delete) { |
| 2927 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 2936 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 2928 __ cmp(r4, ip); | 2937 __ cmp(r4, ip); |
| 2929 __ b(eq, &miss); | 2938 __ b(eq, &miss); |
| 2930 } | 2939 } |
| 2931 | 2940 |
| 2932 __ mov(r0, r4); | 2941 __ mov(r0, r4); |
| 2933 __ IncrementCounter(&Counters::named_load_global_stub, 1, r1, r3); | 2942 __ IncrementCounter(COUNTERS->named_load_global_stub(), 1, r1, r3); |
| 2934 __ Ret(); | 2943 __ Ret(); |
| 2935 | 2944 |
| 2936 __ bind(&miss); | 2945 __ bind(&miss); |
| 2937 __ IncrementCounter(&Counters::named_load_global_stub_miss, 1, r1, r3); | 2946 __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1, r1, r3); |
| 2938 GenerateLoadMiss(masm(), Code::LOAD_IC); | 2947 GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 2939 | 2948 |
| 2940 // Return the generated code. | 2949 // Return the generated code. |
| 2941 return GetCode(NORMAL, name); | 2950 return GetCode(NORMAL, name); |
| 2942 } | 2951 } |
| 2943 | 2952 |
| 2944 | 2953 |
| 2945 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, | 2954 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, |
| 2946 JSObject* receiver, | 2955 JSObject* receiver, |
| 2947 JSObject* holder, | 2956 JSObject* holder, |
| (...skipping 124 matching lines...) |
| 3072 } | 3081 } |
| 3073 | 3082 |
| 3074 | 3083 |
| 3075 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { | 3084 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { |
| 3076 // ----------- S t a t e ------------- | 3085 // ----------- S t a t e ------------- |
| 3077 // -- lr : return address | 3086 // -- lr : return address |
| 3078 // -- r0 : key | 3087 // -- r0 : key |
| 3079 // -- r1 : receiver | 3088 // -- r1 : receiver |
| 3080 // ----------------------------------- | 3089 // ----------------------------------- |
| 3081 Label miss; | 3090 Label miss; |
| 3082 __ IncrementCounter(&Counters::keyed_load_string_length, 1, r2, r3); | 3091 __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3); |
| 3083 | 3092 |
| 3084 // Check the key is the cached one. | 3093 // Check the key is the cached one. |
| 3085 __ cmp(r0, Operand(Handle<String>(name))); | 3094 __ cmp(r0, Operand(Handle<String>(name))); |
| 3086 __ b(ne, &miss); | 3095 __ b(ne, &miss); |
| 3087 | 3096 |
| 3088 GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true); | 3097 GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true); |
| 3089 __ bind(&miss); | 3098 __ bind(&miss); |
| 3090 __ DecrementCounter(&Counters::keyed_load_string_length, 1, r2, r3); | 3099 __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3); |
| 3091 | 3100 |
| 3092 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); | 3101 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 3093 | 3102 |
| 3094 return GetCode(CALLBACKS, name); | 3103 return GetCode(CALLBACKS, name); |
| 3095 } | 3104 } |
| 3096 | 3105 |
| 3097 | 3106 |
| 3098 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { | 3107 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { |
| 3099 // ----------- S t a t e ------------- | 3108 // ----------- S t a t e ------------- |
| 3100 // -- lr : return address | 3109 // -- lr : return address |
| 3101 // -- r0 : key | 3110 // -- r0 : key |
| 3102 // -- r1 : receiver | 3111 // -- r1 : receiver |
| 3103 // ----------------------------------- | 3112 // ----------------------------------- |
| 3104 Label miss; | 3113 Label miss; |
| 3105 | 3114 |
| 3106 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3); | 3115 __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3); |
| 3107 | 3116 |
| 3108 // Check the name hasn't changed. | 3117 // Check the name hasn't changed. |
| 3109 __ cmp(r0, Operand(Handle<String>(name))); | 3118 __ cmp(r0, Operand(Handle<String>(name))); |
| 3110 __ b(ne, &miss); | 3119 __ b(ne, &miss); |
| 3111 | 3120 |
| 3112 GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss); | 3121 GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss); |
| 3113 __ bind(&miss); | 3122 __ bind(&miss); |
| 3114 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3); | 3123 __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3); |
| 3115 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); | 3124 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 3116 | 3125 |
| 3117 return GetCode(CALLBACKS, name); | 3126 return GetCode(CALLBACKS, name); |
| 3118 } | 3127 } |
| 3119 | 3128 |
| 3120 | 3129 |
| 3121 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { | 3130 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { |
| 3122 // ----------- S t a t e ------------- | 3131 // ----------- S t a t e ------------- |
| 3123 // -- lr : return address | 3132 // -- lr : return address |
| 3124 // -- r0 : key | 3133 // -- r0 : key |
| (...skipping 47 matching lines...) |
| 3172 Map* transition, | 3181 Map* transition, |
| 3173 String* name) { | 3182 String* name) { |
| 3174 // ----------- S t a t e ------------- | 3183 // ----------- S t a t e ------------- |
| 3175 // -- r0 : value | 3184 // -- r0 : value |
| 3176 // -- r1 : name | 3185 // -- r1 : name |
| 3177 // -- r2 : receiver | 3186 // -- r2 : receiver |
| 3178 // -- lr : return address | 3187 // -- lr : return address |
| 3179 // ----------------------------------- | 3188 // ----------------------------------- |
| 3180 Label miss; | 3189 Label miss; |
| 3181 | 3190 |
| 3182 __ IncrementCounter(&Counters::keyed_store_field, 1, r3, r4); | 3191 __ IncrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4); |
| 3183 | 3192 |
| 3184 // Check that the name has not changed. | 3193 // Check that the name has not changed. |
| 3185 __ cmp(r1, Operand(Handle<String>(name))); | 3194 __ cmp(r1, Operand(Handle<String>(name))); |
| 3186 __ b(ne, &miss); | 3195 __ b(ne, &miss); |
| 3187 | 3196 |
| 3188 // r3 is used as scratch register. r1 and r2 keep their values if a jump to | 3197 // r3 is used as scratch register. r1 and r2 keep their values if a jump to |
| 3189 // the miss label is generated. | 3198 // the miss label is generated. |
| 3190 GenerateStoreField(masm(), | 3199 GenerateStoreField(masm(), |
| 3191 object, | 3200 object, |
| 3192 index, | 3201 index, |
| 3193 transition, | 3202 transition, |
| 3194 r2, r1, r3, | 3203 r2, r1, r3, |
| 3195 &miss); | 3204 &miss); |
| 3196 __ bind(&miss); | 3205 __ bind(&miss); |
| 3197 | 3206 |
| 3198 __ DecrementCounter(&Counters::keyed_store_field, 1, r3, r4); | 3207 __ DecrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4); |
| 3199 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); | 3208 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 3209 Builtins::KeyedStoreIC_Miss)); |
| 3200 | 3210 |
| 3201 __ Jump(ic, RelocInfo::CODE_TARGET); | 3211 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 3202 | 3212 |
| 3203 // Return the generated code. | 3213 // Return the generated code. |
| 3204 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); | 3214 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); |
| 3205 } | 3215 } |
| 3206 | 3216 |
| 3207 | 3217 |
| 3208 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( | 3218 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( |
| 3209 JSObject* receiver) { | 3219 JSObject* receiver) { |
| (...skipping 23 matching lines...) |
| 3233 __ b(ne, &miss); | 3243 __ b(ne, &miss); |
| 3234 | 3244 |
| 3235 // Check that the key is a smi. | 3245 // Check that the key is a smi. |
| 3236 __ tst(key_reg, Operand(kSmiTagMask)); | 3246 __ tst(key_reg, Operand(kSmiTagMask)); |
| 3237 __ b(ne, &miss); | 3247 __ b(ne, &miss); |
| 3238 | 3248 |
| 3239 // Get the elements array and make sure it is a fast element array, not 'cow'. | 3249 // Get the elements array and make sure it is a fast element array, not 'cow'. |
| 3240 __ ldr(elements_reg, | 3250 __ ldr(elements_reg, |
| 3241 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); | 3251 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 3242 __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset)); | 3252 __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset)); |
| 3243 __ cmp(scratch, Operand(Handle<Map>(Factory::fixed_array_map()))); | 3253 __ cmp(scratch, Operand(Handle<Map>(FACTORY->fixed_array_map()))); |
| 3244 __ b(ne, &miss); | 3254 __ b(ne, &miss); |
| 3245 | 3255 |
| 3246 // Check that the key is within bounds. | 3256 // Check that the key is within bounds. |
| 3247 if (receiver->IsJSArray()) { | 3257 if (receiver->IsJSArray()) { |
| 3248 __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); | 3258 __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
| 3249 } else { | 3259 } else { |
| 3250 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); | 3260 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); |
| 3251 } | 3261 } |
| 3252 // Compare smis. | 3262 // Compare smis. |
| 3253 __ cmp(key_reg, scratch); | 3263 __ cmp(key_reg, scratch); |
| 3254 __ b(hs, &miss); | 3264 __ b(hs, &miss); |
| 3255 | 3265 |
| 3256 __ add(scratch, | 3266 __ add(scratch, |
| 3257 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3267 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3258 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 3268 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
| 3259 __ str(value_reg, | 3269 __ str(value_reg, |
| 3260 MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); | 3270 MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 3261 __ RecordWrite(scratch, | 3271 __ RecordWrite(scratch, |
| 3262 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize), | 3272 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize), |
| 3263 receiver_reg , elements_reg); | 3273 receiver_reg , elements_reg); |
| 3264 | 3274 |
| 3265 // value_reg (r0) is preserved. | 3275 // value_reg (r0) is preserved. |
| 3266 // Done. | 3276 // Done. |
| 3267 __ Ret(); | 3277 __ Ret(); |
| 3268 | 3278 |
| 3269 __ bind(&miss); | 3279 __ bind(&miss); |
| 3270 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); | 3280 Handle<Code> ic( |
| 3281 Isolate::Current()->builtins()->builtin(Builtins::KeyedStoreIC_Miss)); |
| 3271 __ Jump(ic, RelocInfo::CODE_TARGET); | 3282 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 3272 | 3283 |
| 3273 // Return the generated code. | 3284 // Return the generated code. |
| 3274 return GetCode(NORMAL, NULL); | 3285 return GetCode(NORMAL, NULL); |
| 3275 } | 3286 } |
| 3276 | 3287 |
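CompileStoreSpecialized above only covers the fast path: the receiver's map matches the one baked into the stub, the key is a smi, the elements array is a plain FixedArray (which rules out copy-on-write arrays), and the key is within bounds; everything else falls through to the KeyedStoreIC_Miss builtin. A rough C++ restatement of those guards follows; the types are stand-ins and the smi tag check on the key is only noted in a comment.

    #include <cstddef>
    #include <vector>

    struct Map { bool is_plain_fixed_array = false; };

    struct Elements {
      const Map* map = nullptr;
      std::vector<void*> slots;
    };

    struct Receiver {
      const Map* map = nullptr;
      Elements* elements = nullptr;
      std::size_t length = 0;  // JSArray length or elements length
    };

    // Returns true if the fast store applied; false means "take the miss path".
    // The smi tag check on the key is not modeled (key is already an integer here).
    bool FastKeyedStore(Receiver* r, const Map* expected_map,
                        std::size_t key, void* value) {
      if (r->map != expected_map) return false;                   // map check
      if (!r->elements->map->is_plain_fixed_array) return false;  // rules out COW arrays
      if (key >= r->length) return false;                         // bounds check
      r->elements->slots[key] = value;  // the str plus RecordWrite write barrier
      return true;
    }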
| 3277 | 3288 |
| 3278 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { | 3289 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { |
| 3279 // ----------- S t a t e ------------- | 3290 // ----------- S t a t e ------------- |
| 3280 // -- r0 : argc | 3291 // -- r0 : argc |
| (...skipping 115 matching lines...) |
| 3396 // Move argc to r1 and the JSObject to return to r0 and tag it. | 3407 // Move argc to r1 and the JSObject to return to r0 and tag it. |
| 3397 __ mov(r1, r0); | 3408 __ mov(r1, r0); |
| 3398 __ mov(r0, r4); | 3409 __ mov(r0, r4); |
| 3399 __ orr(r0, r0, Operand(kHeapObjectTag)); | 3410 __ orr(r0, r0, Operand(kHeapObjectTag)); |
| 3400 | 3411 |
| 3401 // r0: JSObject | 3412 // r0: JSObject |
| 3402 // r1: argc | 3413 // r1: argc |
| 3403 // Remove caller arguments and receiver from the stack and return. | 3414 // Remove caller arguments and receiver from the stack and return. |
| 3404 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2)); | 3415 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2)); |
| 3405 __ add(sp, sp, Operand(kPointerSize)); | 3416 __ add(sp, sp, Operand(kPointerSize)); |
| 3406 __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2); | 3417 __ IncrementCounter(COUNTERS->constructed_objects(), 1, r1, r2); |
| 3407 __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2); | 3418 __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1, r1, r2); |
| 3408 __ Jump(lr); | 3419 __ Jump(lr); |
| 3409 | 3420 |
| 3410 // Jump to the generic stub in case the specialized code cannot handle the | 3421 // Jump to the generic stub in case the specialized code cannot handle the |
| 3411 // construction. | 3422 // construction. |
| 3412 __ bind(&generic_stub_call); | 3423 __ bind(&generic_stub_call); |
| 3413 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric); | 3424 Code* code = Isolate::Current()->builtins()->builtin( |
| 3425 Builtins::JSConstructStubGeneric); |
| 3414 Handle<Code> generic_construct_stub(code); | 3426 Handle<Code> generic_construct_stub(code); |
| 3415 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | 3427 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); |
| 3416 | 3428 |
| 3417 // Return the generated code. | 3429 // Return the generated code. |
| 3418 return GetCode(); | 3430 return GetCode(); |
| 3419 } | 3431 } |
| 3420 | 3432 |
| 3421 | 3433 |
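The external-array load stub below hands the loaded element back as a smi when it fits and otherwise boxes it in a newly allocated HeapNumber, using VFP3 for the conversion when the isolate reports support (note the feature test itself now goes through Isolate::Current()->cpu_features()). A sketch of the smi-or-box decision for the unsigned 32-bit case, assuming the 1-bit smi tag the 0xC0000000 test implies:

    // Assumes a 1-bit smi tag and 31-bit payload on a 32-bit target, matching
    // the 0xC0000000 test in the stub below; purely illustrative.
    #include <cstdint>

    constexpr int kSmiTagSize = 1;

    // An unsigned 32-bit element can be returned as a positive smi only if its
    // top two bits are clear, i.e. the value is below 2^30.
    bool FitsInPositiveSmi(uint32_t value) {
      return (value & 0xC0000000u) == 0;
    }

    // Either tags the value as a smi, or reports that a HeapNumber box is
    // needed (the stub then allocates one and stores the value as a double).
    bool TryEncodeAsSmi(uint32_t value, int32_t* tagged_out) {
      if (!FitsInPositiveSmi(value)) return false;
      *tagged_out = static_cast<int32_t>(value << kSmiTagSize);
      return true;
    }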
| 3422 static bool IsElementTypeSigned(ExternalArrayType array_type) { | 3434 static bool IsElementTypeSigned(ExternalArrayType array_type) { |
| 3423 switch (array_type) { | 3435 switch (array_type) { |
| (...skipping 68 matching lines...) |
| 3492 __ ldrsh(value, MemOperand(r3, key, LSL, 0)); | 3504 __ ldrsh(value, MemOperand(r3, key, LSL, 0)); |
| 3493 break; | 3505 break; |
| 3494 case kExternalUnsignedShortArray: | 3506 case kExternalUnsignedShortArray: |
| 3495 __ ldrh(value, MemOperand(r3, key, LSL, 0)); | 3507 __ ldrh(value, MemOperand(r3, key, LSL, 0)); |
| 3496 break; | 3508 break; |
| 3497 case kExternalIntArray: | 3509 case kExternalIntArray: |
| 3498 case kExternalUnsignedIntArray: | 3510 case kExternalUnsignedIntArray: |
| 3499 __ ldr(value, MemOperand(r3, key, LSL, 1)); | 3511 __ ldr(value, MemOperand(r3, key, LSL, 1)); |
| 3500 break; | 3512 break; |
| 3501 case kExternalFloatArray: | 3513 case kExternalFloatArray: |
| 3502 if (CpuFeatures::IsSupported(VFP3)) { | 3514 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3503 CpuFeatures::Scope scope(VFP3); | 3515 CpuFeatures::Scope scope(VFP3); |
| 3504 __ add(r2, r3, Operand(key, LSL, 1)); | 3516 __ add(r2, r3, Operand(key, LSL, 1)); |
| 3505 __ vldr(s0, r2, 0); | 3517 __ vldr(s0, r2, 0); |
| 3506 } else { | 3518 } else { |
| 3507 __ ldr(value, MemOperand(r3, key, LSL, 1)); | 3519 __ ldr(value, MemOperand(r3, key, LSL, 1)); |
| 3508 } | 3520 } |
| 3509 break; | 3521 break; |
| 3510 default: | 3522 default: |
| 3511 UNREACHABLE(); | 3523 UNREACHABLE(); |
| 3512 break; | 3524 break; |
| (...skipping 18 matching lines...) |
| 3531 | 3543 |
| 3532 __ bind(&box_int); | 3544 __ bind(&box_int); |
| 3533 // Allocate a HeapNumber for the result and perform int-to-double | 3545 // Allocate a HeapNumber for the result and perform int-to-double |
| 3534 // conversion. Don't touch r0 or r1 as they are needed if allocation | 3546 // conversion. Don't touch r0 or r1 as they are needed if allocation |
| 3535 // fails. | 3547 // fails. |
| 3536 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); | 3548 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); |
| 3537 __ AllocateHeapNumber(r5, r3, r4, r6, &slow); | 3549 __ AllocateHeapNumber(r5, r3, r4, r6, &slow); |
| 3538 // Now we can use r0 for the result as key is not needed any more. | 3550 // Now we can use r0 for the result as key is not needed any more. |
| 3539 __ mov(r0, r5); | 3551 __ mov(r0, r5); |
| 3540 | 3552 |
| 3541 if (CpuFeatures::IsSupported(VFP3)) { | 3553 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3542 CpuFeatures::Scope scope(VFP3); | 3554 CpuFeatures::Scope scope(VFP3); |
| 3543 __ vmov(s0, value); | 3555 __ vmov(s0, value); |
| 3544 __ vcvt_f64_s32(d0, s0); | 3556 __ vcvt_f64_s32(d0, s0); |
| 3545 __ sub(r3, r0, Operand(kHeapObjectTag)); | 3557 __ sub(r3, r0, Operand(kHeapObjectTag)); |
| 3546 __ vstr(d0, r3, HeapNumber::kValueOffset); | 3558 __ vstr(d0, r3, HeapNumber::kValueOffset); |
| 3547 __ Ret(); | 3559 __ Ret(); |
| 3548 } else { | 3560 } else { |
| 3549 WriteInt32ToHeapNumberStub stub(value, r0, r3); | 3561 WriteInt32ToHeapNumberStub stub(value, r0, r3); |
| 3550 __ TailCallStub(&stub); | 3562 __ TailCallStub(&stub); |
| 3551 } | 3563 } |
| 3552 } else if (array_type == kExternalUnsignedIntArray) { | 3564 } else if (array_type == kExternalUnsignedIntArray) { |
| 3553 // The test is different for unsigned int values. Since we need | 3565 // The test is different for unsigned int values. Since we need |
| 3554 // the value to be in the range of a positive smi, we can't | 3566 // the value to be in the range of a positive smi, we can't |
| 3555 // handle either of the top two bits being set in the value. | 3567 // handle either of the top two bits being set in the value. |
| 3556 if (CpuFeatures::IsSupported(VFP3)) { | 3568 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3557 CpuFeatures::Scope scope(VFP3); | 3569 CpuFeatures::Scope scope(VFP3); |
| 3558 Label box_int, done; | 3570 Label box_int, done; |
| 3559 __ tst(value, Operand(0xC0000000)); | 3571 __ tst(value, Operand(0xC0000000)); |
| 3560 __ b(ne, &box_int); | 3572 __ b(ne, &box_int); |
| 3561 // Tag integer as smi and return it. | 3573 // Tag integer as smi and return it. |
| 3562 __ mov(r0, Operand(value, LSL, kSmiTagSize)); | 3574 __ mov(r0, Operand(value, LSL, kSmiTagSize)); |
| 3563 __ Ret(); | 3575 __ Ret(); |
| 3564 | 3576 |
| 3565 __ bind(&box_int); | 3577 __ bind(&box_int); |
| 3566 __ vmov(s0, value); | 3578 __ vmov(s0, value); |
| (...skipping 43 matching lines...) |
| 3610 | 3622 |
| 3611 __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset)); | 3623 __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset)); |
| 3612 __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset)); | 3624 __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset)); |
| 3613 | 3625 |
| 3614 __ mov(r0, r4); | 3626 __ mov(r0, r4); |
| 3615 __ Ret(); | 3627 __ Ret(); |
| 3616 } | 3628 } |
| 3617 } else if (array_type == kExternalFloatArray) { | 3629 } else if (array_type == kExternalFloatArray) { |
| 3618 // For the floating-point array type, we need to always allocate a | 3630 // For the floating-point array type, we need to always allocate a |
| 3619 // HeapNumber. | 3631 // HeapNumber. |
| 3620 if (CpuFeatures::IsSupported(VFP3)) { | 3632 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3621 CpuFeatures::Scope scope(VFP3); | 3633 CpuFeatures::Scope scope(VFP3); |
| 3622 // Allocate a HeapNumber for the result. Don't use r0 and r1 as | 3634 // Allocate a HeapNumber for the result. Don't use r0 and r1 as |
| 3623 // AllocateHeapNumber clobbers all registers - also when jumping due to | 3635 // AllocateHeapNumber clobbers all registers - also when jumping due to |
| 3624 // exhausted young space. | 3636 // exhausted young space. |
| 3625 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); | 3637 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); |
| 3626 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); | 3638 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); |
| 3627 __ vcvt_f64_f32(d0, s0); | 3639 __ vcvt_f64_f32(d0, s0); |
| 3628 __ sub(r1, r2, Operand(kHeapObjectTag)); | 3640 __ sub(r1, r2, Operand(kHeapObjectTag)); |
| 3629 __ vstr(d0, r1, HeapNumber::kValueOffset); | 3641 __ vstr(d0, r1, HeapNumber::kValueOffset); |
| 3630 | 3642 |
| (...skipping 55 matching lines...) |
| 3686 } | 3698 } |
| 3687 | 3699 |
| 3688 } else { | 3700 } else { |
| 3689 // Tag integer as smi and return it. | 3701 // Tag integer as smi and return it. |
| 3690 __ mov(r0, Operand(value, LSL, kSmiTagSize)); | 3702 __ mov(r0, Operand(value, LSL, kSmiTagSize)); |
| 3691 __ Ret(); | 3703 __ Ret(); |
| 3692 } | 3704 } |
| 3693 | 3705 |
| 3694 // Slow case, key and receiver still in r0 and r1. | 3706 // Slow case, key and receiver still in r0 and r1. |
| 3695 __ bind(&slow); | 3707 __ bind(&slow); |
| 3696 __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1, r2, r3); | 3708 __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1, r2, r3); |
| 3697 | 3709 |
| 3698 // ---------- S t a t e -------------- | 3710 // ---------- S t a t e -------------- |
| 3699 // -- lr : return address | 3711 // -- lr : return address |
| 3700 // -- r0 : key | 3712 // -- r0 : key |
| 3701 // -- r1 : receiver | 3713 // -- r1 : receiver |
| 3702 // ----------------------------------- | 3714 // ----------------------------------- |
| 3703 | 3715 |
| 3704 __ Push(r1, r0); | 3716 __ Push(r1, r0); |
| 3705 | 3717 |
| 3706 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 3718 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
| (...skipping 94 matching lines...) |
| 3801 __ b(ne, &slow); | 3813 __ b(ne, &slow); |
| 3802 | 3814 |
| 3803 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); | 3815 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); |
| 3804 | 3816 |
| 3805 // r3: base pointer of external storage. | 3817 // r3: base pointer of external storage. |
| 3806 // r4: key (integer). | 3818 // r4: key (integer). |
| 3807 | 3819 |
| 3808 // The WebGL specification leaves the behavior of storing NaN and | 3820 // The WebGL specification leaves the behavior of storing NaN and |
| 3809 // +/-Infinity into integer arrays basically undefined. For more | 3821 // +/-Infinity into integer arrays basically undefined. For more |
| 3810 // reproducible behavior, convert these to zero. | 3822 // reproducible behavior, convert these to zero. |
| 3811 if (CpuFeatures::IsSupported(VFP3)) { | 3823 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 3812 CpuFeatures::Scope scope(VFP3); | 3824 CpuFeatures::Scope scope(VFP3); |
| 3813 | 3825 |
| 3814 | 3826 |
| 3815 if (array_type == kExternalFloatArray) { | 3827 if (array_type == kExternalFloatArray) { |
| 3816 // vldr requires offset to be a multiple of 4 so we can not | 3828 // vldr requires offset to be a multiple of 4 so we can not |
| 3817 // include -kHeapObjectTag into it. | 3829 // include -kHeapObjectTag into it. |
| 3818 __ sub(r5, r0, Operand(kHeapObjectTag)); | 3830 __ sub(r5, r0, Operand(kHeapObjectTag)); |
| 3819 __ vldr(d0, r5, HeapNumber::kValueOffset); | 3831 __ vldr(d0, r5, HeapNumber::kValueOffset); |
| 3820 __ add(r5, r3, Operand(r4, LSL, 2)); | 3832 __ add(r5, r3, Operand(r4, LSL, 2)); |
| 3821 __ vcvt_f32_f64(s0, d0); | 3833 __ vcvt_f32_f64(s0, d0); |
| (...skipping 192 matching lines...) |
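Per the WebGL note in the keyed store stub above, NaN and +/-Infinity written into an external integer array are normalized to zero before conversion. Only that rule is sketched below; the stub's handling of finite out-of-range values and its VFP3 versus software paths are not reproduced.

    #include <cmath>
    #include <cstdint>

    // Sketch of the NaN / +/-Infinity rule only; finite inputs are assumed to
    // already be in int32 range here, which the real stub does not assume.
    int32_t NonFiniteToZero(double value) {
      if (std::isnan(value) || std::isinf(value)) return 0;
      return static_cast<int32_t>(value);  // truncate toward zero
    }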
| 4014 | 4026 |
| 4015 return GetCode(flags); | 4027 return GetCode(flags); |
| 4016 } | 4028 } |
| 4017 | 4029 |
| 4018 | 4030 |
| 4019 #undef __ | 4031 #undef __ |
| 4020 | 4032 |
| 4021 } } // namespace v8::internal | 4033 } } // namespace v8::internal |
| 4022 | 4034 |
| 4023 #endif // V8_TARGET_ARCH_ARM | 4035 #endif // V8_TARGET_ARCH_ARM |