Chromium Code Reviews

Unified Diff: src/arm/stub-cache-arm.cc (lines prefixed with '-' are removed by this patch set, '+' are added; unchanged context has no prefix)

Issue 6717018: Introduce accessors on builtins instance (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix tests and lint. Created 9 years, 9 months ago
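Note on the pattern this patch applies throughout the file: call sites stop going through the global Isolate::Current() lookup and instead use the isolate already attached to the assembler (masm->isolate() / masm()->isolate()), and builtin lookups move from builtins()->builtin(Builtins::Foo), which returns a raw Code*, to per-builtin accessors that return a Handle<Code> (presumably generated elsewhere in this change). A minimal, self-contained sketch of the two call-site shapes, using toy stand-in types rather than the real V8 classes:

    // Toy stand-ins, for illustration only; not the V8 definitions.
    template <typename T>
    struct Handle {
      explicit Handle(T* p) : location_(p) {}
      T* location_;
    };

    struct Code {};

    struct Builtins {
      enum Name { kStoreIC_Miss };
      // Old style: fetch a raw Code* by enum id; the caller wraps it in a Handle.
      Code* builtin(Name) { return &store_ic_miss_; }
      // New style: one accessor per builtin that already returns a Handle.
      Handle<Code> StoreIC_Miss() { return Handle<Code>(&store_ic_miss_); }
      Code store_ic_miss_;
    };

    int main() {
      Builtins builtins;
      Handle<Code> old_way(builtins.builtin(Builtins::kStoreIC_Miss));  // before
      Handle<Code> new_way = builtins.StoreIC_Miss();                   // after
      (void)old_way;
      (void)new_way;
    }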
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 193 matching lines...)

}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
-  Isolate* isolate = Isolate::Current();
+  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
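The assert above documents why the probe code can use shifts: with a fixed 8-byte entry, an index becomes a table offset with a single left shift, which folds into the load's addressing mode. A rough, purely illustrative sketch (not the real StubCache layout or hash function):

    #include <cstdint>

    // Hypothetical 8-byte entry on a 32-bit target such as ARM:
    // one pointer for the key, one for the code object.
    struct Entry {
      void* key;
      void* value;
    };

    inline Entry* ProbeEntry(Entry* table, uint32_t hash, uint32_t mask) {
      uint32_t index = hash & mask;
      // sizeof(Entry) == 8, so scaling the index is just "index << 3".
      return reinterpret_cast<Entry*>(
          reinterpret_cast<uintptr_t>(table) + (index << 3));
    }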
(...skipping 57 matching lines...)

  // Load the initial map. The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
+  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
-  __ Move(ip, Isolate::Current()->global());
+  __ Move(ip, isolate->global());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
-  JSFunction* function = JSFunction::cast(
-      Isolate::Current()->global_context()->get(index));
+  JSFunction* function =
+      JSFunction::cast(isolate->global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
// fixed array.
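The helper this comment introduces (its body is elided above) encodes a simple storage rule: a fast property lives either inside the object itself or in the separate properties fixed array. A toy sketch of that rule, with stand-in types rather than V8's:

    #include <cstddef>
    #include <vector>

    struct ToyObject {
      std::vector<void*> inobject_fields;  // stored inside the object in V8
      std::vector<void*> properties;       // the out-of-line properties array
    };

    // index counts all fast properties; the first inobject_fields.size() of
    // them are in-object, the rest live in the properties array.
    void* LoadFastProperty(const ToyObject& holder, std::size_t index) {
      if (index < holder.inobject_fields.size()) {
        return holder.inobject_fields[index];  // direct, in-object load
      }
      return holder.properties[index - holder.inobject_fields.size()];
    }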
(...skipping 194 matching lines...)

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}


void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Code* code = NULL;
  if (kind == Code::LOAD_IC) {
-    code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss);
+    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
  } else {
-    code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss);
+    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
  }

  Handle<Code> ic(code);
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
(...skipping 418 matching lines...)

// Convert and store int passed in register ival to IEEE 754 single precision
// floating point value at memory location (dst + 4 * wordoffset)
// If VFP3 is available use it for conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
-  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+  if (masm->isolate()->cpu_features()->IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, ival);
    __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
    __ vcvt_f32_s32(s0, s0);
    __ vstr(s0, scratch1, 0);
  } else {
    Label not_special, done;
    // Move sign bit from source to destination. This works because the sign
    // bit in the exponent word of the double has the same position and polarity
    // as the 2's complement sign bit in a Smi.
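StoreIntAsFloat has two paths: with VFP3 it converts with vcvt_f32_s32 and stores with vstr; without it, it assembles the IEEE 754 bits by hand (the sign-bit trick above is the first step). The effect it computes, sketched in plain C++ for reference (illustration only, not the generated code):

    #include <cstdint>
    #include <cstring>

    // Convert a 32-bit int to an IEEE 754 single and store it at
    // dst + 4 * wordoffset, mirroring what the stub emits.
    void StoreIntAsFloatReference(uint8_t* dst, uint32_t wordoffset, int32_t ival) {
      float fval = static_cast<float>(ival);                    // vcvt_f32_s32
      std::memcpy(dst + 4 * wordoffset, &fval, sizeof(fval));   // vstr
    }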
(...skipping 191 matching lines...)

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch1, Operand(Handle<Map>(current->map())));
  __ b(ne, miss);

  // Log the check depth.
-  LOG(Isolate::Current(), IntEvent("check-maps-depth", depth + 1));
+  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  };

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed. We also need to check that the
  // property cell for the property is still empty.
(...skipping 315 matching lines...)

    __ cmp(r4, r3);
    __ b(ne, miss);
  } else {
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, miss);
  }
}


MaybeObject* CallStubCompiler::GenerateMissBranch() {
-  MaybeObject* maybe_obj = Isolate::Current()->stub_cache()->ComputeCallMiss(
+  MaybeObject* maybe_obj = masm()->isolate()->stub_cache()->ComputeCallMiss(
      arguments().immediate(), kind_);
  Object* obj;
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
  return obj;
}


MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
(...skipping 526 matching lines...)

    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  // -- r2 : function name
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  // -- ...
  // -- sp[argc * 4] : receiver
  // -----------------------------------

-  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3))
+  if (!masm()->isolate()->cpu_features()->IsSupported(VFP3))
    return HEAP->undefined_value();

  CpuFeatures::Scope scope_vfp3(VFP3);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
(...skipping 544 matching lines...)

  // -----------------------------------
  Label miss;

  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r1, r2, r3,
                     &miss);
  __ bind(&miss);
-  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
-      Builtins::StoreIC_Miss));
+  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}


MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
(...skipping 28 matching lines...)

  __ Push(ip, r2, r0);

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
-  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
-      Builtins::StoreIC_Miss));
+  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
(...skipping 28 matching lines...)

  __ push(r0);  // strict mode

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
-  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
-      Builtins::StoreIC_Miss));
+  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
(...skipping 23 matching lines...)

  // Store the value in the cell.
  __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
-  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
-      Builtins::StoreIC_Miss));
+  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}


MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
(...skipping 432 matching lines...)

  // the miss label is generated.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r2, r1, r3,
                     &miss);
  __ bind(&miss);

  __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
-  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
-      Builtins::KeyedStoreIC_Miss));
-
+  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}


MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
    JSObject* receiver) {
  // ----------- S t a t e -------------
(...skipping 49 matching lines...)

         MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ RecordWrite(scratch,
                 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
                 receiver_reg , elements_reg);

  // value_reg (r0) is preserved.
  // Done.
  __ Ret();

  __ bind(&miss);
-  Handle<Code> ic(
-      Isolate::Current()->builtins()->builtin(Builtins::KeyedStoreIC_Miss));
+  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}


MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // ----------- S t a t e -------------
  // -- r0 : argc
(...skipping 123 matching lines...)

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, Operand(kPointerSize));
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
  __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
  __ Jump(lr);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
-  Code* code = Isolate::Current()->builtins()->builtin(
-      Builtins::JSConstructStubGeneric);
-  Handle<Code> generic_construct_stub(code);
-  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+  Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
+  __ Jump(code, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
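The epilogue above removes the caller's arguments and the receiver from the stack before returning: the first add advances sp by the argument count (the shifted register operand) scaled by the pointer size, and the second pops one extra slot for the receiver. A tiny sketch of that arithmetic, assuming a 32-bit ARM pointer size:

    // Bytes popped by the construct stub epilogue above (sketch).
    constexpr int kPointerSize = 4;  // ARM32

    constexpr int StackBytesToPop(int argc) {
      return argc * kPointerSize + kPointerSize;  // arguments + receiver
    }

    static_assert(StackBytesToPop(2) == 12, "two arguments plus the receiver");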


static bool IsElementTypeSigned(ExternalArrayType array_type) {
  switch (array_type) {
    case kExternalByteArray:
    case kExternalShortArray:
(...skipping 66 matching lines...)

      __ ldrsh(value, MemOperand(r3, key, LSL, 0));
      break;
    case kExternalUnsignedShortArray:
      __ ldrh(value, MemOperand(r3, key, LSL, 0));
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ ldr(value, MemOperand(r3, key, LSL, 1));
      break;
    case kExternalFloatArray:
-      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+      if (masm()->isolate()->cpu_features()->IsSupported(VFP3)) {
        CpuFeatures::Scope scope(VFP3);
        __ add(r2, r3, Operand(key, LSL, 1));
        __ vldr(s0, r2, 0);
      } else {
        __ ldr(value, MemOperand(r3, key, LSL, 1));
      }
      break;
    default:
      UNREACHABLE();
      break;
(...skipping 18 matching lines...)

    __ bind(&box_int);
    // Allocate a HeapNumber for the result and perform int-to-double
    // conversion. Don't touch r0 or r1 as they are needed if allocation
    // fails.
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    // Now we can use r0 for the result as key is not needed any more.
    __ mov(r0, r5);

-    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+    if (masm()->isolate()->cpu_features()->IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      __ vmov(s0, value);
      __ vcvt_f64_s32(d0, s0);
      __ sub(r3, r0, Operand(kHeapObjectTag));
      __ vstr(d0, r3, HeapNumber::kValueOffset);
      __ Ret();
    } else {
      WriteInt32ToHeapNumberStub stub(value, r0, r3);
      __ TailCallStub(&stub);
    }
  } else if (array_type == kExternalUnsignedIntArray) {
    // The test is different for unsigned int values. Since we need
    // the value to be in the range of a positive smi, we can't
    // handle either of the top two bits being set in the value.
-    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+    if (masm()->isolate()->cpu_features()->IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      Label box_int, done;
      __ tst(value, Operand(0xC0000000));
      __ b(ne, &box_int);
      // Tag integer as smi and return it.
      __ mov(r0, Operand(value, LSL, kSmiTagSize));
      __ Ret();

      __ bind(&box_int);
      __ vmov(s0, value);
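The comment above explains the unsigned case: the value fits in a positive smi only if it fits in 30 bits, which is exactly what the tst against 0xC0000000 checks. The same test in plain C++ (sketch):

    #include <cstdint>

    // True when the unsigned value can be tagged as a positive smi,
    // i.e. neither of its top two bits is set (mirrors the tst above).
    bool FitsPositiveSmi(uint32_t value) {
      return (value & 0xC0000000u) == 0;
    }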
(...skipping 43 matching lines...)

      __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
      __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));

      __ mov(r0, r4);
      __ Ret();
    }
  } else if (array_type == kExternalFloatArray) {
    // For the floating-point array type, we need to always allocate a
    // HeapNumber.
-    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+    if (masm()->isolate()->cpu_features()->IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
      // AllocateHeapNumber clobbers all registers - also when jumping due to
      // exhausted young space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
      __ vcvt_f64_f32(d0, s0);
      __ sub(r1, r2, Operand(kHeapObjectTag));
      __ vstr(d0, r1, HeapNumber::kValueOffset);

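For kExternalFloatArray the element itself is a 32-bit float, so (as the comment above says) the result is always boxed: the element is loaded, widened to a double with vcvt_f64_f32, and written into a freshly allocated HeapNumber. The value computed before boxing, as a plain C++ sketch (illustration only):

    #include <cstdint>
    #include <cstring>

    // Load a 32-bit float element and widen it to the double payload that
    // ends up in the HeapNumber.
    double LoadFloatElement(const uint8_t* base, uint32_t index) {
      float element;
      std::memcpy(&element, base + 4 * index, sizeof(element));  // element load
      return static_cast<double>(element);                       // vcvt_f64_f32
    }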
(...skipping 172 matching lines...)

  __ b(ne, &slow);

  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // r3: base pointer of external storage.
  // r4: key (integer).

  // The WebGL specification leaves the behavior of storing NaN and
  // +/-Infinity into integer arrays basically undefined. For more
  // reproducible behavior, convert these to zero.
-  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+  if (masm()->isolate()->cpu_features()->IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
-
    if (array_type == kExternalFloatArray) {
      // vldr requires offset to be a multiple of 4 so we can not
      // include -kHeapObjectTag into it.
      __ sub(r5, r0, Operand(kHeapObjectTag));
      __ vldr(d0, r5, HeapNumber::kValueOffset);
      __ add(r5, r3, Operand(r4, LSL, 2));
      __ vcvt_f32_f64(s0, d0);
      __ vstr(s0, r5, 0);
    } else {
      // Need to perform float-to-int conversion.
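Per the comment above, stores of NaN and +/-Infinity into integer-typed external arrays are normalized to zero so the behavior is reproducible. A sketch of that rule; the finite case is simplified here, since the stub performs the actual float-to-int conversion inline:

    #include <cmath>
    #include <cstdint>

    // NaN and +/-Infinity become 0 before the integer store; finite values
    // are converted (range handling simplified in this sketch).
    int32_t IntArrayStoreValue(double value) {
      if (std::isnan(value) || std::isinf(value)) return 0;
      return static_cast<int32_t>(value);  // assumes value is in int32 range
    }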
(...skipping 189 matching lines...)

  return GetCode(flags);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM