| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 148 matching lines...) |
| 159 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( | 159 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( |
| 160 Isolate* isolate, | 160 Isolate* isolate, |
| 161 CodeStubInterfaceDescriptor* descriptor) { | 161 CodeStubInterfaceDescriptor* descriptor) { |
| 162 static Register registers[] = { a1 }; | 162 static Register registers[] = { a1 }; |
| 163 descriptor->register_param_count_ = 1; | 163 descriptor->register_param_count_ = 1; |
| 164 descriptor->register_params_ = registers; | 164 descriptor->register_params_ = registers; |
| 165 descriptor->deoptimization_handler_ = NULL; | 165 descriptor->deoptimization_handler_ = NULL; |
| 166 } | 166 } |
| 167 | 167 |
| 168 | 168 |
| 169 void StringLengthStub::InitializeInterfaceDescriptor( |
| 170 Isolate* isolate, |
| 171 CodeStubInterfaceDescriptor* descriptor) { |
| 172 static Register registers[] = { a0, a2 }; |
| 173 descriptor->register_param_count_ = 2; |
| 174 descriptor->register_params_ = registers; |
| 175 descriptor->deoptimization_handler_ = NULL; |
| 176 } |
| 177 |
| 178 |
| 179 void KeyedStringLengthStub::InitializeInterfaceDescriptor( |
| 180 Isolate* isolate, |
| 181 CodeStubInterfaceDescriptor* descriptor) { |
| 182 static Register registers[] = { a1, a0 }; |
| 183 descriptor->register_param_count_ = 2; |
| 184 descriptor->register_params_ = registers; |
| 185 descriptor->deoptimization_handler_ = NULL; |
| 186 } |
| 187 |
| 188 |
| 169 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( | 189 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( |
| 170 Isolate* isolate, | 190 Isolate* isolate, |
| 171 CodeStubInterfaceDescriptor* descriptor) { | 191 CodeStubInterfaceDescriptor* descriptor) { |
| 172 static Register registers[] = { a2, a1, a0 }; | 192 static Register registers[] = { a2, a1, a0 }; |
| 173 descriptor->register_param_count_ = 3; | 193 descriptor->register_param_count_ = 3; |
| 174 descriptor->register_params_ = registers; | 194 descriptor->register_params_ = registers; |
| 175 descriptor->deoptimization_handler_ = | 195 descriptor->deoptimization_handler_ = |
| 176 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); | 196 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); |
| 177 } | 197 } |
| 178 | 198 |
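
The descriptor hunks above (including the two new string-length stubs) all follow the same pattern: instead of hand-written entry code, each stub publishes its register calling convention and an optional deoptimization handler. Below is a minimal self-contained sketch of that pattern; the struct shape is inferred from the fields assigned in the diff, and `Register` plus the MIPS register names are stand-ins rather than V8's real declarations.

```cpp
#include <cstddef>

typedef int Register;            // stand-in for v8::internal::Register
static Register a0 = 4, a1 = 5;  // assumed MIPS argument register codes
typedef void (*Address)();       // stand-in for a code address

// Field names taken from the assignments in the hunks above.
struct CodeStubInterfaceDescriptor {
  int register_param_count_;
  Register* register_params_;
  Address deoptimization_handler_;  // NULL when there is no runtime fallback
};

// Mirrors the new KeyedStringLengthStub hunk: key in a1, receiver in a0.
void InitializeKeyedStringLength(CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { a1, a0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
```
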
| (...skipping 2018 matching lines...) |
| 2197 receiver = a0; | 2217 receiver = a0; |
| 2198 } | 2218 } |
| 2199 | 2219 |
| 2200 StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, a3, t0, &miss); | 2220 StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, a3, t0, &miss); |
| 2201 __ bind(&miss); | 2221 __ bind(&miss); |
| 2202 StubCompiler::TailCallBuiltin( | 2222 StubCompiler::TailCallBuiltin( |
| 2203 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); | 2223 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
| 2204 } | 2224 } |
| 2205 | 2225 |
| 2206 | 2226 |
| 2207 void StringLengthStub::Generate(MacroAssembler* masm) { | |
| 2208 Label miss; | |
| 2209 Register receiver; | |
| 2210 if (kind() == Code::KEYED_LOAD_IC) { | |
| 2211 // ----------- S t a t e ------------- | |
| 2212 // -- ra : return address | |
| 2213 // -- a0 : key | |
| 2214 // -- a1 : receiver | |
| 2215 // ----------------------------------- | |
| 2216 __ Branch(&miss, ne, a0, | |
| 2217 Operand(masm->isolate()->factory()->length_string())); | |
| 2218 receiver = a1; | |
| 2219 } else { | |
| 2220 ASSERT(kind() == Code::LOAD_IC); | |
| 2221 // ----------- S t a t e ------------- | |
| 2222 // -- a2 : name | |
| 2223 // -- ra : return address | |
| 2224 // -- a0 : receiver | |
| 2225 // -- sp[0] : receiver | |
| 2226 // ----------------------------------- | |
| 2227 receiver = a0; | |
| 2228 } | |
| 2229 | |
| 2230 StubCompiler::GenerateLoadStringLength(masm, receiver, a3, t0, &miss); | |
| 2231 | |
| 2232 __ bind(&miss); | |
| 2233 StubCompiler::TailCallBuiltin( | |
| 2234 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); | |
| 2235 } | |
| 2236 | |
| 2237 | |
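
With those descriptors in place, the hand-written StringLengthStub::Generate body deleted here becomes redundant. Its only logic beyond picking the receiver register was the keyed guard, sketched below in plain C++ (names hypothetical):

```cpp
#include <string>

enum Kind { LOAD_IC, KEYED_LOAD_IC };

// KEYED_LOAD_IC receives the key in a0 and the receiver in a1; LOAD_IC has
// the receiver in a0 already. A keyed load stays on the fast path only when
// the key is the literal string "length"; anything else branches to &miss.
bool StaysOnFastPath(Kind kind, const std::string& key) {
  return kind != KEYED_LOAD_IC || key == "length";
}
```
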
| 2238 void StoreArrayLengthStub::Generate(MacroAssembler* masm) { | 2227 void StoreArrayLengthStub::Generate(MacroAssembler* masm) { |
| 2239 // This accepts as a receiver anything JSArray::SetElementsLength accepts | 2228 // This accepts as a receiver anything JSArray::SetElementsLength accepts |
| 2240 // (currently anything except for external arrays which means anything with | 2229 // (currently anything except for external arrays which means anything with |
| 2241 // elements of FixedArray type). Value must be a number, but only smis are | 2230 // elements of FixedArray type). Value must be a number, but only smis are |
| 2242 // accepted as the most common case. | 2231 // accepted as the most common case. |
| 2243 Label miss; | 2232 Label miss; |
| 2244 | 2233 |
| 2245 Register receiver; | 2234 Register receiver; |
| 2246 Register value; | 2235 Register value; |
| 2247 if (kind() == Code::KEYED_STORE_IC) { | 2236 if (kind() == Code::KEYED_STORE_IC) { |
| (...skipping 110 matching lines...) |
| 2358 __ lw(v0, MemOperand(a3, kDisplacement)); | 2347 __ lw(v0, MemOperand(a3, kDisplacement)); |
| 2359 | 2348 |
| 2360 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 2349 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
| 2361 // by calling the runtime system. | 2350 // by calling the runtime system. |
| 2362 __ bind(&slow); | 2351 __ bind(&slow); |
| 2363 __ push(a1); | 2352 __ push(a1); |
| 2364 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); | 2353 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); |
| 2365 } | 2354 } |
| 2366 | 2355 |
| 2367 | 2356 |
| 2368 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { | 2357 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
| 2369 // sp[0] : number of parameters | 2358 // sp[0] : number of parameters |
| 2370 // sp[4] : receiver displacement | 2359 // sp[4] : receiver displacement |
| 2371 // sp[8] : function | 2360 // sp[8] : function |
| 2372 // Check if the calling frame is an arguments adaptor frame. | 2361 // Check if the calling frame is an arguments adaptor frame. |
| 2373 Label runtime; | 2362 Label runtime; |
| 2374 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 2363 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 2375 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); | 2364 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); |
| 2376 __ Branch(&runtime, | 2365 __ Branch(&runtime, |
| 2377 ne, | 2366 ne, |
| 2378 a2, | 2367 a2, |
| 2379 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2368 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 2380 | 2369 |
| 2381 // Patch the arguments.length and the parameters pointer in the current frame. | 2370 // Patch the arguments.length and the parameters pointer in the current frame. |
| 2382 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2371 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2383 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); | 2372 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); |
| 2384 __ sll(t3, a2, 1); | 2373 __ sll(t3, a2, 1); |
| 2385 __ Addu(a3, a3, Operand(t3)); | 2374 __ Addu(a3, a3, Operand(t3)); |
| 2386 __ addiu(a3, a3, StandardFrameConstants::kCallerSPOffset); | 2375 __ addiu(a3, a3, StandardFrameConstants::kCallerSPOffset); |
| 2387 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); | 2376 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); |
| 2388 | 2377 |
| 2389 __ bind(&runtime); | 2378 __ bind(&runtime); |
| 2390 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 2379 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
| 2391 } | 2380 } |
| 2392 | 2381 |
| 2393 | 2382 |
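
The frame patching in GenerateNewSloppySlow relies on a smi trick worth spelling out: a2 holds the adaptor frame's argument count as a smi, so a single extra shift converts it to a byte offset. A worked sketch follows; the frame-offset constant is an assumption, the shift arithmetic is the point.

```cpp
#include <cstdint>

// On 32-bit V8 a smi stores its integer shifted left by 1 (kSmiTagSize == 1).
// A smi-tagged count is therefore already count * 2, and one more left shift
// (the "sll t3, a2, 1" above) yields count * 4 == count * kPointerSize.
uint32_t ParametersPointer(uint32_t caller_fp, uint32_t smi_count,
                           uint32_t caller_sp_offset /* assumed constant */) {
  uint32_t byte_offset = smi_count << 1;  // (count << 1) << 1 == count * 4
  return caller_fp + byte_offset + caller_sp_offset;
}
```
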
| 2394 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { | 2383 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
| 2395 // Stack layout: | 2384 // Stack layout: |
| 2396 // sp[0] : number of parameters (tagged) | 2385 // sp[0] : number of parameters (tagged) |
| 2397 // sp[4] : address of receiver argument | 2386 // sp[4] : address of receiver argument |
| 2398 // sp[8] : function | 2387 // sp[8] : function |
| 2399 // Registers used over whole function: | 2388 // Registers used over whole function: |
| 2400 // t2 : allocated object (tagged) | 2389 // t2 : allocated object (tagged) |
| 2401 // t5 : mapped parameter count (tagged) | 2390 // t5 : mapped parameter count (tagged) |
| 2402 | 2391 |
| 2403 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); | 2392 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); |
| 2404 // a1 = parameter count (tagged) | 2393 // a1 = parameter count (tagged) |
| (...skipping 43 matching lines...) |
| 2448 __ sll(t5, a1, 1); | 2437 __ sll(t5, a1, 1); |
| 2449 __ addiu(t5, t5, kParameterMapHeaderSize); | 2438 __ addiu(t5, t5, kParameterMapHeaderSize); |
| 2450 __ bind(¶m_map_size); | 2439 __ bind(¶m_map_size); |
| 2451 | 2440 |
| 2452 // 2. Backing store. | 2441 // 2. Backing store. |
| 2453 __ sll(t6, a2, 1); | 2442 __ sll(t6, a2, 1); |
| 2454 __ Addu(t5, t5, Operand(t6)); | 2443 __ Addu(t5, t5, Operand(t6)); |
| 2455 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); | 2444 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); |
| 2456 | 2445 |
| 2457 // 3. Arguments object. | 2446 // 3. Arguments object. |
| 2458 __ Addu(t5, t5, Operand(Heap::kArgumentsObjectSize)); | 2447 __ Addu(t5, t5, Operand(Heap::kSloppyArgumentsObjectSize)); |
| 2459 | 2448 |
| 2460 // Do the allocation of all three objects in one go. | 2449 // Do the allocation of all three objects in one go. |
| 2461 __ Allocate(t5, v0, a3, t0, &runtime, TAG_OBJECT); | 2450 __ Allocate(t5, v0, a3, t0, &runtime, TAG_OBJECT); |
| 2462 | 2451 |
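
The byte count handed to Allocate above is built in three steps: an optional parameter map, the backing store, and the arguments object itself (whose size constant this patch renames to kSloppyArgumentsObjectSize). Restated as plain arithmetic, with assumed stand-in values for the constants:

```cpp
const int kPointerSize = 4;                 // 32-bit MIPS
const int kParameterMapHeaderSize = 16;     // assumed stand-in
const int kFixedArrayHeaderSize = 8;        // assumed stand-in
const int kSloppyArgumentsObjectSize = 20;  // assumed stand-in

int SloppyArgumentsAllocationSize(int mapped_count, int arg_count) {
  int size = 0;
  if (mapped_count > 0)  // 1. Parameter map, present only with mapped params.
    size += mapped_count * kPointerSize + kParameterMapHeaderSize;
  // 2. Backing store for all arguments.
  size += arg_count * kPointerSize + kFixedArrayHeaderSize;
  // 3. The arguments object itself.
  return size + kSloppyArgumentsObjectSize;
}
```
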
| 2463 // v0 = address of new object(s) (tagged) | 2452 // v0 = address of new object(s) (tagged) |
| 2464 // a2 = argument count (tagged) | 2453 // a2 = argument count (tagged) |
| 2465 // Get the arguments boilerplate from the current native context into t0. | 2454 // Get the arguments boilerplate from the current native context into t0. |
| 2466 const int kNormalOffset = | 2455 const int kNormalOffset = |
| 2467 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX); | 2456 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX); |
| 2468 const int kAliasedOffset = | 2457 const int kAliasedOffset = |
| 2469 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX); | 2458 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX); |
| 2470 | 2459 |
| 2471 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 2460 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 2472 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); | 2461 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); |
| 2473 Label skip2_ne, skip2_eq; | 2462 Label skip2_ne, skip2_eq; |
| 2474 __ Branch(&skip2_ne, ne, a1, Operand(zero_reg)); | 2463 __ Branch(&skip2_ne, ne, a1, Operand(zero_reg)); |
| 2475 __ lw(t0, MemOperand(t0, kNormalOffset)); | 2464 __ lw(t0, MemOperand(t0, kNormalOffset)); |
| 2476 __ bind(&skip2_ne); | 2465 __ bind(&skip2_ne); |
| 2477 | 2466 |
| (...skipping 20 matching lines...) |
| 2498 | 2487 |
| 2499 // Use the length (smi tagged) and set that as an in-object property too. | 2488 // Use the length (smi tagged) and set that as an in-object property too. |
| 2500 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 2489 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 2501 const int kLengthOffset = JSObject::kHeaderSize + | 2490 const int kLengthOffset = JSObject::kHeaderSize + |
| 2502 Heap::kArgumentsLengthIndex * kPointerSize; | 2491 Heap::kArgumentsLengthIndex * kPointerSize; |
| 2503 __ sw(a2, FieldMemOperand(v0, kLengthOffset)); | 2492 __ sw(a2, FieldMemOperand(v0, kLengthOffset)); |
| 2504 | 2493 |
| 2505 // Set up the elements pointer in the allocated arguments object. | 2494 // Set up the elements pointer in the allocated arguments object. |
| 2506 // If we allocated a parameter map, t0 will point there, otherwise | 2495 // If we allocated a parameter map, t0 will point there, otherwise |
| 2507 // it will point to the backing store. | 2496 // it will point to the backing store. |
| 2508 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSize)); | 2497 __ Addu(t0, v0, Operand(Heap::kSloppyArgumentsObjectSize)); |
| 2509 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 2498 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 2510 | 2499 |
| 2511 // v0 = address of new object (tagged) | 2500 // v0 = address of new object (tagged) |
| 2512 // a1 = mapped parameter count (tagged) | 2501 // a1 = mapped parameter count (tagged) |
| 2513 // a2 = argument count (tagged) | 2502 // a2 = argument count (tagged) |
| 2514 // t0 = address of parameter map or backing store (tagged) | 2503 // t0 = address of parameter map or backing store (tagged) |
| 2515 // Initialize parameter map. If there are no mapped arguments, we're done. | 2504 // Initialize parameter map. If there are no mapped arguments, we're done. |
| 2516 Label skip_parameter_map; | 2505 Label skip_parameter_map; |
| 2517 Label skip3; | 2506 Label skip3; |
| 2518 __ Branch(&skip3, ne, a1, Operand(Smi::FromInt(0))); | 2507 __ Branch(&skip3, ne, a1, Operand(Smi::FromInt(0))); |
| 2519 // Move backing store address to a3, because it is | 2508 // Move backing store address to a3, because it is |
| 2520 // expected there when filling in the unmapped arguments. | 2509 // expected there when filling in the unmapped arguments. |
| 2521 __ mov(a3, t0); | 2510 __ mov(a3, t0); |
| 2522 __ bind(&skip3); | 2511 __ bind(&skip3); |
| 2523 | 2512 |
| 2524 __ Branch(&skip_parameter_map, eq, a1, Operand(Smi::FromInt(0))); | 2513 __ Branch(&skip_parameter_map, eq, a1, Operand(Smi::FromInt(0))); |
| 2525 | 2514 |
| 2526 __ LoadRoot(t2, Heap::kNonStrictArgumentsElementsMapRootIndex); | 2515 __ LoadRoot(t2, Heap::kSloppyArgumentsElementsMapRootIndex); |
| 2527 __ sw(t2, FieldMemOperand(t0, FixedArray::kMapOffset)); | 2516 __ sw(t2, FieldMemOperand(t0, FixedArray::kMapOffset)); |
| 2528 __ Addu(t2, a1, Operand(Smi::FromInt(2))); | 2517 __ Addu(t2, a1, Operand(Smi::FromInt(2))); |
| 2529 __ sw(t2, FieldMemOperand(t0, FixedArray::kLengthOffset)); | 2518 __ sw(t2, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 2530 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); | 2519 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); |
| 2531 __ sll(t6, a1, 1); | 2520 __ sll(t6, a1, 1); |
| 2532 __ Addu(t2, t0, Operand(t6)); | 2521 __ Addu(t2, t0, Operand(t6)); |
| 2533 __ Addu(t2, t2, Operand(kParameterMapHeaderSize)); | 2522 __ Addu(t2, t2, Operand(kParameterMapHeaderSize)); |
| 2534 __ sw(t2, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); | 2523 __ sw(t2, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); |
| 2535 | 2524 |
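
The stores above initialize the parameter map header. Read back as a structure (field order inferred from the FieldMemOperand offsets in the diff; this is not a real V8 declaration):

```cpp
// Layout sketch of the sloppy-arguments parameter map set up above.
struct SloppyArgumentsParameterMap {
  void* map;            // Heap::kSloppyArgumentsElementsMapRootIndex
  int   length;         // smi: mapped parameter count + 2
  void* context;        // elements[0]: the current context (stored from cp)
  void* backing_store;  // elements[1]: points past the map header
  // elements[2..]: smi context-slot indices for mapped parameters, or holes,
  // written by the copy loop that follows.
};
```
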
| 2536 // Copy the parameter slots and the holes in the arguments. | 2525 // Copy the parameter slots and the holes in the arguments. |
| (...skipping 102 matching lines...) |
| 2639 | 2628 |
| 2640 // Try the new space allocation. Start out with computing the size | 2629 // Try the new space allocation. Start out with computing the size |
| 2641 // of the arguments object and the elements array in words. | 2630 // of the arguments object and the elements array in words. |
| 2642 Label add_arguments_object; | 2631 Label add_arguments_object; |
| 2643 __ bind(&try_allocate); | 2632 __ bind(&try_allocate); |
| 2644 __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg)); | 2633 __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg)); |
| 2645 __ srl(a1, a1, kSmiTagSize); | 2634 __ srl(a1, a1, kSmiTagSize); |
| 2646 | 2635 |
| 2647 __ Addu(a1, a1, Operand(FixedArray::kHeaderSize / kPointerSize)); | 2636 __ Addu(a1, a1, Operand(FixedArray::kHeaderSize / kPointerSize)); |
| 2648 __ bind(&add_arguments_object); | 2637 __ bind(&add_arguments_object); |
| 2649 __ Addu(a1, a1, Operand(Heap::kArgumentsObjectSizeStrict / kPointerSize)); | 2638 __ Addu(a1, a1, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize)); |
| 2650 | 2639 |
| 2651 // Do the allocation of both objects in one go. | 2640 // Do the allocation of both objects in one go. |
| 2652 __ Allocate(a1, v0, a2, a3, &runtime, | 2641 __ Allocate(a1, v0, a2, a3, &runtime, |
| 2653 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); | 2642 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); |
| 2654 | 2643 |
| 2655 // Get the arguments boilerplate from the current native context. | 2644 // Get the arguments boilerplate from the current native context. |
| 2656 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 2645 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 2657 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); | 2646 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); |
| 2658 __ lw(t0, MemOperand(t0, Context::SlotOffset( | 2647 __ lw(t0, MemOperand(t0, Context::SlotOffset( |
| 2659 Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX))); | 2648 Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX))); |
| 2660 | 2649 |
| 2661 // Copy the JS object part. | 2650 // Copy the JS object part. |
| 2662 __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize); | 2651 __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize); |
| 2663 | 2652 |
| 2664 // Get the length (smi tagged) and set that as an in-object property too. | 2653 // Get the length (smi tagged) and set that as an in-object property too. |
| 2665 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 2654 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 2666 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); | 2655 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); |
| 2667 __ sw(a1, FieldMemOperand(v0, JSObject::kHeaderSize + | 2656 __ sw(a1, FieldMemOperand(v0, JSObject::kHeaderSize + |
| 2668 Heap::kArgumentsLengthIndex * kPointerSize)); | 2657 Heap::kArgumentsLengthIndex * kPointerSize)); |
| 2669 | 2658 |
| 2670 Label done; | 2659 Label done; |
| 2671 __ Branch(&done, eq, a1, Operand(zero_reg)); | 2660 __ Branch(&done, eq, a1, Operand(zero_reg)); |
| 2672 | 2661 |
| 2673 // Get the parameters pointer from the stack. | 2662 // Get the parameters pointer from the stack. |
| 2674 __ lw(a2, MemOperand(sp, 1 * kPointerSize)); | 2663 __ lw(a2, MemOperand(sp, 1 * kPointerSize)); |
| 2675 | 2664 |
| 2676 // Set up the elements pointer in the allocated arguments object and | 2665 // Set up the elements pointer in the allocated arguments object and |
| 2677 // initialize the header in the elements fixed array. | 2666 // initialize the header in the elements fixed array. |
| 2678 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSizeStrict)); | 2667 __ Addu(t0, v0, Operand(Heap::kStrictArgumentsObjectSize)); |
| 2679 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 2668 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 2680 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex); | 2669 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex); |
| 2681 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset)); | 2670 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset)); |
| 2682 __ sw(a1, FieldMemOperand(t0, FixedArray::kLengthOffset)); | 2671 __ sw(a1, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 2683 // Untag the length for the loop. | 2672 // Untag the length for the loop. |
| 2684 __ srl(a1, a1, kSmiTagSize); | 2673 __ srl(a1, a1, kSmiTagSize); |
| 2685 | 2674 |
| 2686 // Copy the fixed array slots. | 2675 // Copy the fixed array slots. |
| 2687 Label loop; | 2676 Label loop; |
| 2688 // Set up t0 to point to the first array slot. | 2677 // Set up t0 to point to the first array slot. |
| (...skipping 466 matching lines...) |
| 3155 // Cache the called function in a feedback vector slot. Cache states | 3144 // Cache the called function in a feedback vector slot. Cache states |
| 3156 // are uninitialized, monomorphic (indicated by a JSFunction), and | 3145 // are uninitialized, monomorphic (indicated by a JSFunction), and |
| 3157 // megamorphic. | 3146 // megamorphic. |
| 3158 // a0 : number of arguments to the construct function | 3147 // a0 : number of arguments to the construct function |
| 3159 // a1 : the function to call | 3148 // a1 : the function to call |
| 3160 // a2 : Feedback vector | 3149 // a2 : Feedback vector |
| 3161 // a3 : slot in feedback vector (Smi) | 3150 // a3 : slot in feedback vector (Smi) |
| 3162 Label initialize, done, miss, megamorphic, not_array_function; | 3151 Label initialize, done, miss, megamorphic, not_array_function; |
| 3163 | 3152 |
| 3164 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), | 3153 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3165 masm->isolate()->heap()->undefined_value()); | 3154 masm->isolate()->heap()->megamorphic_symbol()); |
| 3166 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), | 3155 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), |
| 3167 masm->isolate()->heap()->the_hole_value()); | 3156 masm->isolate()->heap()->uninitialized_symbol()); |
| 3168 | 3157 |
| 3169 // Load the cache state into t0. | 3158 // Load the cache state into t0. |
| 3170 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 3159 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3171 __ Addu(t0, a2, Operand(t0)); | 3160 __ Addu(t0, a2, Operand(t0)); |
| 3172 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); | 3161 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3173 | 3162 |
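
The two-instruction load above is the standard smi-indexed vector access. Ignoring the heap-object tag that FieldMemOperand subtracts, the address arithmetic is:

```cpp
#include <cstdint>

// a3 arrives smi-tagged (slot << 1); shifting left by
// kPointerSizeLog2 - kSmiTagSize == 2 - 1 == 1 on 32-bit MIPS scales it to
// a byte offset of slot * 4, i.e. slot * kPointerSize.
uint32_t FeedbackSlotAddress(uint32_t vector, uint32_t smi_slot,
                             uint32_t fixed_array_header /* assumed */) {
  return vector + (smi_slot << 1) + fixed_array_header;
}
```
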
| 3174 // A monomorphic cache hit or an already megamorphic state: invoke the | 3163 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 3175 // function without changing the state. | 3164 // function without changing the state. |
| 3176 __ Branch(&done, eq, t0, Operand(a1)); | 3165 __ Branch(&done, eq, t0, Operand(a1)); |
| 3177 | 3166 |
| 3178 // If we came here, we need to see if we are the array function. | 3167 // If we came here, we need to see if we are the array function. |
| 3179 // If we didn't have a matching function, and we didn't find the megamorphic | 3168 // If we didn't have a matching function, and we didn't find the megamorphic |
| 3180 // sentinel, then we have in the slot either some other function or an | 3169 // sentinel, then we have in the slot either some other function or an |
| 3181 // AllocationSite. Do a map check on the object in a3. | 3170 // AllocationSite. Do a map check on the object in a3. |
| 3182 __ lw(t1, FieldMemOperand(t0, 0)); | 3171 __ lw(t1, FieldMemOperand(t0, 0)); |
| 3183 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 3172 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 3184 __ Branch(&miss, ne, t1, Operand(at)); | 3173 __ Branch(&miss, ne, t1, Operand(at)); |
| 3185 | 3174 |
| 3186 // Make sure the function is the Array() function | 3175 // Make sure the function is the Array() function |
| 3187 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); | 3176 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
| 3188 __ Branch(&megamorphic, ne, a1, Operand(t0)); | 3177 __ Branch(&megamorphic, ne, a1, Operand(t0)); |
| 3189 __ jmp(&done); | 3178 __ jmp(&done); |
| 3190 | 3179 |
| 3191 __ bind(&miss); | 3180 __ bind(&miss); |
| 3192 | 3181 |
| 3193 // A monomorphic miss (i.e., here the cache is not uninitialized) goes | 3182 // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
| 3194 // megamorphic. | 3183 // megamorphic. |
| 3195 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3184 __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex); |
| 3196 __ Branch(&initialize, eq, t0, Operand(at)); | 3185 __ Branch(&initialize, eq, t0, Operand(at)); |
| 3197 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3186 // MegamorphicSentinel is an immortal immovable object (megamorphic symbol) so no |
| 3198 // write-barrier is needed. | 3187 // write-barrier is needed. |
| 3199 __ bind(&megamorphic); | 3188 __ bind(&megamorphic); |
| 3200 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 3189 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3201 __ Addu(t0, a2, Operand(t0)); | 3190 __ Addu(t0, a2, Operand(t0)); |
| 3202 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 3191 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); |
| 3203 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); | 3192 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3204 __ jmp(&done); | 3193 __ jmp(&done); |
| 3205 | 3194 |
| 3206 // An uninitialized cache is patched with the function or sentinel to | 3195 // An uninitialized cache is patched with the function or sentinel to |
| 3207 // indicate the ElementsKind if function is the Array constructor. | 3196 // indicate the ElementsKind if function is the Array constructor. |
| 3208 __ bind(&initialize); | 3197 __ bind(&initialize); |
| 3209 // Make sure the function is the Array() function | 3198 // Make sure the function is the Array() function |
| 3210 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); | 3199 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
| 3211 __ Branch(¬_array_function, ne, a1, Operand(t0)); | 3200 __ Branch(¬_array_function, ne, a1, Operand(t0)); |
| 3212 | 3201 |
| (...skipping 31 matching lines...) |
| 3244 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3233 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3245 __ Pop(t0, a2, a1); | 3234 __ Pop(t0, a2, a1); |
| 3246 | 3235 |
| 3247 __ bind(&done); | 3236 __ bind(&done); |
| 3248 } | 3237 } |
| 3249 | 3238 |
| 3250 | 3239 |
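
This patch only swaps the sentinel values (dedicated uninitialized/megamorphic symbols instead of the-hole and undefined); the slot's state machine is untouched. A sketch of the transitions implemented above:

```cpp
// States of a feedback vector slot as GenerateRecordCallTarget sees them.
enum CacheState { UNINITIALIZED, MONOMORPHIC, MEGAMORPHIC };

CacheState NextState(CacheState current, bool same_target) {
  switch (current) {
    case UNINITIALIZED:
      // Patched with the called function, or an AllocationSite when the
      // target is the Array() constructor.
      return MONOMORPHIC;
    case MONOMORPHIC:
      // A hit leaves the slot alone; a miss gives up on the cache.
      return same_target ? MONOMORPHIC : MEGAMORPHIC;
    case MEGAMORPHIC:
    default:
      return MEGAMORPHIC;  // terminal: call without touching the slot
  }
}
```
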
| 3251 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3240 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3252 // a1 : the function to call | 3241 // a1 : the function to call |
| 3253 // a2 : feedback vector | 3242 // a2 : feedback vector |
| 3254 // a3 : (only if a2 is not undefined) slot in feedback vector (Smi) | 3243 // a3 : (only if a2 is not the megamorphic symbol) slot in feedback |
| 3244 // vector (Smi) |
| 3255 Label slow, non_function, wrap, cont; | 3245 Label slow, non_function, wrap, cont; |
| 3256 | 3246 |
| 3257 if (NeedsChecks()) { | 3247 if (NeedsChecks()) { |
| 3258 // Check that the function is really a JavaScript function. | 3248 // Check that the function is really a JavaScript function. |
| 3259 // a1: pushed function (to be verified) | 3249 // a1: pushed function (to be verified) |
| 3260 __ JumpIfSmi(a1, &non_function); | 3250 __ JumpIfSmi(a1, &non_function); |
| 3261 | 3251 |
| 3262 // Goto slow case if we do not have a function. | 3252 // Goto slow case if we do not have a function. |
| 3263 __ GetObjectType(a1, t0, t0); | 3253 __ GetObjectType(a1, t0, t0); |
| 3264 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); | 3254 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
| (...skipping 12 matching lines...) |
| 3277 // Do not transform the receiver for strict mode functions and natives. | 3267 // Do not transform the receiver for strict mode functions and natives. |
| 3278 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 3268 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 3279 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); | 3269 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); |
| 3280 int32_t strict_mode_function_mask = | 3270 int32_t strict_mode_function_mask = |
| 3281 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); | 3271 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); |
| 3282 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); | 3272 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); |
| 3283 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); | 3273 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); |
| 3284 __ Branch(&cont, ne, at, Operand(zero_reg)); | 3274 __ Branch(&cont, ne, at, Operand(zero_reg)); |
| 3285 } | 3275 } |
| 3286 | 3276 |
| 3287 // Compute the receiver in non-strict mode. | 3277 // Compute the receiver in sloppy mode. |
| 3288 __ lw(a3, MemOperand(sp, argc_ * kPointerSize)); | 3278 __ lw(a3, MemOperand(sp, argc_ * kPointerSize)); |
| 3289 | 3279 |
| 3290 if (NeedsChecks()) { | 3280 if (NeedsChecks()) { |
| 3291 __ JumpIfSmi(a3, &wrap); | 3281 __ JumpIfSmi(a3, &wrap); |
| 3292 __ GetObjectType(a3, t0, t0); | 3282 __ GetObjectType(a3, t0, t0); |
| 3293 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); | 3283 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
| 3294 } else { | 3284 } else { |
| 3295 __ jmp(&wrap); | 3285 __ jmp(&wrap); |
| 3296 } | 3286 } |
| 3297 | 3287 |
| 3298 __ bind(&cont); | 3288 __ bind(&cont); |
| 3299 } | 3289 } |
| 3300 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | 3290 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 3301 | 3291 |
| 3302 if (NeedsChecks()) { | 3292 if (NeedsChecks()) { |
| 3303 // Slow-case: Non-function called. | 3293 // Slow-case: Non-function called. |
| 3304 __ bind(&slow); | 3294 __ bind(&slow); |
| 3305 if (RecordCallTarget()) { | 3295 if (RecordCallTarget()) { |
| 3306 // If there is a call target cache, mark it megamorphic in the | 3296 // If there is a call target cache, mark it megamorphic in the |
| 3307 // non-function case. MegamorphicSentinel is an immortal immovable | 3297 // non-function case. MegamorphicSentinel is an immortal immovable |
| 3308 // object (undefined) so no write barrier is needed. | 3298 // object (megamorphic symbol) so no write barrier is needed. |
| 3309 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), | 3299 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3310 masm->isolate()->heap()->undefined_value()); | 3300 masm->isolate()->heap()->megamorphic_symbol()); |
| 3311 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize); | 3301 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3312 __ Addu(t1, a2, Operand(t1)); | 3302 __ Addu(t1, a2, Operand(t1)); |
| 3313 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 3303 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); |
| 3314 __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize)); | 3304 __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize)); |
| 3315 } | 3305 } |
| 3316 // Check for function proxy. | 3306 // Check for function proxy. |
| 3317 __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); | 3307 __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 3318 __ push(a1); // Put proxy as additional argument. | 3308 __ push(a1); // Put proxy as additional argument. |
| 3319 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); | 3309 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); |
| 3320 __ li(a2, Operand(0, RelocInfo::NONE32)); | 3310 __ li(a2, Operand(0, RelocInfo::NONE32)); |
| 3321 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); | 3311 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); |
| 3322 { | 3312 { |
| 3323 Handle<Code> adaptor = | 3313 Handle<Code> adaptor = |
| (...skipping 1695 matching lines...) |
| 5019 __ CheckPageFlag(regs_.object(), | 5009 __ CheckPageFlag(regs_.object(), |
| 5020 regs_.scratch0(), | 5010 regs_.scratch0(), |
| 5021 1 << MemoryChunk::SCAN_ON_SCAVENGE, | 5011 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
| 5022 ne, | 5012 ne, |
| 5023 &dont_need_remembered_set); | 5013 &dont_need_remembered_set); |
| 5024 | 5014 |
| 5025 // First notify the incremental marker if necessary, then update the | 5015 // First notify the incremental marker if necessary, then update the |
| 5026 // remembered set. | 5016 // remembered set. |
| 5027 CheckNeedsToInformIncrementalMarker( | 5017 CheckNeedsToInformIncrementalMarker( |
| 5028 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); | 5018 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); |
| 5029 InformIncrementalMarker(masm, mode); | 5019 InformIncrementalMarker(masm); |
| 5030 regs_.Restore(masm); | 5020 regs_.Restore(masm); |
| 5031 __ RememberedSetHelper(object_, | 5021 __ RememberedSetHelper(object_, |
| 5032 address_, | 5022 address_, |
| 5033 value_, | 5023 value_, |
| 5034 save_fp_regs_mode_, | 5024 save_fp_regs_mode_, |
| 5035 MacroAssembler::kReturnAtEnd); | 5025 MacroAssembler::kReturnAtEnd); |
| 5036 | 5026 |
| 5037 __ bind(&dont_need_remembered_set); | 5027 __ bind(&dont_need_remembered_set); |
| 5038 } | 5028 } |
| 5039 | 5029 |
| 5040 CheckNeedsToInformIncrementalMarker( | 5030 CheckNeedsToInformIncrementalMarker( |
| 5041 masm, kReturnOnNoNeedToInformIncrementalMarker, mode); | 5031 masm, kReturnOnNoNeedToInformIncrementalMarker, mode); |
| 5042 InformIncrementalMarker(masm, mode); | 5032 InformIncrementalMarker(masm); |
| 5043 regs_.Restore(masm); | 5033 regs_.Restore(masm); |
| 5044 __ Ret(); | 5034 __ Ret(); |
| 5045 } | 5035 } |
| 5046 | 5036 |
| 5047 | 5037 |
| 5048 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) { | 5038 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { |
| 5049 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); | 5039 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); |
| 5050 int argument_count = 3; | 5040 int argument_count = 3; |
| 5051 __ PrepareCallCFunction(argument_count, regs_.scratch0()); | 5041 __ PrepareCallCFunction(argument_count, regs_.scratch0()); |
| 5052 Register address = | 5042 Register address = |
| 5053 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address(); | 5043 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address(); |
| 5054 ASSERT(!address.is(regs_.object())); | 5044 ASSERT(!address.is(regs_.object())); |
| 5055 ASSERT(!address.is(a0)); | 5045 ASSERT(!address.is(a0)); |
| 5056 __ Move(address, regs_.address()); | 5046 __ Move(address, regs_.address()); |
| 5057 __ Move(a0, regs_.object()); | 5047 __ Move(a0, regs_.object()); |
| 5058 __ Move(a1, address); | 5048 __ Move(a1, address); |
| 5059 __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate()))); | 5049 __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate()))); |
| 5060 | 5050 |
| 5061 AllowExternalCallThatCantCauseGC scope(masm); | 5051 AllowExternalCallThatCantCauseGC scope(masm); |
| 5062 if (mode == INCREMENTAL_COMPACTION) { | 5052 __ CallCFunction( |
| 5063 __ CallCFunction( | 5053 ExternalReference::incremental_marking_record_write_function( |
| 5064 ExternalReference::incremental_evacuation_record_write_function( | 5054 masm->isolate()), |
| 5065 masm->isolate()), | 5055 argument_count); |
| 5066 argument_count); | |
| 5067 } else { | |
| 5068 ASSERT(mode == INCREMENTAL); | |
| 5069 __ CallCFunction( | |
| 5070 ExternalReference::incremental_marking_record_write_function( | |
| 5071 masm->isolate()), | |
| 5072 argument_count); | |
| 5073 } | |
| 5074 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); | 5056 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); |
| 5075 } | 5057 } |
| 5076 | 5058 |
| 5077 | 5059 |
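
InformIncrementalMarker loses its Mode parameter because its only use was selecting between two C entry points that have been merged into one. A self-contained before/after sketch; the function name mirrors the ExternalReference accessor in the diff, everything else is a stand-in:

```cpp
#include <cstdio>

// Stand-in for incremental_marking_record_write_function, now the single
// entry point for both INCREMENTAL and INCREMENTAL_COMPACTION.
static void IncrementalMarkingRecordWrite(void* obj, void* slot, void* iso) {
  std::printf("record write: %p %p %p\n", obj, slot, iso);
}

static void InformIncrementalMarker(void* obj, void* slot, void* isolate) {
  // OLD: if (mode == INCREMENTAL_COMPACTION) call the evacuation variant,
  //      else the marking variant.
  // NEW: one call, so the mode dispatch (and the parameter) is dead code.
  IncrementalMarkingRecordWrite(obj, slot, isolate);
}
```
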
| 5078 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( | 5060 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
| 5079 MacroAssembler* masm, | 5061 MacroAssembler* masm, |
| 5080 OnNoNeedToInformIncrementalMarker on_no_need, | 5062 OnNoNeedToInformIncrementalMarker on_no_need, |
| 5081 Mode mode) { | 5063 Mode mode) { |
| 5082 Label on_black; | 5064 Label on_black; |
| 5083 Label need_incremental; | 5065 Label need_incremental; |
| (...skipping 395 matching lines...) |
| 5479 } else { | 5461 } else { |
| 5480 UNREACHABLE(); | 5462 UNREACHABLE(); |
| 5481 } | 5463 } |
| 5482 } | 5464 } |
| 5483 | 5465 |
| 5484 | 5466 |
| 5485 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 5467 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 5486 // ----------- S t a t e ------------- | 5468 // ----------- S t a t e ------------- |
| 5487 // -- a0 : argc (only if argument_count_ == ANY) | 5469 // -- a0 : argc (only if argument_count_ == ANY) |
| 5488 // -- a1 : constructor | 5470 // -- a1 : constructor |
| 5489 // -- a2 : feedback vector (fixed array or undefined) | 5471 // -- a2 : feedback vector (fixed array or megamorphic symbol) |
| 5490 // -- a3 : slot index (if a2 is fixed array) | 5472 // -- a3 : slot index (if a2 is fixed array) |
| 5491 // -- sp[0] : return address | 5473 // -- sp[0] : return address |
| 5492 // -- sp[4] : last argument | 5474 // -- sp[4] : last argument |
| 5493 // ----------------------------------- | 5475 // ----------------------------------- |
| 5476 |
| 5477 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 5478 masm->isolate()->heap()->megamorphic_symbol()); |
| 5479 |
| 5494 if (FLAG_debug_code) { | 5480 if (FLAG_debug_code) { |
| 5495 // The array construct code is only set for the global and natives | 5481 // The array construct code is only set for the global and natives |
| 5496 // builtin Array functions which always have maps. | 5482 // builtin Array functions which always have maps. |
| 5497 | 5483 |
| 5498 // Initial map for the builtin Array function should be a map. | 5484 // Initial map for the builtin Array function should be a map. |
| 5499 __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 5485 __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 5500 // Will both indicate a NULL and a Smi. | 5486 // Will both indicate a NULL and a Smi. |
| 5501 __ SmiTst(t0, at); | 5487 __ SmiTst(t0, at); |
| 5502 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, | 5488 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, |
| 5503 at, Operand(zero_reg)); | 5489 at, Operand(zero_reg)); |
| 5504 __ GetObjectType(t0, t0, t1); | 5490 __ GetObjectType(t0, t0, t1); |
| 5505 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, | 5491 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, |
| 5506 t1, Operand(MAP_TYPE)); | 5492 t1, Operand(MAP_TYPE)); |
| 5507 | 5493 |
| 5508 // We should either have undefined in a2 or a valid fixed array. | 5494 // We should either have the megamorphic symbol in a2 or a valid |
| 5495 // fixed array. |
| 5509 Label okay_here; | 5496 Label okay_here; |
| 5510 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map(); | 5497 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map(); |
| 5511 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5498 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); |
| 5512 __ Branch(&okay_here, eq, a2, Operand(at)); | 5499 __ Branch(&okay_here, eq, a2, Operand(at)); |
| 5513 __ lw(t0, FieldMemOperand(a2, 0)); | 5500 __ lw(t0, FieldMemOperand(a2, 0)); |
| 5514 __ Assert(eq, kExpectedFixedArrayInRegisterA2, | 5501 __ Assert(eq, kExpectedFixedArrayInRegisterA2, |
| 5515 t0, Operand(fixed_array_map)); | 5502 t0, Operand(fixed_array_map)); |
| 5516 | 5503 |
| 5517 // a3 should be a smi if we don't have undefined in a2 | 5504 // a3 should be a smi if we don't have the megamorphic symbol in a2 |
| 5518 __ AssertSmi(a3); | 5505 __ AssertSmi(a3); |
| 5519 | 5506 |
| 5520 __ bind(&okay_here); | 5507 __ bind(&okay_here); |
| 5521 } | 5508 } |
| 5522 | 5509 |
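
Condensing the debug checks above and the dispatch that follows: the stub now keys "no feedback" on the megamorphic symbol rather than undefined. The types below are illustrative stand-ins:

```cpp
struct Slot { bool is_allocation_site; };  // what the feedback slot may hold

// a2 must be either the megamorphic-symbol sentinel ("no feedback") or a
// FixedArray feedback vector; only an AllocationSite in the slot lets the
// stub pick an ElementsKind-specialized array constructor.
bool CanUseAllocationSiteFeedback(bool vector_is_megamorphic_symbol,
                                  const Slot& slot) {
  if (vector_is_megamorphic_symbol) return false;  // branch to &no_info
  return slot.is_allocation_site;                  // otherwise also &no_info
}
```
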
| 5523 Label no_info; | 5510 Label no_info; |
| 5524 // Get the elements kind and case on that. | 5511 // Get the elements kind and case on that. |
| 5525 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5512 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex); |
| 5526 __ Branch(&no_info, eq, a2, Operand(at)); | 5513 __ Branch(&no_info, eq, a2, Operand(at)); |
| 5527 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 5514 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 5528 __ Addu(a2, a2, Operand(t0)); | 5515 __ Addu(a2, a2, Operand(t0)); |
| 5529 __ lw(a2, FieldMemOperand(a2, FixedArray::kHeaderSize)); | 5516 __ lw(a2, FieldMemOperand(a2, FixedArray::kHeaderSize)); |
| 5530 | 5517 |
| 5531 // If the feedback vector is undefined, or contains anything other than an | 5518 // If the feedback vector is the megamorphic symbol, or contains anything other than an |
| 5532 // AllocationSite, call an array constructor that doesn't use AllocationSites. | 5519 // AllocationSite, call an array constructor that doesn't use AllocationSites. |
| 5533 __ lw(t0, FieldMemOperand(a2, 0)); | 5520 __ lw(t0, FieldMemOperand(a2, 0)); |
| 5534 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 5521 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 5535 __ Branch(&no_info, ne, t0, Operand(at)); | 5522 __ Branch(&no_info, ne, t0, Operand(at)); |
| (...skipping 224 matching lines...) |
| 5760 MemOperand(fp, 6 * kPointerSize), | 5747 MemOperand(fp, 6 * kPointerSize), |
| 5761 NULL); | 5748 NULL); |
| 5762 } | 5749 } |
| 5763 | 5750 |
| 5764 | 5751 |
| 5765 #undef __ | 5752 #undef __ |
| 5766 | 5753 |
| 5767 } } // namespace v8::internal | 5754 } } // namespace v8::internal |
| 5768 | 5755 |
| 5769 #endif // V8_TARGET_ARCH_MIPS | 5756 #endif // V8_TARGET_ARCH_MIPS |