OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2291 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2302 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 2302 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
2303 ASSERT(ToRegister(instr->object()).is(rdx)); | 2303 ASSERT(ToRegister(instr->object()).is(rdx)); |
2304 ASSERT(ToRegister(instr->key()).is(rcx)); | 2304 ASSERT(ToRegister(instr->key()).is(rcx)); |
2305 ASSERT(ToRegister(instr->value()).is(rax)); | 2305 ASSERT(ToRegister(instr->value()).is(rax)); |
2306 | 2306 |
2307 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); | 2307 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
2308 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2308 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2309 } | 2309 } |
2310 | 2310 |
2311 | 2311 |
// Emits code to load the character code at a (possibly constant) index of a
// string into the result register.  Sequential strings (ASCII and two-byte)
// are handled inline; cons strings whose second component is the empty
// string are unpacked one level; any other representation falls through to
// the deferred runtime path in DoDeferredStringCharCodeAt.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  // Deferred code object invoked when the inline fast paths below cannot
  // handle the string's representation.
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register string = ToRegister(instr->string());
  Register index = no_reg;       // Only used when the index is non-constant.
  int const_index = -1;          // Only used when the index is a constant.
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    // Every in-bounds string index fits in a smi.
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the assert above.
      // So the bounds check that must dominate this instruction must
      // have deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  NearLabel flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into result register.
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-sequential strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle cons strings and go to deferred code for the rest.
  __ testb(result, Immediate(kIsConsStringMask));
  __ j(zero, deferred->entry());

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
                 Heap::kEmptyStringRootIndex);
  __ j(not_equal, deferred->entry());
  // Get the first of the two strings and load its instance type.
  // NOTE(review): this clobbers the string register; presumably safe because
  // the first component holds the same characters — confirm against the
  // LStringCharCodeAt operand constraints.
  __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(not_zero, deferred->entry());

  // Check for ASCII or two-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ movzxwl(result,
               FieldOperand(string,
                            SeqTwoByteString::kHeaderSize +
                            (kUC16Size * const_index)));
  } else {
    // Scale by 2 because each character occupies kUC16Size bytes.
    __ movzxwl(result, FieldOperand(string,
                                    index,
                                    times_2,
                                    SeqTwoByteString::kHeaderSize));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ movzxbl(result, FieldOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ movzxbl(result, FieldOperand(string,
                                    index,
                                    times_1,
                                    SeqAsciiString::kHeaderSize));
  }
  __ bind(&done);
  // The deferred slow path rejoins here with the character code already
  // placed in the result register.
  __ bind(deferred->exit());
}
| 2415 |
| 2416 |
// Slow path for DoStringCharCodeAt: calls Runtime::kStringCharCodeAt with
// the string and smi-tagged index, preserving all registers across the call
// via the safepoint push/pop mechanism.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, 0);

  __ PushSafepointRegisters();
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ Push(Smi::FromInt(const_index));
  } else {
    Register index = ToRegister(instr->index());
    __ Integer32ToSmi(index, index);
    __ push(index);
  }
  // Restore the context register from the frame before calling the runtime.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
  // Record the safepoint for the two arguments pushed above (string, index).
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
  if (FLAG_debug_code) {
    // The runtime result in rax is expected to be a smi character code.
    __ AbortIfNotSmi(rax);
  }
  __ SmiToInteger32(rax, rax);
  // Write the untagged result into the stack slot that PopSafepointRegisters
  // will restore the result register from.
  __ movq(Operand(rsp, Register::ToRspIndexForPushAll(result) * kPointerSize),
          rax);
  __ PopSafepointRegisters();
}
| 2451 |
| 2452 |
2312 void LCodeGen::DoStringLength(LStringLength* instr) { | 2453 void LCodeGen::DoStringLength(LStringLength* instr) { |
2313 Register string = ToRegister(instr->string()); | 2454 Register string = ToRegister(instr->string()); |
2314 Register result = ToRegister(instr->result()); | 2455 Register result = ToRegister(instr->result()); |
2315 __ movq(result, FieldOperand(string, String::kLengthOffset)); | 2456 __ movq(result, FieldOperand(string, String::kLengthOffset)); |
2316 } | 2457 } |
2317 | 2458 |
2318 | 2459 |
2319 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 2460 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
2320 LOperand* input = instr->InputAt(0); | 2461 LOperand* input = instr->InputAt(0); |
2321 ASSERT(input->IsRegister() || input->IsStackSlot()); | 2462 ASSERT(input->IsRegister() || input->IsStackSlot()); |
(...skipping 338 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2660 // Pick the right runtime function to call. | 2801 // Pick the right runtime function to call. |
2661 if (instr->hydrogen()->depth() > 1) { | 2802 if (instr->hydrogen()->depth() > 1) { |
2662 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); | 2803 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); |
2663 } else { | 2804 } else { |
2664 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); | 2805 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); |
2665 } | 2806 } |
2666 } | 2807 } |
2667 | 2808 |
2668 | 2809 |
// Materializes a regexp literal: fetches the boilerplate JSRegExp object
// from the function's literals array (creating it via the runtime the first
// time), then makes a shallow new-space copy of it as the result in rax.
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  NearLabel materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  // An undefined literal slot means the boilerplate has not been created yet.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in rax.
  __ push(rcx);
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->pattern());
  __ Push(instr->hydrogen()->flags());
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  // Try a fast inline allocation first; fall back to the runtime on failure.
  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // Preserve the boilerplate object (rbx) across the allocation call.
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movq(rdx, FieldOperand(rbx, i));
    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movq(FieldOperand(rax, i), rdx);
    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
  }
  // Copy the trailing word when the object size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
  }
}
2672 | 2860 |
2673 | 2861 |
2674 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 2862 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
2675 // Use the fast case closure allocation code that allocates in new | 2863 // Use the fast case closure allocation code that allocates in new |
2676 // space for nested functions that don't need literals cloning. | 2864 // space for nested functions that don't need literals cloning. |
2677 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); | 2865 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); |
2678 bool pretenure = instr->hydrogen()->pretenure(); | 2866 bool pretenure = instr->hydrogen()->pretenure(); |
2679 if (shared_info->num_literals() == 0 && !pretenure) { | 2867 if (shared_info->num_literals() == 0 && !pretenure) { |
2680 FastNewClosureStub stub; | 2868 FastNewClosureStub stub; |
(...skipping 188 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2869 RegisterEnvironmentForDeoptimization(environment); | 3057 RegisterEnvironmentForDeoptimization(environment); |
2870 ASSERT(osr_pc_offset_ == -1); | 3058 ASSERT(osr_pc_offset_ == -1); |
2871 osr_pc_offset_ = masm()->pc_offset(); | 3059 osr_pc_offset_ = masm()->pc_offset(); |
2872 } | 3060 } |
2873 | 3061 |
2874 #undef __ | 3062 #undef __ |
2875 | 3063 |
2876 } } // namespace v8::internal | 3064 } } // namespace v8::internal |
2877 | 3065 |
2878 #endif // V8_TARGET_ARCH_X64 | 3066 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |