Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(143)

Side by Side Diff: src/a64/code-stubs-a64.cc

Issue 204293004: A64: Remove Operand constructors where an implicit constructor can be used. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/a64/builtins-a64.cc ('k') | src/a64/codegen-a64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1062 matching lines...) Expand 10 before | Expand all | Expand 10 after
1073 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; 1073 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
1074 } else { 1074 } else {
1075 native = Builtins::COMPARE; 1075 native = Builtins::COMPARE;
1076 int ncr; // NaN compare result 1076 int ncr; // NaN compare result
1077 if ((cond == lt) || (cond == le)) { 1077 if ((cond == lt) || (cond == le)) {
1078 ncr = GREATER; 1078 ncr = GREATER;
1079 } else { 1079 } else {
1080 ASSERT((cond == gt) || (cond == ge)); // remaining cases 1080 ASSERT((cond == gt) || (cond == ge)); // remaining cases
1081 ncr = LESS; 1081 ncr = LESS;
1082 } 1082 }
1083 __ Mov(x10, Operand(Smi::FromInt(ncr))); 1083 __ Mov(x10, Smi::FromInt(ncr));
1084 __ Push(x10); 1084 __ Push(x10);
1085 } 1085 }
1086 1086
1087 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 1087 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1088 // tagged as a small integer. 1088 // tagged as a small integer.
1089 __ InvokeBuiltin(native, JUMP_FUNCTION); 1089 __ InvokeBuiltin(native, JUMP_FUNCTION);
1090 1090
1091 __ Bind(&miss); 1091 __ Bind(&miss);
1092 GenerateMiss(masm); 1092 GenerateMiss(masm);
1093 } 1093 }
(...skipping 10 matching lines...) Expand all
1104 1104
1105 // We don't allow a GC during a store buffer overflow so there is no need to 1105 // We don't allow a GC during a store buffer overflow so there is no need to
1106 // store the registers in any particular way, but we do have to store and 1106 // store the registers in any particular way, but we do have to store and
1107 // restore them. 1107 // restore them.
1108 __ PushCPURegList(saved_regs); 1108 __ PushCPURegList(saved_regs);
1109 if (save_doubles_ == kSaveFPRegs) { 1109 if (save_doubles_ == kSaveFPRegs) {
1110 __ PushCPURegList(kCallerSavedFP); 1110 __ PushCPURegList(kCallerSavedFP);
1111 } 1111 }
1112 1112
1113 AllowExternalCallThatCantCauseGC scope(masm); 1113 AllowExternalCallThatCantCauseGC scope(masm);
1114 __ Mov(x0, Operand(ExternalReference::isolate_address(masm->isolate()))); 1114 __ Mov(x0, ExternalReference::isolate_address(masm->isolate()));
1115 __ CallCFunction( 1115 __ CallCFunction(
1116 ExternalReference::store_buffer_overflow_function(masm->isolate()), 1116 ExternalReference::store_buffer_overflow_function(masm->isolate()),
1117 1, 0); 1117 1, 0);
1118 1118
1119 if (save_doubles_ == kSaveFPRegs) { 1119 if (save_doubles_ == kSaveFPRegs) {
1120 __ PopCPURegList(kCallerSavedFP); 1120 __ PopCPURegList(kCallerSavedFP);
1121 } 1121 }
1122 __ PopCPURegList(saved_regs); 1122 __ PopCPURegList(saved_regs);
1123 __ Ret(); 1123 __ Ret();
1124 } 1124 }
(...skipping 358 matching lines...) Expand 10 before | Expand all | Expand 10 after
1483 1483
1484 Isolate* isolate = masm->isolate(); 1484 Isolate* isolate = masm->isolate();
1485 1485
1486 const Register& argv = x21; 1486 const Register& argv = x21;
1487 const Register& argc = x22; 1487 const Register& argc = x22;
1488 const Register& target = x23; 1488 const Register& target = x23;
1489 1489
1490 if (do_gc) { 1490 if (do_gc) {
1491 // Call Runtime::PerformGC, passing x0 (the result parameter for 1491 // Call Runtime::PerformGC, passing x0 (the result parameter for
1492 // PerformGC) and x1 (the isolate). 1492 // PerformGC) and x1 (the isolate).
1493 __ Mov(x1, Operand(ExternalReference::isolate_address(masm->isolate()))); 1493 __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
1494 __ CallCFunction( 1494 __ CallCFunction(
1495 ExternalReference::perform_gc_function(isolate), 2, 0); 1495 ExternalReference::perform_gc_function(isolate), 2, 0);
1496 } 1496 }
1497 1497
1498 ExternalReference scope_depth = 1498 ExternalReference scope_depth =
1499 ExternalReference::heap_always_allocate_scope_depth(isolate); 1499 ExternalReference::heap_always_allocate_scope_depth(isolate);
1500 if (always_allocate) { 1500 if (always_allocate) {
1501 __ Mov(x10, Operand(scope_depth)); 1501 __ Mov(x10, Operand(scope_depth));
1502 __ Ldr(x11, MemOperand(x10)); 1502 __ Ldr(x11, MemOperand(x10));
1503 __ Add(x11, x11, 1); 1503 __ Add(x11, x11, 1);
1504 __ Str(x11, MemOperand(x10)); 1504 __ Str(x11, MemOperand(x10));
1505 } 1505 }
1506 1506
1507 // Prepare AAPCS64 arguments to pass to the builtin. 1507 // Prepare AAPCS64 arguments to pass to the builtin.
1508 __ Mov(x0, argc); 1508 __ Mov(x0, argc);
1509 __ Mov(x1, argv); 1509 __ Mov(x1, argv);
1510 __ Mov(x2, Operand(ExternalReference::isolate_address(isolate))); 1510 __ Mov(x2, ExternalReference::isolate_address(isolate));
1511 1511
1512 // Store the return address on the stack, in the space previously allocated 1512 // Store the return address on the stack, in the space previously allocated
1513 // by EnterExitFrame. The return address is queried by 1513 // by EnterExitFrame. The return address is queried by
1514 // ExitFrame::GetStateForFramePointer. 1514 // ExitFrame::GetStateForFramePointer.
1515 Label return_location; 1515 Label return_location;
1516 __ Adr(x12, &return_location); 1516 __ Adr(x12, &return_location);
1517 __ Poke(x12, 0); 1517 __ Poke(x12, 0);
1518 if (__ emit_debug_code()) { 1518 if (__ emit_debug_code()) {
1519 // Verify that the slot below fp[kSPOffset]-8 points to the return location 1519 // Verify that the slot below fp[kSPOffset]-8 points to the return location
1520 // (currently in x12). 1520 // (currently in x12).
(...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after
1813 // Set up the reserved register for 0.0. 1813 // Set up the reserved register for 0.0.
1814 __ Fmov(fp_zero, 0.0); 1814 __ Fmov(fp_zero, 0.0);
1815 1815
1816 // Build an entry frame (see layout below). 1816 // Build an entry frame (see layout below).
1817 Isolate* isolate = masm->isolate(); 1817 Isolate* isolate = masm->isolate();
1818 1818
1819 // Build an entry frame. 1819 // Build an entry frame.
1820 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 1820 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
1821 int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used. 1821 int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used.
1822 __ Mov(x13, bad_frame_pointer); 1822 __ Mov(x13, bad_frame_pointer);
1823 __ Mov(x12, Operand(Smi::FromInt(marker))); 1823 __ Mov(x12, Smi::FromInt(marker));
1824 __ Mov(x11, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate))); 1824 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
1825 __ Ldr(x10, MemOperand(x11)); 1825 __ Ldr(x10, MemOperand(x11));
1826 1826
1827 __ Push(x13, xzr, x12, x10); 1827 __ Push(x13, xzr, x12, x10);
1828 // Set up fp. 1828 // Set up fp.
1829 __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset); 1829 __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset);
1830 1830
1831 // Push the JS entry frame marker. Also set js_entry_sp if this is the 1831 // Push the JS entry frame marker. Also set js_entry_sp if this is the
1832 // outermost JS call. 1832 // outermost JS call.
1833 Label non_outermost_js, done; 1833 Label non_outermost_js, done;
1834 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); 1834 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
1835 __ Mov(x10, Operand(ExternalReference(js_entry_sp))); 1835 __ Mov(x10, ExternalReference(js_entry_sp));
1836 __ Ldr(x11, MemOperand(x10)); 1836 __ Ldr(x11, MemOperand(x10));
1837 __ Cbnz(x11, &non_outermost_js); 1837 __ Cbnz(x11, &non_outermost_js);
1838 __ Str(fp, MemOperand(x10)); 1838 __ Str(fp, MemOperand(x10));
1839 __ Mov(x12, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); 1839 __ Mov(x12, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
1840 __ Push(x12); 1840 __ Push(x12);
1841 __ B(&done); 1841 __ B(&done);
1842 __ Bind(&non_outermost_js); 1842 __ Bind(&non_outermost_js);
1843 // We spare one instruction by pushing xzr since the marker is 0. 1843 // We spare one instruction by pushing xzr since the marker is 0.
1844 ASSERT(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME) == NULL); 1844 ASSERT(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME) == NULL);
1845 __ Push(xzr); 1845 __ Push(xzr);
1846 __ Bind(&done); 1846 __ Bind(&done);
1847 1847
1848 // The frame set up looks like this: 1848 // The frame set up looks like this:
1849 // jssp[0] : JS entry frame marker. 1849 // jssp[0] : JS entry frame marker.
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
1898 1898
1899 // Expected registers by Builtins::JSEntryTrampoline 1899 // Expected registers by Builtins::JSEntryTrampoline
1900 // x0: code entry. 1900 // x0: code entry.
1901 // x1: function. 1901 // x1: function.
1902 // x2: receiver. 1902 // x2: receiver.
1903 // x3: argc. 1903 // x3: argc.
1904 // x4: argv. 1904 // x4: argv.
1905 ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline 1905 ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline
1906 : Builtins::kJSEntryTrampoline, 1906 : Builtins::kJSEntryTrampoline,
1907 isolate); 1907 isolate);
1908 __ Mov(x10, Operand(entry)); 1908 __ Mov(x10, entry);
1909 1909
1910 // Call the JSEntryTrampoline. 1910 // Call the JSEntryTrampoline.
1911 __ Ldr(x11, MemOperand(x10)); // Dereference the address. 1911 __ Ldr(x11, MemOperand(x10)); // Dereference the address.
1912 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag); 1912 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
1913 __ Blr(x12); 1913 __ Blr(x12);
1914 1914
1915 // Unlink this frame from the handler chain. 1915 // Unlink this frame from the handler chain.
1916 __ PopTryHandler(); 1916 __ PopTryHandler();
1917 1917
1918 1918
1919 __ Bind(&exit); 1919 __ Bind(&exit);
1920 // x0 holds the result. 1920 // x0 holds the result.
1921 // The stack pointer points to the top of the entry frame pushed on entry from 1921 // The stack pointer points to the top of the entry frame pushed on entry from
1922 // C++ (at the beginning of this stub): 1922 // C++ (at the beginning of this stub):
1923 // jssp[0] : JS entry frame marker. 1923 // jssp[0] : JS entry frame marker.
1924 // jssp[1] : C entry FP. 1924 // jssp[1] : C entry FP.
1925 // jssp[2] : stack frame marker. 1925 // jssp[2] : stack frame marker.
1926 //   jssp[3] : stack frame marker. 1926 //   jssp[3] : stack frame marker.
1927 // jssp[4] : bad frame pointer 0xfff...ff <- fp points here. 1927 // jssp[4] : bad frame pointer 0xfff...ff <- fp points here.
1928 1928
1929 // Check if the current stack frame is marked as the outermost JS frame. 1929 // Check if the current stack frame is marked as the outermost JS frame.
1930 Label non_outermost_js_2; 1930 Label non_outermost_js_2;
1931 __ Pop(x10); 1931 __ Pop(x10);
1932 __ Cmp(x10, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); 1932 __ Cmp(x10, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
1933 __ B(ne, &non_outermost_js_2); 1933 __ B(ne, &non_outermost_js_2);
1934 __ Mov(x11, Operand(ExternalReference(js_entry_sp))); 1934 __ Mov(x11, ExternalReference(js_entry_sp));
1935 __ Str(xzr, MemOperand(x11)); 1935 __ Str(xzr, MemOperand(x11));
1936 __ Bind(&non_outermost_js_2); 1936 __ Bind(&non_outermost_js_2);
1937 1937
1938 // Restore the top frame descriptors from the stack. 1938 // Restore the top frame descriptors from the stack.
1939 __ Pop(x10); 1939 __ Pop(x10);
1940 __ Mov(x11, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate))); 1940 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
1941 __ Str(x10, MemOperand(x11)); 1941 __ Str(x10, MemOperand(x11));
1942 1942
1943 // Reset the stack to the callee saved registers. 1943 // Reset the stack to the callee saved registers.
1944 __ Drop(-EntryFrameConstants::kCallerFPOffset, kByteSizeInBytes); 1944 __ Drop(-EntryFrameConstants::kCallerFPOffset, kByteSizeInBytes);
1945 // Restore the callee-saved registers and return. 1945 // Restore the callee-saved registers and return.
1946 ASSERT(jssp.Is(__ StackPointer())); 1946 ASSERT(jssp.Is(__ StackPointer()));
1947 __ Mov(csp, jssp); 1947 __ Mov(csp, jssp);
1948 __ SetStackPointer(csp); 1948 __ SetStackPointer(csp);
1949 __ PopCalleeSavedRegisters(); 1949 __ PopCalleeSavedRegisters();
1950 // After this point, we must not modify jssp because it is a callee-saved 1950 // After this point, we must not modify jssp because it is a callee-saved
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after
2010 Label not_js_object, slow; 2010 Label not_js_object, slow;
2011 2011
2012 if (!HasArgsInRegisters()) { 2012 if (!HasArgsInRegisters()) {
2013 __ Pop(function, object); 2013 __ Pop(function, object);
2014 } 2014 }
2015 2015
2016 if (ReturnTrueFalseObject()) { 2016 if (ReturnTrueFalseObject()) {
2017 __ LoadTrueFalseRoots(res_true, res_false); 2017 __ LoadTrueFalseRoots(res_true, res_false);
2018 } else { 2018 } else {
2019 // This is counter-intuitive, but correct. 2019 // This is counter-intuitive, but correct.
2020 __ Mov(res_true, Operand(Smi::FromInt(0))); 2020 __ Mov(res_true, Smi::FromInt(0));
2021 __ Mov(res_false, Operand(Smi::FromInt(1))); 2021 __ Mov(res_false, Smi::FromInt(1));
2022 } 2022 }
2023 2023
2024 // Check that the left hand side is a JS object and load its map as a side 2024 // Check that the left hand side is a JS object and load its map as a side
2025 // effect. 2025 // effect.
2026 Register map = x12; 2026 Register map = x12;
2027 __ JumpIfSmi(object, &not_js_object); 2027 __ JumpIfSmi(object, &not_js_object);
2028 __ IsObjectJSObjectType(object, map, scratch2, &not_js_object); 2028 __ IsObjectJSObjectType(object, map, scratch2, &not_js_object);
2029 2029
2030 // If there is a call site cache, don't look in the global cache, but do the 2030 // If there is a call site cache, don't look in the global cache, but do the
2031 // real lookup and update the call site cache. 2031 // real lookup and update the call site cache.
(...skipping 149 matching lines...) Expand 10 before | Expand all | Expand 10 after
2181 __ JumpIfNotSmi(key, &slow); 2181 __ JumpIfNotSmi(key, &slow);
2182 2182
2183 // Check if the calling frame is an arguments adaptor frame. 2183 // Check if the calling frame is an arguments adaptor frame.
2184 Register local_fp = x11; 2184 Register local_fp = x11;
2185 Register caller_fp = x11; 2185 Register caller_fp = x11;
2186 Register caller_ctx = x12; 2186 Register caller_ctx = x12;
2187 Label skip_adaptor; 2187 Label skip_adaptor;
2188 __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2188 __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2189 __ Ldr(caller_ctx, MemOperand(caller_fp, 2189 __ Ldr(caller_ctx, MemOperand(caller_fp,
2190 StandardFrameConstants::kContextOffset)); 2190 StandardFrameConstants::kContextOffset));
2191 __ Cmp(caller_ctx, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2191 __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2192 __ Csel(local_fp, fp, caller_fp, ne); 2192 __ Csel(local_fp, fp, caller_fp, ne);
2193 __ B(ne, &skip_adaptor); 2193 __ B(ne, &skip_adaptor);
2194 2194
2195 // Load the actual arguments limit found in the arguments adaptor frame. 2195 // Load the actual arguments limit found in the arguments adaptor frame.
2196 __ Ldr(arg_count, MemOperand(caller_fp, 2196 __ Ldr(arg_count, MemOperand(caller_fp,
2197 ArgumentsAdaptorFrameConstants::kLengthOffset)); 2197 ArgumentsAdaptorFrameConstants::kLengthOffset));
2198 __ Bind(&skip_adaptor); 2198 __ Bind(&skip_adaptor);
2199 2199
2200 // Check index against formal parameters count limit. Use unsigned comparison 2200 // Check index against formal parameters count limit. Use unsigned comparison
2201 // to get negative check for free: branch if key < 0 or key >= arg_count. 2201 // to get negative check for free: branch if key < 0 or key >= arg_count.
(...skipping 29 matching lines...) Expand all
2231 __ Ldr(w11, MemOperand(caller_fp, StandardFrameConstants::kContextOffset + 2231 __ Ldr(w11, MemOperand(caller_fp, StandardFrameConstants::kContextOffset +
2232 (kSmiShift / kBitsPerByte))); 2232 (kSmiShift / kBitsPerByte)));
2233 __ Cmp(w11, StackFrame::ARGUMENTS_ADAPTOR); 2233 __ Cmp(w11, StackFrame::ARGUMENTS_ADAPTOR);
2234 __ B(ne, &runtime); 2234 __ B(ne, &runtime);
2235 2235
2236 // Patch the arguments.length and parameters pointer in the current frame. 2236 // Patch the arguments.length and parameters pointer in the current frame.
2237 __ Ldr(x11, MemOperand(caller_fp, 2237 __ Ldr(x11, MemOperand(caller_fp,
2238 ArgumentsAdaptorFrameConstants::kLengthOffset)); 2238 ArgumentsAdaptorFrameConstants::kLengthOffset));
2239 __ Poke(x11, 0 * kXRegSize); 2239 __ Poke(x11, 0 * kXRegSize);
2240 __ Add(x10, caller_fp, Operand::UntagSmiAndScale(x11, kPointerSizeLog2)); 2240 __ Add(x10, caller_fp, Operand::UntagSmiAndScale(x11, kPointerSizeLog2));
2241 __ Add(x10, x10, Operand(StandardFrameConstants::kCallerSPOffset)); 2241 __ Add(x10, x10, StandardFrameConstants::kCallerSPOffset);
2242 __ Poke(x10, 1 * kXRegSize); 2242 __ Poke(x10, 1 * kXRegSize);
2243 2243
2244 __ Bind(&runtime); 2244 __ Bind(&runtime);
2245 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); 2245 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2246 } 2246 }
2247 2247
2248 2248
2249 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { 2249 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
2250 // Stack layout on entry. 2250 // Stack layout on entry.
2251 // jssp[0]: number of parameters (tagged) 2251 // jssp[0]: number of parameters (tagged)
(...skipping 12 matching lines...) Expand all
2264 __ SmiUntag(param_count, param_count_smi); 2264 __ SmiUntag(param_count, param_count_smi);
2265 2265
2266 // Check if the calling frame is an arguments adaptor frame. 2266 // Check if the calling frame is an arguments adaptor frame.
2267 Register caller_fp = x11; 2267 Register caller_fp = x11;
2268 Register caller_ctx = x12; 2268 Register caller_ctx = x12;
2269 Label runtime; 2269 Label runtime;
2270 Label adaptor_frame, try_allocate; 2270 Label adaptor_frame, try_allocate;
2271 __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2271 __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2272 __ Ldr(caller_ctx, MemOperand(caller_fp, 2272 __ Ldr(caller_ctx, MemOperand(caller_fp,
2273 StandardFrameConstants::kContextOffset)); 2273 StandardFrameConstants::kContextOffset));
2274 __ Cmp(caller_ctx, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2274 __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2275 __ B(eq, &adaptor_frame); 2275 __ B(eq, &adaptor_frame);
2276 2276
2277 // No adaptor, parameter count = argument count. 2277 // No adaptor, parameter count = argument count.
2278 2278
2279 // x1 mapped_params number of mapped params, min(params, args) (uninit) 2279 // x1 mapped_params number of mapped params, min(params, args) (uninit)
2280 // x2 arg_count number of function arguments (uninit) 2280 // x2 arg_count number of function arguments (uninit)
2281 // x3 arg_count_smi number of function arguments (smi) 2281 // x3 arg_count_smi number of function arguments (smi)
2282 // x4 function function pointer 2282 // x4 function function pointer
2283 // x7 param_count number of function parameters 2283 // x7 param_count number of function parameters
2284 // x11 caller_fp caller's frame pointer 2284 // x11 caller_fp caller's frame pointer
(...skipping 181 matching lines...) Expand 10 before | Expand all | Expand 10 after
2466 2466
2467 __ B(&parameters_test); 2467 __ B(&parameters_test);
2468 2468
2469 __ Bind(&parameters_loop); 2469 __ Bind(&parameters_loop);
2470 __ Sub(loop_count, loop_count, 1); 2470 __ Sub(loop_count, loop_count, 1);
2471 __ Mov(x10, Operand(loop_count, LSL, kPointerSizeLog2)); 2471 __ Mov(x10, Operand(loop_count, LSL, kPointerSizeLog2));
2472 __ Add(x10, x10, kParameterMapHeaderSize - kHeapObjectTag); 2472 __ Add(x10, x10, kParameterMapHeaderSize - kHeapObjectTag);
2473 __ Str(index, MemOperand(elements, x10)); 2473 __ Str(index, MemOperand(elements, x10));
2474 __ Sub(x10, x10, kParameterMapHeaderSize - FixedArray::kHeaderSize); 2474 __ Sub(x10, x10, kParameterMapHeaderSize - FixedArray::kHeaderSize);
2475 __ Str(the_hole, MemOperand(backing_store, x10)); 2475 __ Str(the_hole, MemOperand(backing_store, x10));
2476 __ Add(index, index, Operand(Smi::FromInt(1))); 2476 __ Add(index, index, Smi::FromInt(1));
2477 __ Bind(&parameters_test); 2477 __ Bind(&parameters_test);
2478 __ Cbnz(loop_count, &parameters_loop); 2478 __ Cbnz(loop_count, &parameters_loop);
2479 2479
2480 __ Bind(&skip_parameter_map); 2480 __ Bind(&skip_parameter_map);
2481 // Copy arguments header and remaining slots (if there are any.) 2481 // Copy arguments header and remaining slots (if there are any.)
2482 __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex); 2482 __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);
2483 __ Str(x10, FieldMemOperand(backing_store, FixedArray::kMapOffset)); 2483 __ Str(x10, FieldMemOperand(backing_store, FixedArray::kMapOffset));
2484 __ Str(arg_count_smi, FieldMemOperand(backing_store, 2484 __ Str(arg_count_smi, FieldMemOperand(backing_store,
2485 FixedArray::kLengthOffset)); 2485 FixedArray::kLengthOffset));
2486 2486
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
2535 __ Pop(param_count_smi, params, function); 2535 __ Pop(param_count_smi, params, function);
2536 __ SmiUntag(param_count, param_count_smi); 2536 __ SmiUntag(param_count, param_count_smi);
2537 2537
2538 // Test if arguments adaptor needed. 2538 // Test if arguments adaptor needed.
2539 Register caller_fp = x11; 2539 Register caller_fp = x11;
2540 Register caller_ctx = x12; 2540 Register caller_ctx = x12;
2541 Label try_allocate, runtime; 2541 Label try_allocate, runtime;
2542 __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2542 __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2543 __ Ldr(caller_ctx, MemOperand(caller_fp, 2543 __ Ldr(caller_ctx, MemOperand(caller_fp,
2544 StandardFrameConstants::kContextOffset)); 2544 StandardFrameConstants::kContextOffset));
2545 __ Cmp(caller_ctx, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2545 __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2546 __ B(ne, &try_allocate); 2546 __ B(ne, &try_allocate);
2547 2547
2548 // x1 param_count_smi number of parameters passed to function (smi) 2548 // x1 param_count_smi number of parameters passed to function (smi)
2549 // x2 params pointer to parameters 2549 // x2 params pointer to parameters
2550 // x3 function function pointer 2550 // x3 function function pointer
2551 // x11 caller_fp caller's frame pointer 2551 // x11 caller_fp caller's frame pointer
2552 // x13 param_count number of parameters passed to function 2552 // x13 param_count number of parameters passed to function
2553 2553
2554 // Patch the argument length and parameters pointer. 2554 // Patch the argument length and parameters pointer.
2555 __ Ldr(param_count_smi, 2555 __ Ldr(param_count_smi,
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after
2721 const int kPreviousIndexOffset = 5 * kPointerSize; 2721 const int kPreviousIndexOffset = 5 * kPointerSize;
2722 const int kSubjectOffset = 6 * kPointerSize; 2722 const int kSubjectOffset = 6 * kPointerSize;
2723 const int kJSRegExpOffset = 7 * kPointerSize; 2723 const int kJSRegExpOffset = 7 * kPointerSize;
2724 2724
2725 // Ensure that a RegExp stack is allocated. 2725 // Ensure that a RegExp stack is allocated.
2726 Isolate* isolate = masm->isolate(); 2726 Isolate* isolate = masm->isolate();
2727 ExternalReference address_of_regexp_stack_memory_address = 2727 ExternalReference address_of_regexp_stack_memory_address =
2728 ExternalReference::address_of_regexp_stack_memory_address(isolate); 2728 ExternalReference::address_of_regexp_stack_memory_address(isolate);
2729 ExternalReference address_of_regexp_stack_memory_size = 2729 ExternalReference address_of_regexp_stack_memory_size =
2730 ExternalReference::address_of_regexp_stack_memory_size(isolate); 2730 ExternalReference::address_of_regexp_stack_memory_size(isolate);
2731 __ Mov(x10, Operand(address_of_regexp_stack_memory_size)); 2731 __ Mov(x10, address_of_regexp_stack_memory_size);
2732 __ Ldr(x10, MemOperand(x10)); 2732 __ Ldr(x10, MemOperand(x10));
2733 __ Cbz(x10, &runtime); 2733 __ Cbz(x10, &runtime);
2734 2734
2735 // Check that the first argument is a JSRegExp object. 2735 // Check that the first argument is a JSRegExp object.
2736 ASSERT(jssp.Is(__ StackPointer())); 2736 ASSERT(jssp.Is(__ StackPointer()));
2737 __ Peek(jsregexp_object, kJSRegExpOffset); 2737 __ Peek(jsregexp_object, kJSRegExpOffset);
2738 __ JumpIfSmi(jsregexp_object, &runtime); 2738 __ JumpIfSmi(jsregexp_object, &runtime);
2739 __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime); 2739 __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime);
2740 2740
2741 // Check that the RegExp has been compiled (data contains a fixed array). 2741 // Check that the RegExp has been compiled (data contains a fixed array).
2742 __ Ldr(regexp_data, FieldMemOperand(jsregexp_object, JSRegExp::kDataOffset)); 2742 __ Ldr(regexp_data, FieldMemOperand(jsregexp_object, JSRegExp::kDataOffset));
2743 if (FLAG_debug_code) { 2743 if (FLAG_debug_code) {
2744 STATIC_ASSERT(kSmiTag == 0); 2744 STATIC_ASSERT(kSmiTag == 0);
2745 __ Tst(regexp_data, kSmiTagMask); 2745 __ Tst(regexp_data, kSmiTagMask);
2746 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected); 2746 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
2747 __ CompareObjectType(regexp_data, x10, x10, FIXED_ARRAY_TYPE); 2747 __ CompareObjectType(regexp_data, x10, x10, FIXED_ARRAY_TYPE);
2748 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); 2748 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
2749 } 2749 }
2750 2750
2751 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. 2751 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
2752 __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); 2752 __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
2753 __ Cmp(x10, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); 2753 __ Cmp(x10, Smi::FromInt(JSRegExp::IRREGEXP));
2754 __ B(ne, &runtime); 2754 __ B(ne, &runtime);
2755 2755
2756 // Check that the number of captures fit in the static offsets vector buffer. 2756 // Check that the number of captures fit in the static offsets vector buffer.
2757 // We have always at least one capture for the whole match, plus additional 2757 // We have always at least one capture for the whole match, plus additional
2758 // ones due to capturing parentheses. A capture takes 2 registers. 2758 // ones due to capturing parentheses. A capture takes 2 registers.
2759 // The number of capture registers then is (number_of_captures + 1) * 2. 2759 // The number of capture registers then is (number_of_captures + 1) * 2.
2760 __ Ldrsw(x10, 2760 __ Ldrsw(x10,
2761 UntagSmiFieldMemOperand(regexp_data, 2761 UntagSmiFieldMemOperand(regexp_data,
2762 JSRegExp::kIrregexpCaptureCountOffset)); 2762 JSRegExp::kIrregexpCaptureCountOffset));
2763 // Check (number_of_captures + 1) * 2 <= offsets vector size 2763 // Check (number_of_captures + 1) * 2 <= offsets vector size
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after
2898 ASSERT(csp.Is(__ StackPointer())); 2898 ASSERT(csp.Is(__ StackPointer()));
2899 2899
2900 // We have 9 arguments to pass to the regexp code, therefore we have to pass 2900 // We have 9 arguments to pass to the regexp code, therefore we have to pass
2901 // one on the stack and the rest as registers. 2901 // one on the stack and the rest as registers.
2902 2902
2903 // Note that the placement of the argument on the stack isn't standard 2903 // Note that the placement of the argument on the stack isn't standard
2904 // AAPCS64: 2904 // AAPCS64:
2905 // csp[0]: Space for the return address placed by DirectCEntryStub. 2905 // csp[0]: Space for the return address placed by DirectCEntryStub.
2906 // csp[8]: Argument 9, the current isolate address. 2906 // csp[8]: Argument 9, the current isolate address.
2907 2907
2908 __ Mov(x10, Operand(ExternalReference::isolate_address(isolate))); 2908 __ Mov(x10, ExternalReference::isolate_address(isolate));
2909 __ Poke(x10, kPointerSize); 2909 __ Poke(x10, kPointerSize);
2910 2910
2911 Register length = w11; 2911 Register length = w11;
2912 Register previous_index_in_bytes = w12; 2912 Register previous_index_in_bytes = w12;
2913 Register start = x13; 2913 Register start = x13;
2914 2914
2915 // Load start of the subject string. 2915 // Load start of the subject string.
2916 __ Add(start, subject, SeqString::kHeaderSize - kHeapObjectTag); 2916 __ Add(start, subject, SeqString::kHeaderSize - kHeapObjectTag);
2917 // Load the length from the original subject string from the previous stack 2917 // Load the length from the original subject string from the previous stack
2918 // frame. Therefore we have to use fp, which points exactly to two pointer 2918 // frame. Therefore we have to use fp, which points exactly to two pointer
(...skipping 28 matching lines...) Expand all
2947 // is not sliced). 2947 // is not sliced).
2948 __ Add(w10, previous_index_in_bytes, sliced_string_offset); 2948 __ Add(w10, previous_index_in_bytes, sliced_string_offset);
2949 __ Add(x2, start, Operand(w10, UXTW)); 2949 __ Add(x2, start, Operand(w10, UXTW));
2950 2950
2951 // Argument 4 (x3): 2951 // Argument 4 (x3):
2952 // End of input = start of input + (length of input - previous index) 2952 // End of input = start of input + (length of input - previous index)
2953 __ Sub(w10, length, previous_index_in_bytes); 2953 __ Sub(w10, length, previous_index_in_bytes);
2954 __ Add(x3, x2, Operand(w10, UXTW)); 2954 __ Add(x3, x2, Operand(w10, UXTW));
2955 2955
2956 // Argument 5 (x4): static offsets vector buffer. 2956 // Argument 5 (x4): static offsets vector buffer.
2957 __ Mov(x4, 2957 __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate));
2958 Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
2959 2958
2960 // Argument 6 (x5): Set the number of capture registers to zero to force 2959 // Argument 6 (x5): Set the number of capture registers to zero to force
2961 // global regexps to behave as non-global. This stub is not used for global 2960 // global regexps to behave as non-global. This stub is not used for global
2962 // regexps. 2961 // regexps.
2963 __ Mov(x5, 0); 2962 __ Mov(x5, 0);
2964 2963
2965 // Argument 7 (x6): Start (high end) of backtracking stack memory area. 2964 // Argument 7 (x6): Start (high end) of backtracking stack memory area.
2966 __ Mov(x10, Operand(address_of_regexp_stack_memory_address)); 2965 __ Mov(x10, address_of_regexp_stack_memory_address);
2967 __ Ldr(x10, MemOperand(x10)); 2966 __ Ldr(x10, MemOperand(x10));
2968 __ Mov(x11, Operand(address_of_regexp_stack_memory_size)); 2967 __ Mov(x11, address_of_regexp_stack_memory_size);
2969 __ Ldr(x11, MemOperand(x11)); 2968 __ Ldr(x11, MemOperand(x11));
2970 __ Add(x6, x10, x11); 2969 __ Add(x6, x10, x11);
2971 2970
2972 // Argument 8 (x7): Indicate that this is a direct call from JavaScript. 2971 // Argument 8 (x7): Indicate that this is a direct call from JavaScript.
2973 __ Mov(x7, 1); 2972 __ Mov(x7, 1);
2974 2973
2975 // Locate the code entry and call it. 2974 // Locate the code entry and call it.
2976 __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag); 2975 __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag);
2977 DirectCEntryStub stub; 2976 DirectCEntryStub stub;
2978 stub.GenerateCall(masm, code_object); 2977 stub.GenerateCall(masm, code_object);
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after
3054 kDontSaveFPRegs); 3053 kDontSaveFPRegs);
3055 3054
3056 Register last_match_offsets = x13; 3055 Register last_match_offsets = x13;
3057 Register offsets_vector_index = x14; 3056 Register offsets_vector_index = x14;
3058 Register current_offset = x15; 3057 Register current_offset = x15;
3059 3058
3060 // Get the static offsets vector filled by the native regexp code 3059 // Get the static offsets vector filled by the native regexp code
3061 // and fill the last match info. 3060 // and fill the last match info.
3062 ExternalReference address_of_static_offsets_vector = 3061 ExternalReference address_of_static_offsets_vector =
3063 ExternalReference::address_of_static_offsets_vector(isolate); 3062 ExternalReference::address_of_static_offsets_vector(isolate);
3064 __ Mov(offsets_vector_index, Operand(address_of_static_offsets_vector)); 3063 __ Mov(offsets_vector_index, address_of_static_offsets_vector);
3065 3064
3066 Label next_capture, done; 3065 Label next_capture, done;
3067 // Capture register counter starts from number of capture registers and 3066 // Capture register counter starts from number of capture registers and
3068 // iterates down to zero (inclusive). 3067 // iterates down to zero (inclusive).
3069 __ Add(last_match_offsets, 3068 __ Add(last_match_offsets,
3070 last_match_info_elements, 3069 last_match_info_elements,
3071 RegExpImpl::kFirstCaptureOffset - kHeapObjectTag); 3070 RegExpImpl::kFirstCaptureOffset - kHeapObjectTag);
3072 __ Bind(&next_capture); 3071 __ Bind(&next_capture);
3073 __ Subs(number_of_capture_registers, number_of_capture_registers, 2); 3072 __ Subs(number_of_capture_registers, number_of_capture_registers, 2);
3074 __ B(mi, &done); 3073 __ B(mi, &done);
(...skipping 474 matching lines...) Expand 10 before | Expand all | Expand 10 after
3549 __ Mov(result_, x0); 3548 __ Mov(result_, x0);
3550 call_helper.AfterCall(masm); 3549 call_helper.AfterCall(masm);
3551 __ B(&exit_); 3550 __ B(&exit_);
3552 3551
3553 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); 3552 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3554 } 3553 }
3555 3554
3556 3555
3557 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { 3556 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3558 __ JumpIfNotSmi(code_, &slow_case_); 3557 __ JumpIfNotSmi(code_, &slow_case_);
3559 __ Cmp(code_, Operand(Smi::FromInt(String::kMaxOneByteCharCode))); 3558 __ Cmp(code_, Smi::FromInt(String::kMaxOneByteCharCode));
3560 __ B(hi, &slow_case_); 3559 __ B(hi, &slow_case_);
3561 3560
3562 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); 3561 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3563 // At this point code register contains smi tagged ASCII char code. 3562 // At this point code register contains smi tagged ASCII char code.
3564 STATIC_ASSERT(kSmiShift > kPointerSizeLog2); 3563 STATIC_ASSERT(kSmiShift > kPointerSizeLog2);
3565 __ Add(result_, result_, Operand(code_, LSR, kSmiShift - kPointerSizeLog2)); 3564 __ Add(result_, result_, Operand(code_, LSR, kSmiShift - kPointerSizeLog2));
3566 __ Ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); 3565 __ Ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
3567 __ JumpIfRoot(result_, Heap::kUndefinedValueRootIndex, &slow_case_); 3566 __ JumpIfRoot(result_, Heap::kUndefinedValueRootIndex, &slow_case_);
3568 __ Bind(&exit_); 3567 __ Bind(&exit_);
3569 } 3568 }
(...skipping 329 matching lines...) Expand 10 before | Expand all | Expand 10 after
3899 ExternalReference miss = 3898 ExternalReference miss =
3900 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); 3899 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
3901 3900
3902 FrameScope scope(masm, StackFrame::INTERNAL); 3901 FrameScope scope(masm, StackFrame::INTERNAL);
3903 Register op = x10; 3902 Register op = x10;
3904 Register left = x1; 3903 Register left = x1;
3905 Register right = x0; 3904 Register right = x0;
3906 // Preserve some caller-saved registers. 3905 // Preserve some caller-saved registers.
3907 __ Push(x1, x0, lr); 3906 __ Push(x1, x0, lr);
3908 // Push the arguments. 3907 // Push the arguments.
3909 __ Mov(op, Operand(Smi::FromInt(op_))); 3908 __ Mov(op, Smi::FromInt(op_));
3910 __ Push(left, right, op); 3909 __ Push(left, right, op);
3911 3910
3912 // Call the miss handler. This also pops the arguments. 3911 // Call the miss handler. This also pops the arguments.
3913 __ CallExternalReference(miss, 3); 3912 __ CallExternalReference(miss, 3);
3914 3913
3915 // Compute the entry point of the rewritten stub. 3914 // Compute the entry point of the rewritten stub.
3916 __ Add(stub_entry, x0, Code::kHeaderSize - kHeapObjectTag); 3915 __ Add(stub_entry, x0, Code::kHeaderSize - kHeapObjectTag);
3917 // Restore caller-saved registers. 3916 // Restore caller-saved registers.
3918 __ Pop(lr, x0, x1); 3917 __ Pop(lr, x0, x1);
3919 } 3918 }
(...skipping 328 matching lines...) Expand 10 before | Expand all | Expand 10 after
4248 4247
4249 // Compare lengths. If lengths differ, strings can't be equal. Lengths are 4248 // Compare lengths. If lengths differ, strings can't be equal. Lengths are
4250 // smis, and don't need to be untagged. 4249 // smis, and don't need to be untagged.
4251 Label strings_not_equal, check_zero_length; 4250 Label strings_not_equal, check_zero_length;
4252 __ Ldr(left_length, FieldMemOperand(left, String::kLengthOffset)); 4251 __ Ldr(left_length, FieldMemOperand(left, String::kLengthOffset));
4253 __ Ldr(right_length, FieldMemOperand(right, String::kLengthOffset)); 4252 __ Ldr(right_length, FieldMemOperand(right, String::kLengthOffset));
4254 __ Cmp(left_length, right_length); 4253 __ Cmp(left_length, right_length);
4255 __ B(eq, &check_zero_length); 4254 __ B(eq, &check_zero_length);
4256 4255
4257 __ Bind(&strings_not_equal); 4256 __ Bind(&strings_not_equal);
4258 __ Mov(result, Operand(Smi::FromInt(NOT_EQUAL))); 4257 __ Mov(result, Smi::FromInt(NOT_EQUAL));
4259 __ Ret(); 4258 __ Ret();
4260 4259
4261 // Check if the length is zero. If so, the strings must be equal (and empty.) 4260 // Check if the length is zero. If so, the strings must be equal (and empty.)
4262 Label compare_chars; 4261 Label compare_chars;
4263 __ Bind(&check_zero_length); 4262 __ Bind(&check_zero_length);
4264 STATIC_ASSERT(kSmiTag == 0); 4263 STATIC_ASSERT(kSmiTag == 0);
4265 __ Cbnz(left_length, &compare_chars); 4264 __ Cbnz(left_length, &compare_chars);
4266 __ Mov(result, Operand(Smi::FromInt(EQUAL))); 4265 __ Mov(result, Smi::FromInt(EQUAL));
4267 __ Ret(); 4266 __ Ret();
4268 4267
4269 // Compare characters. Falls through if all characters are equal. 4268 // Compare characters. Falls through if all characters are equal.
4270 __ Bind(&compare_chars); 4269 __ Bind(&compare_chars);
4271 GenerateAsciiCharsCompareLoop(masm, left, right, left_length, scratch2, 4270 GenerateAsciiCharsCompareLoop(masm, left, right, left_length, scratch2,
4272 scratch3, &strings_not_equal); 4271 scratch3, &strings_not_equal);
4273 4272
4274 // Characters in strings are equal. 4273 // Characters in strings are equal.
4275 __ Mov(result, Operand(Smi::FromInt(EQUAL))); 4274 __ Mov(result, Smi::FromInt(EQUAL));
4276 __ Ret(); 4275 __ Ret();
4277 } 4276 }
4278 4277
4279 4278
4280 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, 4279 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
4281 Register left, 4280 Register left,
4282 Register right, 4281 Register right,
4283 Register scratch1, 4282 Register scratch1,
4284 Register scratch2, 4283 Register scratch2,
4285 Register scratch3, 4284 Register scratch3,
(...skipping 21 matching lines...) Expand all
4307 4306
4308 ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0)); 4307 ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
4309 4308
4310 // Use length_delta as result if it's zero. 4309 // Use length_delta as result if it's zero.
4311 Register result = x0; 4310 Register result = x0;
4312 __ Subs(result, length_delta, 0); 4311 __ Subs(result, length_delta, 0);
4313 4312
4314 __ Bind(&result_not_equal); 4313 __ Bind(&result_not_equal);
4315 Register greater = x10; 4314 Register greater = x10;
4316 Register less = x11; 4315 Register less = x11;
4317 __ Mov(greater, Operand(Smi::FromInt(GREATER))); 4316 __ Mov(greater, Smi::FromInt(GREATER));
4318 __ Mov(less, Operand(Smi::FromInt(LESS))); 4317 __ Mov(less, Smi::FromInt(LESS));
4319 __ CmovX(result, greater, gt); 4318 __ CmovX(result, greater, gt);
4320 __ CmovX(result, less, lt); 4319 __ CmovX(result, less, lt);
4321 __ Ret(); 4320 __ Ret();
4322 } 4321 }
4323 4322
4324 4323
4325 void StringCompareStub::GenerateAsciiCharsCompareLoop( 4324 void StringCompareStub::GenerateAsciiCharsCompareLoop(
4326 MacroAssembler* masm, 4325 MacroAssembler* masm,
4327 Register left, 4326 Register left,
4328 Register right, 4327 Register right,
(...skipping 105 matching lines...) Expand 10 before | Expand all | Expand 10 after
4434 __ CheckMap(elements, 4433 __ CheckMap(elements,
4435 x10, 4434 x10,
4436 Heap::kFixedArrayMapRootIndex, 4435 Heap::kFixedArrayMapRootIndex,
4437 &call_builtin, 4436 &call_builtin,
4438 DONT_DO_SMI_CHECK); 4437 DONT_DO_SMI_CHECK);
4439 } 4438 }
4440 4439
4441 // Get the array's length and calculate new length. 4440 // Get the array's length and calculate new length.
4442 __ Ldr(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); 4441 __ Ldr(length, FieldMemOperand(receiver, JSArray::kLengthOffset));
4443 STATIC_ASSERT(kSmiTag == 0); 4442 STATIC_ASSERT(kSmiTag == 0);
4444 __ Add(length, length, Operand(Smi::FromInt(argc))); 4443 __ Add(length, length, Smi::FromInt(argc));
4445 4444
4446 // Check if we could survive without allocation. 4445 // Check if we could survive without allocation.
4447 __ Ldr(elements_length, 4446 __ Ldr(elements_length,
4448 FieldMemOperand(elements, FixedArray::kLengthOffset)); 4447 FieldMemOperand(elements, FixedArray::kLengthOffset));
4449 __ Cmp(length, elements_length); 4448 __ Cmp(length, elements_length);
4450 4449
4451 const int kEndElementsOffset = 4450 const int kEndElementsOffset =
4452 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; 4451 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
4453 4452
4454 if (IsFastSmiOrObjectElementsKind(elements_kind())) { 4453 if (IsFastSmiOrObjectElementsKind(elements_kind())) {
(...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after
4561 ExternalReference::new_space_allocation_limit_address(isolate); 4560 ExternalReference::new_space_allocation_limit_address(isolate);
4562 4561
4563 const int kAllocationDelta = 4; 4562 const int kAllocationDelta = 4;
4564 ASSERT(kAllocationDelta >= argc); 4563 ASSERT(kAllocationDelta >= argc);
4565 Register allocation_top_addr = x5; 4564 Register allocation_top_addr = x5;
4566 Register allocation_top = x9; 4565 Register allocation_top = x9;
4567 // Load top and check if it is the end of elements. 4566 // Load top and check if it is the end of elements.
4568 __ Add(end_elements, elements, 4567 __ Add(end_elements, elements,
4569 Operand::UntagSmiAndScale(length, kPointerSizeLog2)); 4568 Operand::UntagSmiAndScale(length, kPointerSizeLog2));
4570 __ Add(end_elements, end_elements, kEndElementsOffset); 4569 __ Add(end_elements, end_elements, kEndElementsOffset);
4571 __ Mov(allocation_top_addr, Operand(new_space_allocation_top)); 4570 __ Mov(allocation_top_addr, new_space_allocation_top);
4572 __ Ldr(allocation_top, MemOperand(allocation_top_addr)); 4571 __ Ldr(allocation_top, MemOperand(allocation_top_addr));
4573 __ Cmp(end_elements, allocation_top); 4572 __ Cmp(end_elements, allocation_top);
4574 __ B(ne, &call_builtin); 4573 __ B(ne, &call_builtin);
4575 4574
4576 __ Mov(x10, Operand(new_space_allocation_limit)); 4575 __ Mov(x10, new_space_allocation_limit);
4577 __ Ldr(x10, MemOperand(x10)); 4576 __ Ldr(x10, MemOperand(x10));
4578 __ Add(allocation_top, allocation_top, kAllocationDelta * kPointerSize); 4577 __ Add(allocation_top, allocation_top, kAllocationDelta * kPointerSize);
4579 __ Cmp(allocation_top, x10); 4578 __ Cmp(allocation_top, x10);
4580 __ B(hi, &call_builtin); 4579 __ B(hi, &call_builtin);
4581 4580
4582 // We fit and could grow elements. 4581 // We fit and could grow elements.
4583 // Update new_space_allocation_top. 4582 // Update new_space_allocation_top.
4584 __ Str(allocation_top, MemOperand(allocation_top_addr)); 4583 __ Str(allocation_top, MemOperand(allocation_top_addr));
4585 // Push the argument. 4584 // Push the argument.
4586 __ Str(argument, MemOperand(end_elements)); 4585 __ Str(argument, MemOperand(end_elements));
4587 // Fill the rest with holes. 4586 // Fill the rest with holes.
4588 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex); 4587 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
4589 ASSERT(kAllocationDelta == 4); 4588 ASSERT(kAllocationDelta == 4);
4590 __ Stp(x10, x10, MemOperand(end_elements, 1 * kPointerSize)); 4589 __ Stp(x10, x10, MemOperand(end_elements, 1 * kPointerSize));
4591 __ Stp(x10, x10, MemOperand(end_elements, 3 * kPointerSize)); 4590 __ Stp(x10, x10, MemOperand(end_elements, 3 * kPointerSize));
4592 4591
4593 // Update elements' and array's sizes. 4592 // Update elements' and array's sizes.
4594 __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); 4593 __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset));
4595 __ Add(elements_length, 4594 __ Add(elements_length, elements_length, Smi::FromInt(kAllocationDelta));
4596 elements_length,
4597 Operand(Smi::FromInt(kAllocationDelta)));
4598 __ Str(elements_length, 4595 __ Str(elements_length,
4599 FieldMemOperand(elements, FixedArray::kLengthOffset)); 4596 FieldMemOperand(elements, FixedArray::kLengthOffset));
4600 4597
4601 // Elements are in new space, so write barrier is not required. 4598 // Elements are in new space, so write barrier is not required.
4602 __ Drop(argc + 1); 4599 __ Drop(argc + 1);
4603 __ Mov(x0, length); 4600 __ Mov(x0, length);
4604 __ Ret(); 4601 __ Ret();
4605 4602
4606 __ Bind(&call_builtin); 4603 __ Bind(&call_builtin);
4607 __ TailCallExternalReference( 4604 __ TailCallExternalReference(
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
4686 4683
4687 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { 4684 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4688 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); 4685 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4689 Register address = 4686 Register address =
4690 x0.Is(regs_.address()) ? regs_.scratch0() : regs_.address(); 4687 x0.Is(regs_.address()) ? regs_.scratch0() : regs_.address();
4691 ASSERT(!address.Is(regs_.object())); 4688 ASSERT(!address.Is(regs_.object()));
4692 ASSERT(!address.Is(x0)); 4689 ASSERT(!address.Is(x0));
4693 __ Mov(address, regs_.address()); 4690 __ Mov(address, regs_.address());
4694 __ Mov(x0, regs_.object()); 4691 __ Mov(x0, regs_.object());
4695 __ Mov(x1, address); 4692 __ Mov(x1, address);
4696 __ Mov(x2, Operand(ExternalReference::isolate_address(masm->isolate()))); 4693 __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
4697 4694
4698 AllowExternalCallThatCantCauseGC scope(masm); 4695 AllowExternalCallThatCantCauseGC scope(masm);
4699 ExternalReference function = 4696 ExternalReference function =
4700 ExternalReference::incremental_marking_record_write_function( 4697 ExternalReference::incremental_marking_record_write_function(
4701 masm->isolate()); 4698 masm->isolate());
4702 __ CallCFunction(function, 3, 0); 4699 __ CallCFunction(function, 3, 0);
4703 4700
4704 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); 4701 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
4705 } 4702 }
4706 4703
(...skipping 234 matching lines...) Expand 10 before | Expand all | Expand 10 after
4941 reinterpret_cast<uintptr_t>(masm->isolate()->function_entry_hook()); 4938 reinterpret_cast<uintptr_t>(masm->isolate()->function_entry_hook());
4942 __ Mov(x10, entry_hook); 4939 __ Mov(x10, entry_hook);
4943 #else 4940 #else
4944 // Under the simulator we need to indirect the entry hook through a trampoline 4941 // Under the simulator we need to indirect the entry hook through a trampoline
4945 // function at a known address. 4942 // function at a known address.
4946 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline)); 4943 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
4947 __ Mov(x10, Operand(ExternalReference(&dispatcher, 4944 __ Mov(x10, Operand(ExternalReference(&dispatcher,
4948 ExternalReference::BUILTIN_CALL, 4945 ExternalReference::BUILTIN_CALL,
4949 masm->isolate()))); 4946 masm->isolate())));
4950 // It additionally takes an isolate as a third parameter 4947 // It additionally takes an isolate as a third parameter
4951 __ Mov(x2, Operand(ExternalReference::isolate_address(masm->isolate()))); 4948 __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
4952 #endif 4949 #endif
4953 4950
4954 // The caller's return address is above the saved temporaries. 4951 // The caller's return address is above the saved temporaries.
4955 // Grab its location for the second argument to the hook. 4952 // Grab its location for the second argument to the hook.
4956 __ Add(x1, __ StackPointer(), kNumSavedRegs * kPointerSize); 4953 __ Add(x1, __ StackPointer(), kNumSavedRegs * kPointerSize);
4957 4954
4958 { 4955 {
4959 // Create a dummy frame, as CallCFunction requires this. 4956 // Create a dummy frame, as CallCFunction requires this.
4960 FrameScope frame(masm, StackFrame::MANUAL); 4957 FrameScope frame(masm, StackFrame::MANUAL);
4961 __ CallCFunction(x10, 2, 0); 4958 __ CallCFunction(x10, 2, 0);
(...skipping 366 matching lines...) Expand 10 before | Expand all | Expand 10 after
5328 &normal_sequence); 5325 &normal_sequence);
5329 __ Assert(eq, kExpectedAllocationSite); 5326 __ Assert(eq, kExpectedAllocationSite);
5330 } 5327 }
5331 5328
5332 // Save the resulting elements kind in type info. We can't just store 'kind' 5329 // Save the resulting elements kind in type info. We can't just store 'kind'
5333 // in the AllocationSite::transition_info field because elements kind is 5330 // in the AllocationSite::transition_info field because elements kind is
5334 // restricted to a portion of the field; upper bits need to be left alone. 5331 // restricted to a portion of the field; upper bits need to be left alone.
5335 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 5332 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5336 __ Ldr(x11, FieldMemOperand(allocation_site, 5333 __ Ldr(x11, FieldMemOperand(allocation_site,
5337 AllocationSite::kTransitionInfoOffset)); 5334 AllocationSite::kTransitionInfoOffset));
5338 __ Add(x11, x11, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); 5335 __ Add(x11, x11, Smi::FromInt(kFastElementsKindPackedToHoley));
5339 __ Str(x11, FieldMemOperand(allocation_site, 5336 __ Str(x11, FieldMemOperand(allocation_site,
5340 AllocationSite::kTransitionInfoOffset)); 5337 AllocationSite::kTransitionInfoOffset));
5341 5338
5342 __ Bind(&normal_sequence); 5339 __ Bind(&normal_sequence);
5343 int last_index = 5340 int last_index =
5344 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); 5341 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
5345 for (int i = 0; i <= last_index; ++i) { 5342 for (int i = 0; i <= last_index; ++i) {
5346 Label next; 5343 Label next;
5347 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); 5344 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
5348 __ CompareAndBranch(kind, candidate_kind, ne, &next); 5345 __ CompareAndBranch(kind, candidate_kind, ne, &next);
(...skipping 261 matching lines...) Expand 10 before | Expand all | Expand 10 after
5610 // FunctionCallbackArguments: context, callee and call data. 5607 // FunctionCallbackArguments: context, callee and call data.
5611 __ Push(context, callee, call_data); 5608 __ Push(context, callee, call_data);
5612 5609
5613 // Load context from callee 5610 // Load context from callee
5614 __ Ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset)); 5611 __ Ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));
5615 5612
5616 if (!call_data_undefined) { 5613 if (!call_data_undefined) {
5617 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); 5614 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
5618 } 5615 }
5619 Register isolate_reg = x5; 5616 Register isolate_reg = x5;
5620 __ Mov(isolate_reg, Operand(ExternalReference::isolate_address(isolate))); 5617 __ Mov(isolate_reg, ExternalReference::isolate_address(isolate));
5621 5618
5622 // FunctionCallbackArguments: 5619 // FunctionCallbackArguments:
5623 // return value, return value default, isolate, holder. 5620 // return value, return value default, isolate, holder.
5624 __ Push(call_data, call_data, isolate_reg, holder); 5621 __ Push(call_data, call_data, isolate_reg, holder);
5625 5622
5626 // Prepare arguments. 5623 // Prepare arguments.
5627 Register args = x6; 5624 Register args = x6;
5628 __ Mov(args, masm->StackPointer()); 5625 __ Mov(args, masm->StackPointer());
5629 5626
5630 // Allocate the v8::Arguments structure in the arguments' space, since it's 5627 // Allocate the v8::Arguments structure in the arguments' space, since it's
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
5723 MemOperand(fp, 6 * kPointerSize), 5720 MemOperand(fp, 6 * kPointerSize),
5724 NULL); 5721 NULL);
5725 } 5722 }
5726 5723
5727 5724
5728 #undef __ 5725 #undef __
5729 5726
5730 } } // namespace v8::internal 5727 } } // namespace v8::internal
5731 5728
5732 #endif // V8_TARGET_ARCH_A64 5729 #endif // V8_TARGET_ARCH_A64
OLDNEW
« no previous file with comments | « src/a64/builtins-a64.cc ('k') | src/a64/codegen-a64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698