Chromium Code Reviews

Unified Diff: src/arm64/code-stubs-arm64.cc

Issue 1731253003: Revert of [Interpreter] Implements calls through CallICStub in the interpreter. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 10 months ago
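In substance, the revert below takes the ARM64 CallICStub back to treating the argument count as a property of the stub itself: arg_count() is read from the stub and materialized with an explicit __ Mov(x0, argc) just before the Call/CallFunction tail calls, and the argc_in_register() path (where the caller was expected to have the count already live in x0, with SmiTag/Push bookkeeping of x0 around the miss and weak-cell paths) is removed.

A minimal sketch of that trade-off, assuming nothing about V8 internals: FixedArgcCallStub and RegisterArgcCallStub below are hypothetical names used only to contrast "count baked into the stub" with "count supplied in a register at call time"; they are not V8 classes.

  #include <cstdio>

  // Hypothetical: the count is fixed when the stub object is created, the way
  // the reverted CallICStub encodes arg_count() in its key and emits the
  // __ Mov(x0, argc) itself.
  struct FixedArgcCallStub {
    explicit FixedArgcCallStub(int argc) : argc_(argc) {}
    void Call() const { std::printf("call with %d args\n", argc_); }
    int argc_;
  };

  // Hypothetical: the count is not a property of the stub; the caller must
  // already have it in the "argc register" (x0 in the code being reverted).
  struct RegisterArgcCallStub {
    void Call(int argc_in_register) const {
      std::printf("call with %d args\n", argc_in_register);
    }
  };

  int main() {
    FixedArgcCallStub fixed(2);    // a distinct stub per argument count
    fixed.Call();
    RegisterArgcCallStub generic;  // one stub serves any argument count
    generic.Call(2);
  }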
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_ARM64

 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
 #include "src/ic/handler-compiler.h"
(...skipping 2271 matching lines...)
   __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
   __ Br(x4);

   __ Bind(&non_function);
   __ Mov(x3, function);
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }


 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
-  // x0 - number of arguments
   // x1 - function
   // x3 - slot id
   // x2 - vector
   // x4 - allocation site (loaded from vector[slot])
   Register function = x1;
   Register feedback_vector = x2;
   Register index = x3;
   Register allocation_site = x4;
   Register scratch = x5;

   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch);
   __ Cmp(function, scratch);
   __ B(ne, miss);

+  __ Mov(x0, Operand(arg_count()));
+
   // Increment the call count for monomorphic function calls.
   __ Add(feedback_vector, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Add(feedback_vector, feedback_vector,
          Operand(FixedArray::kHeaderSize + kPointerSize));
   __ Ldr(index, FieldMemOperand(feedback_vector, 0));
   __ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
   __ Str(index, FieldMemOperand(feedback_vector, 0));

   // Set up arguments for the array constructor stub.
   Register allocation_site_arg = feedback_vector;
   Register new_target_arg = index;
   __ Mov(allocation_site_arg, allocation_site);
   __ Mov(new_target_arg, function);
-  if (argc_in_register()) {
-    // Pass a default ArgumentCountKey::Any since the argc is only available
-    // in r0. We do not have the actual count here.
-    ArrayConstructorStub stub(masm->isolate());
-    __ TailCallStub(&stub);
-  } else {
-    // arg_count() is expected in r0 if the arg_count() >= 2
-    // (ArgumentCountKey::MORE_THAN_ONE).
-    ArrayConstructorStub stub(masm->isolate(), arg_count());
-    __ TailCallStub(&stub);
-  }
+  ArrayConstructorStub stub(masm->isolate(), arg_count());
+  __ TailCallStub(&stub);
 }


 void CallICStub::Generate(MacroAssembler* masm) {
   ASM_LOCATION("CallICStub");

-  // x0 - number of arguments if argc_in_register() is true
   // x1 - function
   // x3 - slot id (Smi)
   // x2 - vector
   Label extra_checks_or_miss, call, call_function;
-  if (!argc_in_register()) {
-    __ Mov(x0, arg_count());
-  }
+  int argc = arg_count();
+  ParameterCount actual(argc);

   Register function = x1;
   Register feedback_vector = x2;
   Register index = x3;

   // The checks. First, does x1 match the recorded monomorphic target?
   __ Add(x4, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));

(...skipping 22 matching lines...)
   // Increment the call count for monomorphic function calls.
   __ Add(feedback_vector, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Add(feedback_vector, feedback_vector,
          Operand(FixedArray::kHeaderSize + kPointerSize));
   __ Ldr(index, FieldMemOperand(feedback_vector, 0));
   __ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
   __ Str(index, FieldMemOperand(feedback_vector, 0));

   __ Bind(&call_function);
+  __ Mov(x0, argc);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
           RelocInfo::CODE_TARGET);

   __ bind(&extra_checks_or_miss);
   Label uninitialized, miss, not_allocation_site;

   __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &call);

   __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset));
(...skipping 14 matching lines...)
   // We are going megamorphic. If the feedback is a JSFunction, it is fine
   // to handle it here. More complex cases are dealt with in the runtime.
   __ AssertNotSmi(x4);
   __ JumpIfNotObjectType(x4, x5, x5, JS_FUNCTION_TYPE, &miss);
   __ Add(x4, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ LoadRoot(x5, Heap::kmegamorphic_symbolRootIndex);
   __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));

   __ Bind(&call);
+  __ Mov(x0, argc);
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);

   __ bind(&uninitialized);

   // We are going monomorphic, provided we actually have a JSFunction.
   __ JumpIfSmi(function, &miss);

   // Goto miss case if we do not have a function.
   __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss);
(...skipping 17 matching lines...)
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize + kPointerSize));

   // Store the function. Use a stub since we need a frame for allocation.
   // x2 - vector
   // x3 - slot
   // x1 - function
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
-    __ SmiTag(x0);
-    __ Push(x0);
     __ Push(function);
     __ CallStub(&create_stub);
     __ Pop(function);
-    __ Pop(x0);
-    __ SmiUntag(x0);
   }

   __ B(&call_function);

   // We are here because tracing is on or we encountered a MISS case we can't
   // handle here.
   __ bind(&miss);
   GenerateMiss(masm);

   __ B(&call);
 }


 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   ASM_LOCATION("CallICStub[Miss]");

   FrameScope scope(masm, StackFrame::INTERNAL);

-  // Store number of arguments.
-  __ SmiTag(x0);
-  __ Push(x0);
-
   // Push the receiver and the function and feedback info.
   __ Push(x1, x2, x3);

   // Call the entry.
   __ CallRuntime(Runtime::kCallIC_Miss);

   // Move result to edi and exit the internal frame.
   __ Mov(x1, x0);
-
-  // Restore number of arguments.
-  __ Pop(x0);
-  __ SmiUntag(x0);
 }


 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
   if (check_mode_ == RECEIVER_IS_UNKNOWN) {
     __ JumpIfSmi(object_, receiver_not_string_);

     // Fetch the instance type of the receiver into result register.
     __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
(...skipping 3505 matching lines...)
                                return_value_operand, NULL);
 }


 #undef __

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_ARM64
