OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
7 | 7 |
8 #if V8_TARGET_ARCH_PPC | 8 #if V8_TARGET_ARCH_PPC |
9 | 9 |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 1284 matching lines...) |
1295 | 1295 |
1296 // Load current lexical context from the stack frame. | 1296 // Load current lexical context from the stack frame. |
1297 LoadP(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 1297 LoadP(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
1298 // In debug mode, make sure the lexical context is set. | 1298 // In debug mode, make sure the lexical context is set. |
1299 #ifdef DEBUG | 1299 #ifdef DEBUG |
1300 cmpi(scratch, Operand::Zero()); | 1300 cmpi(scratch, Operand::Zero()); |
1301 Check(ne, kWeShouldNotHaveAnEmptyLexicalContext); | 1301 Check(ne, kWeShouldNotHaveAnEmptyLexicalContext); |
1302 #endif | 1302 #endif |
1303 | 1303 |
1304 // Load the native context of the current context. | 1304 // Load the native context of the current context. |
1305 int offset = | 1305 LoadP(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX)); |
1306 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; | |
1307 LoadP(scratch, FieldMemOperand(scratch, offset)); | |
1308 LoadP(scratch, | |
1309 FieldMemOperand(scratch, JSGlobalObject::kNativeContextOffset)); | |
1310 | 1306 |
1311 // Check the context is a native context. | 1307 // Check the context is a native context. |
1312 if (emit_debug_code()) { | 1308 if (emit_debug_code()) { |
1313 // Cannot use ip as a temporary in this verification code, because ip is | 1309 // Cannot use ip as a temporary in this verification code, because ip is |
1314 // clobbered as part of cmp with an object Operand. | 1310 // clobbered as part of cmp with an object Operand. |
1315 push(holder_reg); // Temporarily save holder on the stack. | 1311 push(holder_reg); // Temporarily save holder on the stack. |
1316 // Read the first word and compare to the native_context_map. | 1312 // Read the first word and compare to the native_context_map. |
1317 LoadP(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); | 1313 LoadP(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
1318 LoadRoot(ip, Heap::kNativeContextMapRootIndex); | 1314 LoadRoot(ip, Heap::kNativeContextMapRootIndex); |
1319 cmp(holder_reg, ip); | 1315 cmp(holder_reg, ip); |
(...skipping 1020 matching lines...) |
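Note on the hunk above: the old code reached the native context in two loads (global object out of the context, then JSGlobalObject::kNativeContextOffset out of the global object); the new code does it with a single ContextMemOperand access on the context register. The helper's definition is not visible in this section, so the following is only a sketch of what it presumably expands to, built from the Context::SlotOffset pattern the old code already uses inline:

  // Sketch only -- assumes ContextMemOperand is defined elsewhere in this
  // patch roughly like this, mirroring the Context::SlotOffset arithmetic
  // the old code used (the slot offset already folds in the heap tag).
  inline MemOperand ContextMemOperand(Register context, int index) {
    return MemOperand(context, Context::SlotOffset(index));
  }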
2340 CEntryStub stub(isolate(), 1); | 2336 CEntryStub stub(isolate(), 1); |
2341 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 2337 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
2342 } | 2338 } |
2343 | 2339 |
2344 | 2340 |
2345 void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag, | 2341 void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag, |
2346 const CallWrapper& call_wrapper) { | 2342 const CallWrapper& call_wrapper) { |
2347 // You can't call a builtin without a valid frame. | 2343 // You can't call a builtin without a valid frame. |
2348 DCHECK(flag == JUMP_FUNCTION || has_frame()); | 2344 DCHECK(flag == JUMP_FUNCTION || has_frame()); |
2349 | 2345 |
2350 GetBuiltinEntry(ip, native_context_index); | 2346 LoadNativeContextSlot(native_context_index, r4); |
| 2347 LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); |
2351 if (flag == CALL_FUNCTION) { | 2348 if (flag == CALL_FUNCTION) { |
2352 call_wrapper.BeforeCall(CallSize(ip)); | 2349 call_wrapper.BeforeCall(CallSize(ip)); |
2353 CallJSEntry(ip); | 2350 CallJSEntry(ip); |
2354 call_wrapper.AfterCall(); | 2351 call_wrapper.AfterCall(); |
2355 } else { | 2352 } else { |
2356 DCHECK(flag == JUMP_FUNCTION); | 2353 DCHECK(flag == JUMP_FUNCTION); |
2357 JumpToJSEntry(ip); | 2354 JumpToJSEntry(ip); |
2358 } | 2355 } |
2359 } | 2356 } |
2360 | 2357 |
2361 | 2358 |
2362 void MacroAssembler::GetBuiltinFunction(Register target, | |
2363 int native_context_index) { | |
2364 // Load the builtins object into target register. | |
2365 LoadP(target, | |
2366 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
2367 LoadP(target, FieldMemOperand(target, JSGlobalObject::kNativeContextOffset)); | |
2368 // Load the JavaScript builtin function from the builtins object. | |
2369 LoadP(target, ContextOperand(target, native_context_index), r0); | |
2370 } | |
2371 | |
2372 | |
2373 void MacroAssembler::GetBuiltinEntry(Register target, | |
2374 int native_context_index) { | |
2375 DCHECK(!target.is(r4)); | |
2376 GetBuiltinFunction(r4, native_context_index); | |
2377 // Load the code entry point from the builtins object. | |
2378 LoadP(target, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); | |
2379 } | |
2380 | |
2381 | |
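The two deleted helpers above (GetBuiltinFunction / GetBuiltinEntry) are subsumed by the inlined sequence in InvokeBuiltin: LoadNativeContextSlot puts the builtin's JSFunction in r4 and the following LoadP pulls its code entry into ip. For comparison, the deleted GetBuiltinEntry would read like this if re-expressed on top of the new helper (a sketch, not part of the patch):

  // Sketch: the old GetBuiltinEntry re-expressed with LoadNativeContextSlot.
  // The patch simply inlines these two loads at the single call site.
  void MacroAssembler::GetBuiltinEntry(Register target,
                                       int native_context_index) {
    DCHECK(!target.is(r4));
    LoadNativeContextSlot(native_context_index, r4);  // builtin JSFunction -> r4
    LoadP(target, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  }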
2382 void MacroAssembler::SetCounter(StatsCounter* counter, int value, | 2359 void MacroAssembler::SetCounter(StatsCounter* counter, int value, |
2383 Register scratch1, Register scratch2) { | 2360 Register scratch1, Register scratch2) { |
2384 if (FLAG_native_code_counters && counter->Enabled()) { | 2361 if (FLAG_native_code_counters && counter->Enabled()) { |
2385 mov(scratch1, Operand(value)); | 2362 mov(scratch1, Operand(value)); |
2386 mov(scratch2, Operand(ExternalReference(counter))); | 2363 mov(scratch2, Operand(ExternalReference(counter))); |
2387 stw(scratch1, MemOperand(scratch2)); | 2364 stw(scratch1, MemOperand(scratch2)); |
2388 } | 2365 } |
2389 } | 2366 } |
2390 | 2367 |
2391 | 2368 |
(...skipping 98 matching lines...) |
2490 } | 2467 } |
2491 } else { | 2468 } else { |
2492 // Slot is in the current function context. Move it into the | 2469 // Slot is in the current function context. Move it into the |
2493 // destination register in case we store into it (the write barrier | 2470 // destination register in case we store into it (the write barrier |
2494 // cannot be allowed to destroy the context in cp). | 2471 // cannot be allowed to destroy the context in cp). |
2495 mr(dst, cp); | 2472 mr(dst, cp); |
2496 } | 2473 } |
2497 } | 2474 } |
2498 | 2475 |
2499 | 2476 |
2500 void MacroAssembler::LoadGlobalProxy(Register dst) { | |
2501 LoadP(dst, GlobalObjectOperand()); | |
2502 LoadP(dst, FieldMemOperand(dst, JSGlobalObject::kGlobalProxyOffset)); | |
2503 } | |
2504 | |
2505 | |
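LoadGlobalProxy disappears from this file as well; presumably it is re-implemented elsewhere in the patch on top of the new native-context helper rather than going through the global object. A hedged sketch of what that replacement would look like, assuming Context::GLOBAL_PROXY_INDEX is the relevant slot:

  // Sketch only -- assumed replacement for the deleted LoadGlobalProxy,
  // reading the global proxy out of the native context instead of the
  // global object.
  void MacroAssembler::LoadGlobalProxy(Register dst) {
    LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
  }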
2506 void MacroAssembler::LoadTransitionedArrayMapConditional( | 2477 void MacroAssembler::LoadTransitionedArrayMapConditional( |
2507 ElementsKind expected_kind, ElementsKind transitioned_kind, | 2478 ElementsKind expected_kind, ElementsKind transitioned_kind, |
2508 Register map_in_out, Register scratch, Label* no_map_match) { | 2479 Register map_in_out, Register scratch, Label* no_map_match) { |
2509 // Load the global or builtins object from the current context. | |
2510 LoadP(scratch, | |
2511 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
2512 LoadP(scratch, | |
2513 FieldMemOperand(scratch, JSGlobalObject::kNativeContextOffset)); | |
2514 | |
2515 // Check that the function's map is the same as the expected cached map. | 2480 // Check that the function's map is the same as the expected cached map. |
2516 LoadP(scratch, | 2481 LoadNativeContextSlot(Context::JS_ARRAY_MAPS_INDEX, scratch); |
2517 MemOperand(scratch, Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX))); | |
2518 size_t offset = expected_kind * kPointerSize + FixedArrayBase::kHeaderSize; | 2482 size_t offset = expected_kind * kPointerSize + FixedArrayBase::kHeaderSize; |
2519 LoadP(ip, FieldMemOperand(scratch, offset)); | 2483 LoadP(ip, FieldMemOperand(scratch, offset)); |
2520 cmp(map_in_out, ip); | 2484 cmp(map_in_out, ip); |
2521 bne(no_map_match); | 2485 bne(no_map_match); |
2522 | 2486 |
2523 // Use the transitioned cached map. | 2487 // Use the transitioned cached map. |
2524 offset = transitioned_kind * kPointerSize + FixedArrayBase::kHeaderSize; | 2488 offset = transitioned_kind * kPointerSize + FixedArrayBase::kHeaderSize; |
2525 LoadP(map_in_out, FieldMemOperand(scratch, offset)); | 2489 LoadP(map_in_out, FieldMemOperand(scratch, offset)); |
2526 } | 2490 } |
2527 | 2491 |
2528 | 2492 |
2529 void MacroAssembler::LoadGlobalFunction(int index, Register function) { | 2493 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { |
2530 // Load the global or builtins object from the current context. | 2494 LoadP(dst, NativeContextMemOperand()); |
2531 LoadP(function, | 2495 LoadP(dst, ContextMemOperand(dst, index)); |
2532 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
2533 // Load the native context from the global or builtins object. | |
2534 LoadP(function, | |
2535 FieldMemOperand(function, JSGlobalObject::kNativeContextOffset)); | |
2536 // Load the function from the native context. | |
2537 LoadP(function, MemOperand(function, Context::SlotOffset(index)), r0); | |
2538 } | 2496 } |
2539 | 2497 |
2540 | 2498 |
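The new LoadNativeContextSlot above is the workhorse behind all of these simplifications: one load to fetch the native context, one load to fetch the requested slot. NativeContextMemOperand is not defined in this section; a minimal sketch of what it presumably is, assuming the current context lives in cp and Context::NATIVE_CONTEXT_INDEX is the same slot used in the earlier hunk:

  // Sketch only -- assumed definition of NativeContextMemOperand, addressing
  // the NATIVE_CONTEXT_INDEX slot of the current context register (cp).
  inline MemOperand NativeContextMemOperand() {
    return ContextMemOperand(cp, Context::NATIVE_CONTEXT_INDEX);
  }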
2541 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2499 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
2542 Register map, | 2500 Register map, |
2543 Register scratch) { | 2501 Register scratch) { |
2544 // Load the initial map. The global functions all have initial maps. | 2502 // Load the initial map. The global functions all have initial maps. |
2545 LoadP(map, | 2503 LoadP(map, |
2546 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2504 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
2547 if (emit_debug_code()) { | 2505 if (emit_debug_code()) { |
(...skipping 1903 matching lines...) |
4451 } | 4409 } |
4452 if (mag.shift > 0) srawi(result, result, mag.shift); | 4410 if (mag.shift > 0) srawi(result, result, mag.shift); |
4453 ExtractBit(r0, dividend, 31); | 4411 ExtractBit(r0, dividend, 31); |
4454 add(result, result, r0); | 4412 add(result, result, r0); |
4455 } | 4413 } |
4456 | 4414 |
4457 } // namespace internal | 4415 } // namespace internal |
4458 } // namespace v8 | 4416 } // namespace v8 |
4459 | 4417 |
4460 #endif // V8_TARGET_ARCH_PPC | 4418 #endif // V8_TARGET_ARCH_PPC |