| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 205 matching lines...) |
| 216 // Possibly allocate a local context. | 216 // Possibly allocate a local context. |
| 217 int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 217 int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 218 if (heap_slots > 0) { | 218 if (heap_slots > 0) { |
| 219 Comment(";;; Allocate local context"); | 219 Comment(";;; Allocate local context"); |
| 220 // Argument to NewContext is the function, which is still in rdi. | 220 // Argument to NewContext is the function, which is still in rdi. |
| 221 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 221 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 222 FastNewContextStub stub(heap_slots); | 222 FastNewContextStub stub(heap_slots); |
| 223 __ CallStub(&stub); | 223 __ CallStub(&stub); |
| 224 } else { | 224 } else { |
| 225 __ Push(rdi); | 225 __ Push(rdi); |
| 226 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 226 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); |
| 227 } | 227 } |
| 228 RecordSafepoint(Safepoint::kNoLazyDeopt); | 228 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 229 // Context is returned in rax. It replaces the context passed to us. | 229 // Context is returned in rax. It replaces the context passed to us. |
| 230 // It's saved in the stack and kept live in rsi. | 230 // It's saved in the stack and kept live in rsi. |
| 231 __ movp(rsi, rax); | 231 __ movp(rsi, rax); |
| 232 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax); | 232 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax); |
| 233 | 233 |
| 234 // Copy any necessary parameters into the context. | 234 // Copy any necessary parameters into the context. |
| 235 int num_parameters = scope()->num_parameters(); | 235 int num_parameters = scope()->num_parameters(); |
| 236 for (int i = 0; i < num_parameters; i++) { | 236 for (int i = 0; i < num_parameters; i++) { |
| (...skipping 3104 matching lines...) |
| 3341 ASSERT(result.is(rsi)); | 3341 ASSERT(result.is(rsi)); |
| 3342 } | 3342 } |
| 3343 } | 3343 } |
| 3344 | 3344 |
| 3345 | 3345 |
| 3346 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3346 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3347 ASSERT(ToRegister(instr->context()).is(rsi)); | 3347 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3348 __ Push(rsi); // The context is the first argument. | 3348 __ Push(rsi); // The context is the first argument. |
| 3349 __ Push(instr->hydrogen()->pairs()); | 3349 __ Push(instr->hydrogen()->pairs()); |
| 3350 __ Push(Smi::FromInt(instr->hydrogen()->flags())); | 3350 __ Push(Smi::FromInt(instr->hydrogen()->flags())); |
| 3351 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3351 CallRuntime(Runtime::kHiddenDeclareGlobals, 3, instr); |
| 3352 } | 3352 } |
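Note: the Smi::FromInt pushes above box small integers as "smis" so they can travel through the runtime call as tagged values. A minimal standalone C++ sketch of the idea, not part of this patch; the 32-bit shift matches x64 V8 of this era, but treat the exact layout as an assumption:

    #include <cstdint>
    #include <cstdio>

    // A smi keeps the integer in the upper half of the 64-bit word, so the
    // low bits stay zero and distinguish smis from heap pointers.
    constexpr int kSmiShift = 32;  // assumed x64 layout

    uint64_t SmiFromInt(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
    }

    int32_t SmiToInt(uint64_t smi) {
      return static_cast<int32_t>(smi >> kSmiShift);
    }

    int main() { std::printf("%d\n", SmiToInt(SmiFromInt(-7))); }  // prints -7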
| 3353 | 3353 |
| 3354 | 3354 |
| 3355 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 3355 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 3356 int formal_parameter_count, | 3356 int formal_parameter_count, |
| 3357 int arity, | 3357 int arity, |
| 3358 LInstruction* instr, | 3358 LInstruction* instr, |
| 3359 RDIState rdi_state) { | 3359 RDIState rdi_state) { |
| 3360 bool dont_adapt_arguments = | 3360 bool dont_adapt_arguments = |
| 3361 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 3361 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
| (...skipping 111 matching lines...) |
| 3473 // unchanged by popping safepoint registers. | 3473 // unchanged by popping safepoint registers. |
| 3474 __ testl(tmp, Immediate(HeapNumber::kSignMask)); | 3474 __ testl(tmp, Immediate(HeapNumber::kSignMask)); |
| 3475 __ j(zero, &done); | 3475 __ j(zero, &done); |
| 3476 | 3476 |
| 3477 __ AllocateHeapNumber(tmp, tmp2, &slow); | 3477 __ AllocateHeapNumber(tmp, tmp2, &slow); |
| 3478 __ jmp(&allocated, Label::kNear); | 3478 __ jmp(&allocated, Label::kNear); |
| 3479 | 3479 |
| 3480 // Slow case: Call the runtime system to do the number allocation. | 3480 // Slow case: Call the runtime system to do the number allocation. |
| 3481 __ bind(&slow); | 3481 __ bind(&slow); |
| 3482 CallRuntimeFromDeferred( | 3482 CallRuntimeFromDeferred( |
| 3483 Runtime::kAllocateHeapNumber, 0, instr, instr->context()); | 3483 Runtime::kHiddenAllocateHeapNumber, 0, instr, instr->context()); |
| 3484 // Set the pointer to the new heap number in tmp. | 3484 // Set the pointer to the new heap number in tmp. |
| 3485 if (!tmp.is(rax)) __ movp(tmp, rax); | 3485 if (!tmp.is(rax)) __ movp(tmp, rax); |
| 3486 // Restore input_reg after call to runtime. | 3486 // Restore input_reg after call to runtime. |
| 3487 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); | 3487 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); |
| 3488 | 3488 |
| 3489 __ bind(&allocated); | 3489 __ bind(&allocated); |
| 3490 __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 3490 __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 3491 __ shl(tmp2, Immediate(1)); | 3491 __ shl(tmp2, Immediate(1)); |
| 3492 __ shr(tmp2, Immediate(1)); | 3492 __ shr(tmp2, Immediate(1)); |
| 3493 __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); | 3493 __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); |
| (...skipping 1106 matching lines...) |
| 4600 // Put a valid pointer value in the stack slot where the result | 4600 // Put a valid pointer value in the stack slot where the result |
| 4601 // register is stored, as this register is in the pointer map, but contains | 4601 // register is stored, as this register is in the pointer map, but contains |
| 4602 // an integer value. | 4602 // an integer value. |
| 4603 __ Set(reg, 0); | 4603 __ Set(reg, 0); |
| 4604 | 4604 |
| 4605 // Preserve the value of all registers. | 4605 // Preserve the value of all registers. |
| 4606 PushSafepointRegistersScope scope(this); | 4606 PushSafepointRegistersScope scope(this); |
| 4607 | 4607 |
| 4608 // NumberTagU uses the context from the frame, rather than | 4608 // NumberTagU uses the context from the frame, rather than |
| 4609 // the environment's HContext or HInlinedContext value. | 4609 // the environment's HContext or HInlinedContext value. |
| 4610 // They only call Runtime::kAllocateHeapNumber. | 4610 // They only call Runtime::kHiddenAllocateHeapNumber. |
| 4611 // The corresponding HChange instructions are added in a phase that does | 4611 // The corresponding HChange instructions are added in a phase that does |
| 4612 // not have easy access to the local context. | 4612 // not have easy access to the local context. |
| 4613 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 4613 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4614 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 4614 __ CallRuntimeSaveDoubles(Runtime::kHiddenAllocateHeapNumber); |
| 4615 RecordSafepointWithRegisters( | 4615 RecordSafepointWithRegisters( |
| 4616 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4616 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4617 __ StoreToSafepointRegisterSlot(reg, rax); | 4617 __ StoreToSafepointRegisterSlot(reg, rax); |
| 4618 } | 4618 } |
| 4619 | 4619 |
| 4620 // Done. Put the value in temp_xmm into the value of the allocated heap | 4620 // Done. Put the value in temp_xmm into the value of the allocated heap |
| 4621 // number. | 4621 // number. |
| 4622 __ bind(&done); | 4622 __ bind(&done); |
| 4623 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); | 4623 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); |
| 4624 } | 4624 } |
| (...skipping 31 matching lines...) |
| 4656 // TODO(3095996): Get rid of this. For now, we need to make the | 4656 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4657 // result register contain a valid pointer because it is already | 4657 // result register contain a valid pointer because it is already |
| 4658 // contained in the register pointer map. | 4658 // contained in the register pointer map. |
| 4659 Register reg = ToRegister(instr->result()); | 4659 Register reg = ToRegister(instr->result()); |
| 4660 __ Move(reg, Smi::FromInt(0)); | 4660 __ Move(reg, Smi::FromInt(0)); |
| 4661 | 4661 |
| 4662 { | 4662 { |
| 4663 PushSafepointRegistersScope scope(this); | 4663 PushSafepointRegistersScope scope(this); |
| 4664 // NumberTagD uses the context from the frame, rather than | 4664 // NumberTagD uses the context from the frame, rather than |
| 4665 // the environment's HContext or HInlinedContext value. | 4665 // the environment's HContext or HInlinedContext value. |
| 4666 // They only call Runtime::kAllocateHeapNumber. | 4666 // They only call Runtime::kHiddenAllocateHeapNumber. |
| 4667 // The corresponding HChange instructions are added in a phase that does | 4667 // The corresponding HChange instructions are added in a phase that does |
| 4668 // not have easy access to the local context. | 4668 // not have easy access to the local context. |
| 4669 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 4669 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4670 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 4670 __ CallRuntimeSaveDoubles(Runtime::kHiddenAllocateHeapNumber); |
| 4671 RecordSafepointWithRegisters( | 4671 RecordSafepointWithRegisters( |
| 4672 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4672 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4673 __ movp(kScratchRegister, rax); | 4673 __ movp(kScratchRegister, rax); |
| 4674 } | 4674 } |
| 4675 __ movp(reg, kScratchRegister); | 4675 __ movp(reg, kScratchRegister); |
| 4676 } | 4676 } |
| 4677 | 4677 |
| 4678 | 4678 |
| 4679 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4679 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 4680 HChange* hchange = instr->hydrogen(); | 4680 HChange* hchange = instr->hydrogen(); |
| (...skipping 531 matching lines...) |
| 5212 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); | 5212 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); |
| 5213 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5213 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
| 5214 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5214 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5215 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); | 5215 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); |
| 5216 } else { | 5216 } else { |
| 5217 flags = AllocateTargetSpace::update(flags, NEW_SPACE); | 5217 flags = AllocateTargetSpace::update(flags, NEW_SPACE); |
| 5218 } | 5218 } |
| 5219 __ Push(Smi::FromInt(flags)); | 5219 __ Push(Smi::FromInt(flags)); |
| 5220 | 5220 |
| 5221 CallRuntimeFromDeferred( | 5221 CallRuntimeFromDeferred( |
| 5222 Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); | 5222 Runtime::kHiddenAllocateInTargetSpace, 2, instr, instr->context()); |
| 5223 __ StoreToSafepointRegisterSlot(result, rax); | 5223 __ StoreToSafepointRegisterSlot(result, rax); |
| 5224 } | 5224 } |
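Note: AllocateTargetSpace::update above packs the target space into a few bits of the allocation flags word before it is pushed as a smi for kHiddenAllocateInTargetSpace. A rough standalone C++ sketch of such a bit-field update; the field positions here are invented for illustration and are not V8's actual layout:

    #include <cstdint>

    enum Space { NEW_SPACE = 0, OLD_POINTER_SPACE = 1, OLD_DATA_SPACE = 2 };

    // Assumed layout: low bits carry flags, a 2-bit field selects the space.
    constexpr int kSpaceShift = 4;
    constexpr uint32_t kSpaceMask = 0x3u << kSpaceShift;

    uint32_t UpdateTargetSpace(uint32_t flags, Space space) {
      return (flags & ~kSpaceMask) |
             (static_cast<uint32_t>(space) << kSpaceShift);
    }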
| 5225 | 5225 |
| 5226 | 5226 |
| 5227 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 5227 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
| 5228 ASSERT(ToRegister(instr->value()).is(rax)); | 5228 ASSERT(ToRegister(instr->value()).is(rax)); |
| 5229 __ Push(rax); | 5229 __ Push(rax); |
| 5230 CallRuntime(Runtime::kToFastProperties, 1, instr); | 5230 CallRuntime(Runtime::kToFastProperties, 1, instr); |
| 5231 } | 5231 } |
| 5232 | 5232 |
| (...skipping 11 matching lines...) |
| 5244 __ movp(rbx, FieldOperand(rcx, literal_offset)); | 5244 __ movp(rbx, FieldOperand(rcx, literal_offset)); |
| 5245 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 5245 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 5246 __ j(not_equal, &materialized, Label::kNear); | 5246 __ j(not_equal, &materialized, Label::kNear); |
| 5247 | 5247 |
| 5248 // Create regexp literal using runtime function | 5248 // Create regexp literal using runtime function |
| 5249 // Result will be in rax. | 5249 // Result will be in rax. |
| 5250 __ Push(rcx); | 5250 __ Push(rcx); |
| 5251 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); | 5251 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); |
| 5252 __ Push(instr->hydrogen()->pattern()); | 5252 __ Push(instr->hydrogen()->pattern()); |
| 5253 __ Push(instr->hydrogen()->flags()); | 5253 __ Push(instr->hydrogen()->flags()); |
| 5254 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 5254 CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4, instr); |
| 5255 __ movp(rbx, rax); | 5255 __ movp(rbx, rax); |
| 5256 | 5256 |
| 5257 __ bind(&materialized); | 5257 __ bind(&materialized); |
| 5258 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 5258 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 5259 Label allocated, runtime_allocate; | 5259 Label allocated, runtime_allocate; |
| 5260 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); | 5260 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); |
| 5261 __ jmp(&allocated, Label::kNear); | 5261 __ jmp(&allocated, Label::kNear); |
| 5262 | 5262 |
| 5263 __ bind(&runtime_allocate); | 5263 __ bind(&runtime_allocate); |
| 5264 __ Push(rbx); | 5264 __ Push(rbx); |
| 5265 __ Push(Smi::FromInt(size)); | 5265 __ Push(Smi::FromInt(size)); |
| 5266 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5266 CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1, instr); |
| 5267 __ Pop(rbx); | 5267 __ Pop(rbx); |
| 5268 | 5268 |
| 5269 __ bind(&allocated); | 5269 __ bind(&allocated); |
| 5270 // Copy the content into the newly allocated memory. | 5270 // Copy the content into the newly allocated memory. |
| 5271 // (Unroll copy loop once for better throughput). | 5271 // (Unroll copy loop once for better throughput). |
| 5272 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 5272 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
| 5273 __ movp(rdx, FieldOperand(rbx, i)); | 5273 __ movp(rdx, FieldOperand(rbx, i)); |
| 5274 __ movp(rcx, FieldOperand(rbx, i + kPointerSize)); | 5274 __ movp(rcx, FieldOperand(rbx, i + kPointerSize)); |
| 5275 __ movp(FieldOperand(rax, i), rdx); | 5275 __ movp(FieldOperand(rax, i), rdx); |
| 5276 __ movp(FieldOperand(rax, i + kPointerSize), rcx); | 5276 __ movp(FieldOperand(rax, i + kPointerSize), rcx); |
| (...skipping 13 matching lines...) |
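Note: the copy loop above clones the materialized regexp boilerplate two pointer-sized words per iteration; a trailing odd word, if any, is presumably copied separately in the skipped lines. A standalone C++ sketch of the same 2x unrolling, as a plain memory copy rather than V8's tagged heap words:

    #include <cstddef>
    #include <cstdint>

    // Copies 'size' bytes (a multiple of the word size) two words at a time.
    void CopyWordsUnrolled2(uintptr_t* dst, const uintptr_t* src, size_t size) {
      size_t words = size / sizeof(uintptr_t);
      size_t i = 0;
      for (; i + 1 < words; i += 2) {  // two words per iteration
        dst[i] = src[i];
        dst[i + 1] = src[i + 1];
      }
      if (i < words) dst[i] = src[i];  // odd trailing word
    }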
| 5290 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5290 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| 5291 FastNewClosureStub stub(instr->hydrogen()->strict_mode(), | 5291 FastNewClosureStub stub(instr->hydrogen()->strict_mode(), |
| 5292 instr->hydrogen()->is_generator()); | 5292 instr->hydrogen()->is_generator()); |
| 5293 __ Move(rbx, instr->hydrogen()->shared_info()); | 5293 __ Move(rbx, instr->hydrogen()->shared_info()); |
| 5294 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5294 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5295 } else { | 5295 } else { |
| 5296 __ Push(rsi); | 5296 __ Push(rsi); |
| 5297 __ Push(instr->hydrogen()->shared_info()); | 5297 __ Push(instr->hydrogen()->shared_info()); |
| 5298 __ PushRoot(pretenure ? Heap::kTrueValueRootIndex : | 5298 __ PushRoot(pretenure ? Heap::kTrueValueRootIndex : |
| 5299 Heap::kFalseValueRootIndex); | 5299 Heap::kFalseValueRootIndex); |
| 5300 CallRuntime(Runtime::kNewClosure, 3, instr); | 5300 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); |
| 5301 } | 5301 } |
| 5302 } | 5302 } |
| 5303 | 5303 |
| 5304 | 5304 |
| 5305 void LCodeGen::DoTypeof(LTypeof* instr) { | 5305 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 5306 ASSERT(ToRegister(instr->context()).is(rsi)); | 5306 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5307 LOperand* input = instr->value(); | 5307 LOperand* input = instr->value(); |
| 5308 EmitPushTaggedOperand(input); | 5308 EmitPushTaggedOperand(input); |
| 5309 CallRuntime(Runtime::kTypeof, 1, instr); | 5309 CallRuntime(Runtime::kTypeof, 1, instr); |
| 5310 } | 5310 } |
| (...skipping 174 matching lines...) |
| 5485 | 5485 |
| 5486 | 5486 |
| 5487 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5487 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 5488 // Nothing to see here, move on! | 5488 // Nothing to see here, move on! |
| 5489 } | 5489 } |
| 5490 | 5490 |
| 5491 | 5491 |
| 5492 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5492 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 5493 PushSafepointRegistersScope scope(this); | 5493 PushSafepointRegistersScope scope(this); |
| 5494 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 5494 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 5495 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 5495 __ CallRuntimeSaveDoubles(Runtime::kHiddenStackGuard); |
| 5496 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); | 5496 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); |
| 5497 ASSERT(instr->HasEnvironment()); | 5497 ASSERT(instr->HasEnvironment()); |
| 5498 LEnvironment* env = instr->environment(); | 5498 LEnvironment* env = instr->environment(); |
| 5499 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5499 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5500 } | 5500 } |
| 5501 | 5501 |
| 5502 | 5502 |
| 5503 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 5503 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
| 5504 class DeferredStackCheck V8_FINAL : public LDeferredCode { | 5504 class DeferredStackCheck V8_FINAL : public LDeferredCode { |
| 5505 public: | 5505 public: |
| (...skipping 144 matching lines...) |
| 5650 FixedArray::kHeaderSize - kPointerSize)); | 5650 FixedArray::kHeaderSize - kPointerSize)); |
| 5651 __ bind(&done); | 5651 __ bind(&done); |
| 5652 } | 5652 } |
| 5653 | 5653 |
| 5654 | 5654 |
| 5655 #undef __ | 5655 #undef __ |
| 5656 | 5656 |
| 5657 } } // namespace v8::internal | 5657 } } // namespace v8::internal |
| 5658 | 5658 |
| 5659 #endif // V8_TARGET_ARCH_X64 | 5659 #endif // V8_TARGET_ARCH_X64 |