OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #include "arm64/lithium-codegen-arm64.h" | 7 #include "arm64/lithium-codegen-arm64.h" |
8 #include "arm64/lithium-gap-resolver-arm64.h" | 8 #include "arm64/lithium-gap-resolver-arm64.h" |
9 #include "code-stubs.h" | 9 #include "code-stubs.h" |
10 #include "stub-cache.h" | 10 #include "stub-cache.h" |
(...skipping 391 matching lines...)
402 | 402 |
403 | 403 |
404 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 404 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
405 ASSERT(ToRegister(instr->context()).is(cp)); | 405 ASSERT(ToRegister(instr->context()).is(cp)); |
406 ASSERT(ToRegister(instr->function()).Is(x1)); | 406 ASSERT(ToRegister(instr->function()).Is(x1)); |
407 ASSERT(ToRegister(instr->result()).Is(x0)); | 407 ASSERT(ToRegister(instr->result()).Is(x0)); |
408 | 408 |
409 int arity = instr->arity(); | 409 int arity = instr->arity(); |
410 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); | 410 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); |
411 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 411 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 412 after_push_argument_ = false; |
412 } | 413 } |
413 | 414 |
414 | 415 |
415 void LCodeGen::DoCallNew(LCallNew* instr) { | 416 void LCodeGen::DoCallNew(LCallNew* instr) { |
416 ASSERT(ToRegister(instr->context()).is(cp)); | 417 ASSERT(ToRegister(instr->context()).is(cp)); |
417 ASSERT(instr->IsMarkedAsCall()); | 418 ASSERT(instr->IsMarkedAsCall()); |
418 ASSERT(ToRegister(instr->constructor()).is(x1)); | 419 ASSERT(ToRegister(instr->constructor()).is(x1)); |
419 | 420 |
420 __ Mov(x0, instr->arity()); | 421 __ Mov(x0, instr->arity()); |
421 // No cell in x2 for construct type feedback in optimized code. | 422 // No cell in x2 for construct type feedback in optimized code. |
422 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); | 423 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); |
423 | 424 |
424 CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS); | 425 CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS); |
425 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 426 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
| 427 after_push_argument_ = false; |
426 | 428 |
427 ASSERT(ToRegister(instr->result()).is(x0)); | 429 ASSERT(ToRegister(instr->result()).is(x0)); |
428 } | 430 } |
429 | 431 |
430 | 432 |
431 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 433 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
432 ASSERT(instr->IsMarkedAsCall()); | 434 ASSERT(instr->IsMarkedAsCall()); |
433 ASSERT(ToRegister(instr->context()).is(cp)); | 435 ASSERT(ToRegister(instr->context()).is(cp)); |
434 ASSERT(ToRegister(instr->constructor()).is(x1)); | 436 ASSERT(ToRegister(instr->constructor()).is(x1)); |
435 | 437 |
(...skipping 27 matching lines...)
463 __ Bind(&packed_case); | 465 __ Bind(&packed_case); |
464 } | 466 } |
465 | 467 |
466 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); | 468 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); |
467 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 469 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
468 __ Bind(&done); | 470 __ Bind(&done); |
469 } else { | 471 } else { |
470 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); | 472 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); |
471 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); | 473 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); |
472 } | 474 } |
| 475 after_push_argument_ = false; |
473 | 476 |
474 ASSERT(ToRegister(instr->result()).is(x0)); | 477 ASSERT(ToRegister(instr->result()).is(x0)); |
475 } | 478 } |
476 | 479 |
477 | 480 |
478 void LCodeGen::CallRuntime(const Runtime::Function* function, | 481 void LCodeGen::CallRuntime(const Runtime::Function* function, |
479 int num_arguments, | 482 int num_arguments, |
480 LInstruction* instr, | 483 LInstruction* instr, |
481 SaveFPRegsMode save_doubles) { | 484 SaveFPRegsMode save_doubles) { |
482 ASSERT(instr != NULL); | 485 ASSERT(instr != NULL); |
483 | 486 |
484 __ CallRuntime(function, num_arguments, save_doubles); | 487 __ CallRuntime(function, num_arguments, save_doubles); |
485 | 488 |
486 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 489 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
487 } | 490 } |
488 | 491 |
489 | 492 |
490 void LCodeGen::LoadContextFromDeferred(LOperand* context) { | 493 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
491 if (context->IsRegister()) { | 494 if (context->IsRegister()) { |
492 __ Mov(cp, ToRegister(context)); | 495 __ Mov(cp, ToRegister(context)); |
493 } else if (context->IsStackSlot()) { | 496 } else if (context->IsStackSlot()) { |
494 __ Ldr(cp, ToMemOperand(context)); | 497 __ Ldr(cp, ToMemOperand(context, kMustUseFramePointer)); |
495 } else if (context->IsConstantOperand()) { | 498 } else if (context->IsConstantOperand()) { |
496 HConstant* constant = | 499 HConstant* constant = |
497 chunk_->LookupConstant(LConstantOperand::cast(context)); | 500 chunk_->LookupConstant(LConstantOperand::cast(context)); |
498 __ LoadHeapObject(cp, | 501 __ LoadHeapObject(cp, |
499 Handle<HeapObject>::cast(constant->handle(isolate()))); | 502 Handle<HeapObject>::cast(constant->handle(isolate()))); |
500 } else { | 503 } else { |
501 UNREACHABLE(); | 504 UNREACHABLE(); |
502 } | 505 } |
503 } | 506 } |
504 | 507 |
(...skipping 720 matching lines...)
1225 return Operand(0); | 1228 return Operand(0); |
1226 } | 1229 } |
1227 | 1230 |
1228 | 1231 |
1229 static ptrdiff_t ArgumentsOffsetWithoutFrame(ptrdiff_t index) { | 1232 static ptrdiff_t ArgumentsOffsetWithoutFrame(ptrdiff_t index) { |
1230 ASSERT(index < 0); | 1233 ASSERT(index < 0); |
1231 return -(index + 1) * kPointerSize; | 1234 return -(index + 1) * kPointerSize; |
1232 } | 1235 } |
1233 | 1236 |
1234 | 1237 |
1235 MemOperand LCodeGen::ToMemOperand(LOperand* op) const { | 1238 MemOperand LCodeGen::ToMemOperand(LOperand* op, StackMode stack_mode) const { |
1236 ASSERT(op != NULL); | 1239 ASSERT(op != NULL); |
1237 ASSERT(!op->IsRegister()); | 1240 ASSERT(!op->IsRegister()); |
1238 ASSERT(!op->IsDoubleRegister()); | 1241 ASSERT(!op->IsDoubleRegister()); |
1239 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); | 1242 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); |
1240 if (NeedsEagerFrame()) { | 1243 if (NeedsEagerFrame()) { |
1241 return MemOperand(fp, StackSlotOffset(op->index())); | 1244 int fp_offset = StackSlotOffset(op->index()); |
| 1245 if (op->index() >= 0) { |
| 1246 // Loads and stores have a bigger reach with positive offsets than with |
| 1247 // negative ones. When the load or store can't be done in one instruction |
| 1248 // via fp (the negative offset is too big), we try to access the slot via |
| 1249 // jssp (positive offset). We can reference a stack slot from jssp only if |
| 1250 // jssp points at the end of the stack slots. That is not the case when: |
| 1251 //  - stack_mode != kCanUseStackPointer: deferred code has saved the |
| 1252 //    registers. |
| 1253 //  - after_push_argument_: arguments have been pushed for a call. |
| 1254 //  - inlined_arguments_: inlined arguments have been pushed once, so the |
| 1255 //    rest of the function can no longer trust jssp. |
| 1256 //  - saves_caller_doubles: some double registers have been pushed, so jssp |
| 1257 //    points at the end of the double registers, not at the end of the |
| 1258 //    stack slots. |
| 1259 // Also, if the offset from fp is small enough for a single-instruction |
| 1260 // load/store, we use an fp access. |
| 1261 if ((stack_mode == kCanUseStackPointer) && !after_push_argument_ && |
| 1262 !inlined_arguments_ && !is_int9(fp_offset) && |
| 1263 !info()->saves_caller_doubles()) { |
| 1264 int jssp_offset = |
| 1265 (GetStackSlotCount() - op->index() - 1) * kPointerSize; |
| 1266 return MemOperand(masm()->StackPointer(), jssp_offset); |
| 1267 } |
| 1268 } |
| 1269 return MemOperand(fp, fp_offset); |
1242 } else { | 1270 } else { |
1243 // Retrieve parameter without eager stack-frame relative to the | 1271 // Retrieve parameter without eager stack-frame relative to the |
1244 // stack-pointer. | 1272 // stack-pointer. |
1245 return MemOperand(masm()->StackPointer(), | 1273 return MemOperand(masm()->StackPointer(), |
1246 ArgumentsOffsetWithoutFrame(op->index())); | 1274 ArgumentsOffsetWithoutFrame(op->index())); |
1247 } | 1275 } |
1248 } | 1276 } |
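
A minimal standalone sketch of the base-register choice made by the new ToMemOperand branch. The names SlotAddressing and ChooseBase are illustrative only (not V8 API); the guard conditions and the jssp offset arithmetic mirror the patch, assuming 8-byte stack slots and the signed 9-bit byte offset of an unscaled ARM64 load/store (what is_int9 checks).

struct SlotAddressing {
  bool use_jssp;  // address the slot via jssp instead of fp
  int offset;     // byte offset from the chosen base register
};

// fp_offset is what StackSlotOffset() returns for this slot; the flags
// correspond to the conditions checked by the patch.
static SlotAddressing ChooseBase(int slot_index, int fp_offset,
                                 int stack_slot_count, bool can_use_sp,
                                 bool after_push_argument,
                                 bool inlined_arguments,
                                 bool saves_caller_doubles) {
  // An unscaled LDUR/STUR takes a signed 9-bit byte offset (-256..255).
  bool fp_reaches = (fp_offset >= -256) && (fp_offset <= 255);
  if ((slot_index >= 0) && can_use_sp && !after_push_argument &&
      !inlined_arguments && !saves_caller_doubles && !fp_reaches) {
    // jssp points at the last allocated stack slot, so the slot sits at a
    // small positive offset from it.
    int jssp_offset = (stack_slot_count - slot_index - 1) * 8;  // kPointerSize
    return SlotAddressing{true, jssp_offset};
  }
  return SlotAddressing{false, fp_offset};
}
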
1249 | 1277 |
1250 | 1278 |
1251 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { | 1279 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { |
(...skipping 369 matching lines...)
1621 LPointerMap* pointers = instr->pointer_map(); | 1649 LPointerMap* pointers = instr->pointer_map(); |
1622 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); | 1650 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); |
1623 // The number of arguments is stored in argc (receiver) which is x0, as | 1651 // The number of arguments is stored in argc (receiver) which is x0, as |
1624 // expected by InvokeFunction. | 1652 // expected by InvokeFunction. |
1625 ParameterCount actual(argc); | 1653 ParameterCount actual(argc); |
1626 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator); | 1654 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator); |
1627 } | 1655 } |
1628 | 1656 |
1629 | 1657 |
1630 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 1658 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 1659 // We push some arguments here and they will be popped in another block. |
| 1660 // We can't trust that jssp references the end of the stack slots until |
| 1661 // the end of the function. |
| 1662 inlined_arguments_ = true; |
1631 Register result = ToRegister(instr->result()); | 1663 Register result = ToRegister(instr->result()); |
1632 | 1664 |
1633 if (instr->hydrogen()->from_inlined()) { | 1665 if (instr->hydrogen()->from_inlined()) { |
1634 // When we are inside an inlined function, the arguments are the last things | 1666 // When we are inside an inlined function, the arguments are the last things |
1635 // that have been pushed on the stack. Therefore the arguments array can be | 1667 // that have been pushed on the stack. Therefore the arguments array can be |
1636 // accessed directly from jssp. | 1668 // accessed directly from jssp. |
1637 // However in the normal case, it is accessed via fp but there are two words | 1669 // However in the normal case, it is accessed via fp but there are two words |
1638 // on the stack between fp and the arguments (the saved lr and fp) and the | 1670 // on the stack between fp and the arguments (the saved lr and fp) and the |
1639 // LAccessArgumentsAt implementation takes that into account. | 1671 // LAccessArgumentsAt implementation takes that into account. |
1640 // In the inlined case we need to subtract the size of 2 words to jssp to | 1672 // In the inlined case we need to subtract the size of 2 words to jssp to |
(...skipping 348 matching lines...)
1989 // this understanding is correct. | 2021 // this understanding is correct. |
1990 __ Call(code, RelocInfo::CODE_TARGET, TypeFeedbackId::None()); | 2022 __ Call(code, RelocInfo::CODE_TARGET, TypeFeedbackId::None()); |
1991 } else { | 2023 } else { |
1992 ASSERT(instr->target()->IsRegister()); | 2024 ASSERT(instr->target()->IsRegister()); |
1993 Register target = ToRegister(instr->target()); | 2025 Register target = ToRegister(instr->target()); |
1994 generator.BeforeCall(__ CallSize(target)); | 2026 generator.BeforeCall(__ CallSize(target)); |
1995 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 2027 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); |
1996 __ Call(target); | 2028 __ Call(target); |
1997 } | 2029 } |
1998 generator.AfterCall(); | 2030 generator.AfterCall(); |
| 2031 after_push_argument_ = false; |
1999 } | 2032 } |
2000 | 2033 |
2001 | 2034 |
2002 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { | 2035 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { |
2003 ASSERT(instr->IsMarkedAsCall()); | 2036 ASSERT(instr->IsMarkedAsCall()); |
2004 ASSERT(ToRegister(instr->function()).is(x1)); | 2037 ASSERT(ToRegister(instr->function()).is(x1)); |
2005 | 2038 |
2006 if (instr->hydrogen()->pass_argument_count()) { | 2039 if (instr->hydrogen()->pass_argument_count()) { |
2007 __ Mov(x0, Operand(instr->arity())); | 2040 __ Mov(x0, Operand(instr->arity())); |
2008 } | 2041 } |
2009 | 2042 |
2010 // Change context. | 2043 // Change context. |
2011 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); | 2044 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); |
2012 | 2045 |
2013 // Load the code entry address | 2046 // Load the code entry address |
2014 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kCodeEntryOffset)); | 2047 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kCodeEntryOffset)); |
2015 __ Call(x10); | 2048 __ Call(x10); |
2016 | 2049 |
2017 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 2050 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 2051 after_push_argument_ = false; |
2018 } | 2052 } |
2019 | 2053 |
2020 | 2054 |
2021 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 2055 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
2022 CallRuntime(instr->function(), instr->arity(), instr); | 2056 CallRuntime(instr->function(), instr->arity(), instr); |
| 2057 after_push_argument_ = false; |
2023 } | 2058 } |
2024 | 2059 |
2025 | 2060 |
2026 void LCodeGen::DoCallStub(LCallStub* instr) { | 2061 void LCodeGen::DoCallStub(LCallStub* instr) { |
2027 ASSERT(ToRegister(instr->context()).is(cp)); | 2062 ASSERT(ToRegister(instr->context()).is(cp)); |
2028 ASSERT(ToRegister(instr->result()).is(x0)); | 2063 ASSERT(ToRegister(instr->result()).is(x0)); |
2029 switch (instr->hydrogen()->major_key()) { | 2064 switch (instr->hydrogen()->major_key()) { |
2030 case CodeStub::RegExpExec: { | 2065 case CodeStub::RegExpExec: { |
2031 RegExpExecStub stub(isolate()); | 2066 RegExpExecStub stub(isolate()); |
2032 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2067 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2033 break; | 2068 break; |
2034 } | 2069 } |
2035 case CodeStub::SubString: { | 2070 case CodeStub::SubString: { |
2036 SubStringStub stub(isolate()); | 2071 SubStringStub stub(isolate()); |
2037 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2072 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2038 break; | 2073 break; |
2039 } | 2074 } |
2040 case CodeStub::StringCompare: { | 2075 case CodeStub::StringCompare: { |
2041 StringCompareStub stub(isolate()); | 2076 StringCompareStub stub(isolate()); |
2042 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2077 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2043 break; | 2078 break; |
2044 } | 2079 } |
2045 default: | 2080 default: |
2046 UNREACHABLE(); | 2081 UNREACHABLE(); |
2047 } | 2082 } |
| 2083 after_push_argument_ = false; |
2048 } | 2084 } |
2049 | 2085 |
2050 | 2086 |
2051 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 2087 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
2052 GenerateOsrPrologue(); | 2088 GenerateOsrPrologue(); |
2053 } | 2089 } |
2054 | 2090 |
2055 | 2091 |
2056 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 2092 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
2057 Register temp = ToRegister(instr->temp()); | 2093 Register temp = ToRegister(instr->temp()); |
(...skipping 1037 matching lines...)
3095 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3131 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
3096 ParameterCount count(instr->arity()); | 3132 ParameterCount count(instr->arity()); |
3097 __ InvokeFunction(x1, count, CALL_FUNCTION, generator); | 3133 __ InvokeFunction(x1, count, CALL_FUNCTION, generator); |
3098 } else { | 3134 } else { |
3099 CallKnownFunction(known_function, | 3135 CallKnownFunction(known_function, |
3100 instr->hydrogen()->formal_parameter_count(), | 3136 instr->hydrogen()->formal_parameter_count(), |
3101 instr->arity(), | 3137 instr->arity(), |
3102 instr, | 3138 instr, |
3103 x1); | 3139 x1); |
3104 } | 3140 } |
| 3141 after_push_argument_ = false; |
3105 } | 3142 } |
3106 | 3143 |
3107 | 3144 |
3108 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { | 3145 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { |
3109 Register temp1 = ToRegister(instr->temp1()); | 3146 Register temp1 = ToRegister(instr->temp1()); |
3110 Register temp2 = ToRegister(instr->temp2()); | 3147 Register temp2 = ToRegister(instr->temp2()); |
3111 | 3148 |
3112 // Get the frame pointer for the calling frame. | 3149 // Get the frame pointer for the calling frame. |
3113 __ Ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 3150 __ Ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
3114 | 3151 |
(...skipping 1453 matching lines...)
4568 // Nothing to do. | 4605 // Nothing to do. |
4569 } | 4606 } |
4570 | 4607 |
4571 | 4608 |
4572 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 4609 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
4573 LOperand* argument = instr->value(); | 4610 LOperand* argument = instr->value(); |
4574 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { | 4611 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { |
4575 Abort(kDoPushArgumentNotImplementedForDoubleType); | 4612 Abort(kDoPushArgumentNotImplementedForDoubleType); |
4576 } else { | 4613 } else { |
4577 __ Push(ToRegister(argument)); | 4614 __ Push(ToRegister(argument)); |
| 4615 after_push_argument_ = true; |
4578 } | 4616 } |
4579 } | 4617 } |
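
The flag set here pairs with the `after_push_argument_ = false` resets after each call site above, and with `inlined_arguments_` set in DoArgumentsElements. A toy C++ model of that bookkeeping (class and method names are illustrative, not the real LCodeGen) makes the intended invariant explicit:

// jssp-relative slot addressing is only allowed while no pushed-but-not-yet
// consumed arguments (and no inlined arguments) sit on top of the stack slots.
class SlotAddressingState {
 public:
  void OnPushArgument() { after_push_argument_ = true; }     // DoPushArgument
  void OnCallReturned() { after_push_argument_ = false; }    // every Do*Call*
  void OnArgumentsElements() { inlined_arguments_ = true; }  // DoArgumentsElements
  bool CanUseStackPointer() const {
    return !after_push_argument_ && !inlined_arguments_;
  }

 private:
  bool after_push_argument_ = false;
  bool inlined_arguments_ = false;
};
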
4580 | 4618 |
4581 | 4619 |
4582 void LCodeGen::DoReturn(LReturn* instr) { | 4620 void LCodeGen::DoReturn(LReturn* instr) { |
4583 if (FLAG_trace && info()->IsOptimizing()) { | 4621 if (FLAG_trace && info()->IsOptimizing()) { |
4584 // Push the return value on the stack as the parameter. | 4622 // Push the return value on the stack as the parameter. |
4585 // Runtime::TraceExit returns its parameter in x0. We're leaving the code | 4623 // Runtime::TraceExit returns its parameter in x0. We're leaving the code |
4586 // managed by the register allocator and tearing down the frame, it's | 4624 // managed by the register allocator and tearing down the frame, it's |
4587 // safe to write to the context register. | 4625 // safe to write to the context register. |
(...skipping 1288 matching lines...)
5876 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5914 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
5877 // Index is equal to negated out of object property index plus 1. | 5915 // Index is equal to negated out of object property index plus 1. |
5878 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5916 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
5879 __ Ldr(result, FieldMemOperand(result, | 5917 __ Ldr(result, FieldMemOperand(result, |
5880 FixedArray::kHeaderSize - kPointerSize)); | 5918 FixedArray::kHeaderSize - kPointerSize)); |
5881 __ Bind(deferred->exit()); | 5919 __ Bind(deferred->exit()); |
5882 __ Bind(&done); | 5920 __ Bind(&done); |
5883 } | 5921 } |
5884 | 5922 |
5885 } } // namespace v8::internal | 5923 } } // namespace v8::internal |