OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_PPC | 7 #if V8_TARGET_ARCH_PPC |
8 | 8 |
9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 184 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
195 bool function_in_register = true; | 195 bool function_in_register = true; |
196 | 196 |
197 // Possibly allocate a local context. | 197 // Possibly allocate a local context. |
198 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 198 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
199 if (heap_slots > 0) { | 199 if (heap_slots > 0) { |
200 // Argument to NewContext is the function, which is still in r4. | 200 // Argument to NewContext is the function, which is still in r4. |
201 Comment cmnt(masm_, "[ Allocate context"); | 201 Comment cmnt(masm_, "[ Allocate context"); |
202 bool need_write_barrier = true; | 202 bool need_write_barrier = true; |
203 if (FLAG_harmony_scoping && info->scope()->is_script_scope()) { | 203 if (FLAG_harmony_scoping && info->scope()->is_script_scope()) { |
204 __ push(r4); | 204 __ push(r4); |
205 __ Push(info->scope()->GetScopeInfo()); | 205 __ Push(info->scope()->GetScopeInfo(info->isolate())); |
206 __ CallRuntime(Runtime::kNewScriptContext, 2); | 206 __ CallRuntime(Runtime::kNewScriptContext, 2); |
207 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 207 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
208 FastNewContextStub stub(isolate(), heap_slots); | 208 FastNewContextStub stub(isolate(), heap_slots); |
209 __ CallStub(&stub); | 209 __ CallStub(&stub); |
210 // Result of FastNewContextStub is always in new space. | 210 // Result of FastNewContextStub is always in new space. |
211 need_write_barrier = false; | 211 need_write_barrier = false; |
212 } else { | 212 } else { |
213 __ push(r4); | 213 __ push(r4); |
214 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 214 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
215 } | 215 } |
(...skipping 22 matching lines...) Expand all Loading... |
238 } else if (FLAG_debug_code) { | 238 } else if (FLAG_debug_code) { |
239 Label done; | 239 Label done; |
240 __ JumpIfInNewSpace(cp, r3, &done); | 240 __ JumpIfInNewSpace(cp, r3, &done); |
241 __ Abort(kExpectedNewSpaceObject); | 241 __ Abort(kExpectedNewSpaceObject); |
242 __ bind(&done); | 242 __ bind(&done); |
243 } | 243 } |
244 } | 244 } |
245 } | 245 } |
246 } | 246 } |
247 | 247 |
| 248 // Possibly allocate RestParameters |
| 249 int rest_index; |
| 250 Variable* rest_param = scope()->rest_parameter(&rest_index); |
| 251 if (rest_param) { |
| 252 Comment cmnt(masm_, "[ Allocate rest parameter array"); |
| 253 |
| 254 int num_parameters = info->scope()->num_parameters(); |
| 255 int offset = num_parameters * kPointerSize; |
| 256 __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset)); |
| 257 __ mov(r5, Operand(Smi::FromInt(num_parameters))); |
| 258 __ mov(r4, Operand(Smi::FromInt(rest_index))); |
| 259 __ Push(r6, r5, r4); |
| 260 |
| 261 RestParamAccessStub stub(isolate()); |
| 262 __ CallStub(&stub); |
| 263 |
| 264 SetVar(rest_param, r3, r4, r5); |
| 265 } |
| 266 |
248 Variable* arguments = scope()->arguments(); | 267 Variable* arguments = scope()->arguments(); |
249 if (arguments != NULL) { | 268 if (arguments != NULL) { |
250 // Function uses arguments object. | 269 // Function uses arguments object. |
251 Comment cmnt(masm_, "[ Allocate arguments object"); | 270 Comment cmnt(masm_, "[ Allocate arguments object"); |
252 if (!function_in_register) { | 271 if (!function_in_register) { |
253 // Load this again, if it's used by the local context below. | 272 // Load this again, if it's used by the local context below. |
254 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 273 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
255 } else { | 274 } else { |
256 __ mr(r6, r4); | 275 __ mr(r6, r4); |
257 } | 276 } |
258 // Receiver is just before the parameters on the caller's stack. | 277 // Receiver is just before the parameters on the caller's stack. |
259 int num_parameters = info->scope()->num_parameters(); | 278 int num_parameters = info->scope()->num_parameters(); |
260 int offset = num_parameters * kPointerSize; | 279 int offset = num_parameters * kPointerSize; |
261 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset)); | 280 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset)); |
262 __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters)); | 281 __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters)); |
263 __ Push(r6, r5, r4); | 282 __ Push(r6, r5, r4); |
264 | 283 |
265 // Arguments to ArgumentsAccessStub: | 284 // Arguments to ArgumentsAccessStub: |
266 // function, receiver address, parameter count. | 285 // function, receiver address, parameter count. |
267 // The stub will rewrite receiver and parameter count if the previous | 286 // The stub will rewrite receiver and parameter count if the previous |
268 // stack frame was an arguments adapter frame. | 287 // stack frame was an arguments adapter frame. |
| 288 ArgumentsAccessStub::HasNewTarget has_new_target = |
| 289 IsSubclassConstructor(info->function()->kind()) |
| 290 ? ArgumentsAccessStub::HAS_NEW_TARGET |
| 291 : ArgumentsAccessStub::NO_NEW_TARGET; |
269 ArgumentsAccessStub::Type type; | 292 ArgumentsAccessStub::Type type; |
270 if (is_strict(language_mode())) { | 293 if (is_strict(language_mode()) || !is_simple_parameter_list()) { |
271 type = ArgumentsAccessStub::NEW_STRICT; | 294 type = ArgumentsAccessStub::NEW_STRICT; |
272 } else if (function()->has_duplicate_parameters()) { | 295 } else if (function()->has_duplicate_parameters()) { |
273 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; | 296 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; |
274 } else { | 297 } else { |
275 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; | 298 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; |
276 } | 299 } |
277 ArgumentsAccessStub stub(isolate(), type); | 300 ArgumentsAccessStub stub(isolate(), type, has_new_target); |
278 __ CallStub(&stub); | 301 __ CallStub(&stub); |
279 | 302 |
280 SetVar(arguments, r3, r4, r5); | 303 SetVar(arguments, r3, r4, r5); |
281 } | 304 } |
282 | 305 |
283 if (FLAG_trace) { | 306 if (FLAG_trace) { |
284 __ CallRuntime(Runtime::kTraceEnter, 0); | 307 __ CallRuntime(Runtime::kTraceEnter, 0); |
285 } | 308 } |
286 | 309 |
287 // Visit the declarations and body unless there is an illegal | 310 // Visit the declarations and body unless there is an illegal |
(...skipping 137 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
425 | 448 |
426 #ifdef DEBUG | 449 #ifdef DEBUG |
427 // Add a label for checking the size of the code used for returning. | 450 // Add a label for checking the size of the code used for returning. |
428 Label check_exit_codesize; | 451 Label check_exit_codesize; |
429 __ bind(&check_exit_codesize); | 452 __ bind(&check_exit_codesize); |
430 #endif | 453 #endif |
431 // Make sure that the constant pool is not emitted inside of the return | 454 // Make sure that the constant pool is not emitted inside of the return |
432 // sequence. | 455 // sequence. |
433 { | 456 { |
434 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 457 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
435 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; | 458 int32_t arg_count = info_->scope()->num_parameters() + 1; |
| 459 if (IsSubclassConstructor(info_->function()->kind())) { |
| 460 arg_count++; |
| 461 } |
| 462 int32_t sp_delta = arg_count * kPointerSize; |
436 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 463 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
437 __ RecordJSReturn(); | 464 __ RecordJSReturn(); |
438 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); | 465 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); |
439 #if V8_TARGET_ARCH_PPC64 | 466 #if V8_TARGET_ARCH_PPC64 |
440 // With 64bit we may need nop() instructions to ensure we have | 467 // With 64bit we may need nop() instructions to ensure we have |
441 // enough space to SetDebugBreakAtReturn() | 468 // enough space to SetDebugBreakAtReturn() |
442 if (is_int16(sp_delta)) { | 469 if (is_int16(sp_delta)) { |
443 #if !V8_OOL_CONSTANT_POOL | 470 #if !V8_OOL_CONSTANT_POOL |
444 masm_->nop(); | 471 masm_->nop(); |
445 #endif | 472 #endif |
(...skipping 2591 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3037 | 3064 |
3038 // r4: the start position of the scope the call resides in. | 3065 // r4: the start position of the scope the call resides in. |
3039 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position())); | 3066 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position())); |
3040 | 3067 |
3041 // Do the runtime call. | 3068 // Do the runtime call. |
3042 __ Push(r8, r7, r6, r5, r4); | 3069 __ Push(r8, r7, r6, r5, r4); |
3043 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6); | 3070 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6); |
3044 } | 3071 } |
3045 | 3072 |
3046 | 3073 |
3047 void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) { | 3074 void FullCodeGenerator::EmitLoadSuperConstructor() { |
3048 DCHECK(super_ref != NULL); | |
3049 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 3075 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
3050 __ Push(r3); | 3076 __ Push(r3); |
3051 __ CallRuntime(Runtime::kGetPrototype, 1); | 3077 __ CallRuntime(Runtime::kGetPrototype, 1); |
3052 } | 3078 } |
3053 | 3079 |
3054 | 3080 |
3055 void FullCodeGenerator::VisitCall(Call* expr) { | 3081 void FullCodeGenerator::VisitCall(Call* expr) { |
3056 #ifdef DEBUG | 3082 #ifdef DEBUG |
3057 // We want to verify that RecordJSReturnSite gets called on all paths | 3083 // We want to verify that RecordJSReturnSite gets called on all paths |
3058 // through this function. Avoid early returns. | 3084 // through this function. Avoid early returns. |
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3229 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot())); | 3255 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot())); |
3230 | 3256 |
3231 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); | 3257 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); |
3232 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | 3258 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); |
3233 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | 3259 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); |
3234 context()->Plug(r3); | 3260 context()->Plug(r3); |
3235 } | 3261 } |
3236 | 3262 |
3237 | 3263 |
3238 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { | 3264 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { |
3239 SuperReference* super_ref = expr->expression()->AsSuperReference(); | 3265 if (!ValidateSuperCall(expr)) return; |
3240 EmitLoadSuperConstructor(super_ref); | 3266 Variable* new_target_var = scope()->DeclarationScope()->new_target_var(); |
| 3267 GetVar(result_register(), new_target_var); |
| 3268 __ Push(result_register()); |
| 3269 |
| 3270 EmitLoadSuperConstructor(); |
3241 __ push(result_register()); | 3271 __ push(result_register()); |
3242 | 3272 |
3243 Variable* this_var = super_ref->this_var()->var(); | |
3244 | |
3245 GetVar(r3, this_var); | |
3246 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | |
3247 Label uninitialized_this; | |
3248 __ beq(&uninitialized_this); | |
3249 __ mov(r3, Operand(this_var->name())); | |
3250 __ push(r3); | |
3251 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
3252 __ bind(&uninitialized_this); | |
3253 | |
3254 // Push the arguments ("left-to-right") on the stack. | 3273 // Push the arguments ("left-to-right") on the stack. |
3255 ZoneList<Expression*>* args = expr->arguments(); | 3274 ZoneList<Expression*>* args = expr->arguments(); |
3256 int arg_count = args->length(); | 3275 int arg_count = args->length(); |
3257 for (int i = 0; i < arg_count; i++) { | 3276 for (int i = 0; i < arg_count; i++) { |
3258 VisitForStackValue(args->at(i)); | 3277 VisitForStackValue(args->at(i)); |
3259 } | 3278 } |
3260 | 3279 |
3261 // Call the construct call builtin that handles allocation and | 3280 // Call the construct call builtin that handles allocation and |
3262 // constructor invocation. | 3281 // constructor invocation. |
3263 SetSourcePosition(expr->position()); | 3282 SetSourcePosition(expr->position()); |
3264 | 3283 |
3265 // Load function and argument count into r1 and r0. | 3284 // Load function and argument count into r1 and r0. |
3266 __ mov(r3, Operand(arg_count)); | 3285 __ mov(r3, Operand(arg_count)); |
3267 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize)); | 3286 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize)); |
3268 | 3287 |
3269 // Record call targets in unoptimized code. | 3288 // Record call targets in unoptimized code. |
3270 if (FLAG_pretenuring_call_new) { | 3289 if (FLAG_pretenuring_call_new) { |
3271 UNREACHABLE(); | 3290 UNREACHABLE(); |
3272 /* TODO(dslomov): support pretenuring. | 3291 /* TODO(dslomov): support pretenuring. |
3273 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); | 3292 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); |
3274 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() == | 3293 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() == |
3275 expr->CallNewFeedbackSlot().ToInt() + 1); | 3294 expr->CallNewFeedbackSlot().ToInt() + 1); |
3276 */ | 3295 */ |
3277 } | 3296 } |
3278 | 3297 |
3279 __ Move(r5, FeedbackVector()); | 3298 __ Move(r5, FeedbackVector()); |
3280 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot())); | 3299 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot())); |
3281 | 3300 |
3282 // TODO(dslomov): use a different stub and propagate new.target. | 3301 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET); |
3283 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); | |
3284 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | 3302 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); |
3285 | 3303 |
| 3304 __ Drop(1); |
| 3305 |
3286 RecordJSReturnSite(expr); | 3306 RecordJSReturnSite(expr); |
3287 | 3307 |
| 3308 SuperReference* super_ref = expr->expression()->AsSuperReference(); |
| 3309 Variable* this_var = super_ref->this_var()->var(); |
| 3310 GetVar(r4, this_var); |
| 3311 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
| 3312 Label uninitialized_this; |
| 3313 __ beq(&uninitialized_this); |
| 3314 __ mov(r4, Operand(this_var->name())); |
| 3315 __ push(r4); |
| 3316 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 3317 __ bind(&uninitialized_this); |
| 3318 |
3288 EmitVariableAssignment(this_var, Token::INIT_CONST); | 3319 EmitVariableAssignment(this_var, Token::INIT_CONST); |
3289 context()->Plug(r3); | 3320 context()->Plug(r3); |
3290 } | 3321 } |
3291 | 3322 |
3292 | 3323 |
3293 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { | 3324 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { |
3294 ZoneList<Expression*>* args = expr->arguments(); | 3325 ZoneList<Expression*>* args = expr->arguments(); |
3295 DCHECK(args->length() == 1); | 3326 DCHECK(args->length() == 1); |
3296 | 3327 |
3297 VisitForAccumulatorValue(args->at(0)); | 3328 VisitForAccumulatorValue(args->at(0)); |
(...skipping 848 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4146 | 4177 |
4147 __ bind(&runtime); | 4178 __ bind(&runtime); |
4148 __ push(r3); | 4179 __ push(r3); |
4149 __ CallRuntime(Runtime::kCall, args->length()); | 4180 __ CallRuntime(Runtime::kCall, args->length()); |
4150 __ bind(&done); | 4181 __ bind(&done); |
4151 | 4182 |
4152 context()->Plug(r3); | 4183 context()->Plug(r3); |
4153 } | 4184 } |
4154 | 4185 |
4155 | 4186 |
| 4187 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { |
| 4188 Variable* new_target_var = scope()->DeclarationScope()->new_target_var(); |
| 4189 GetVar(result_register(), new_target_var); |
| 4190 __ Push(result_register()); |
| 4191 |
| 4192 EmitLoadSuperConstructor(); |
| 4193 __ mr(r4, result_register()); |
| 4194 __ Push(r4); |
| 4195 |
| 4196 // Check if the calling frame is an arguments adaptor frame. |
| 4197 Label adaptor_frame, args_set_up, runtime; |
| 4198 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 4199 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset)); |
| 4200 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); |
| 4201 __ beq(&adaptor_frame); |
| 4202 |
| 4203 // default constructor has no arguments, so no adaptor frame means no args. |
| 4204 __ li(r3, Operand::Zero()); |
| 4205 __ b(&args_set_up); |
| 4206 |
| 4207 // Copy arguments from adaptor frame. |
| 4208 { |
| 4209 __ bind(&adaptor_frame); |
| 4210 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 4211 __ SmiUntag(r3); |
| 4212 |
| 4213 // Subtract 1 from arguments count, for new.target. |
| 4214 __ subi(r3, r3, Operand(1)); |
| 4215 |
| 4216 // Get arguments pointer in r5. |
| 4217 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); |
| 4218 __ add(r5, r5, r0); |
| 4219 __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 4220 |
| 4221 Label loop; |
| 4222 __ mtctr(r3); |
| 4223 __ bind(&loop); |
| 4224 // Pre-decrement in order to skip receiver. |
| 4225 __ LoadPU(r6, MemOperand(r5, -kPointerSize)); |
| 4226 __ Push(r6); |
| 4227 __ bdnz(&loop); |
| 4228 } |
| 4229 |
| 4230 __ bind(&args_set_up); |
| 4231 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); |
| 4232 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); |
| 4233 |
| 4234 __ Drop(1); |
| 4235 |
| 4236 context()->Plug(result_register()); |
| 4237 } |
| 4238 |
| 4239 |
4156 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { | 4240 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { |
4157 RegExpConstructResultStub stub(isolate()); | 4241 RegExpConstructResultStub stub(isolate()); |
4158 ZoneList<Expression*>* args = expr->arguments(); | 4242 ZoneList<Expression*>* args = expr->arguments(); |
4159 DCHECK(args->length() == 3); | 4243 DCHECK(args->length() == 3); |
4160 VisitForStackValue(args->at(0)); | 4244 VisitForStackValue(args->at(0)); |
4161 VisitForStackValue(args->at(1)); | 4245 VisitForStackValue(args->at(1)); |
4162 VisitForAccumulatorValue(args->at(2)); | 4246 VisitForAccumulatorValue(args->at(2)); |
4163 __ Pop(r5, r4); | 4247 __ Pop(r5, r4); |
4164 __ CallStub(&stub); | 4248 __ CallStub(&stub); |
4165 context()->Plug(r3); | 4249 context()->Plug(r3); |
(...skipping 1195 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5361 return ON_STACK_REPLACEMENT; | 5445 return ON_STACK_REPLACEMENT; |
5362 } | 5446 } |
5363 | 5447 |
5364 DCHECK(interrupt_address == | 5448 DCHECK(interrupt_address == |
5365 isolate->builtins()->OsrAfterStackCheck()->entry()); | 5449 isolate->builtins()->OsrAfterStackCheck()->entry()); |
5366 return OSR_AFTER_STACK_CHECK; | 5450 return OSR_AFTER_STACK_CHECK; |
5367 } | 5451 } |
5368 } | 5452 } |
5369 } // namespace v8::internal | 5453 } // namespace v8::internal |
5370 #endif // V8_TARGET_ARCH_PPC | 5454 #endif // V8_TARGET_ARCH_PPC |
OLD | NEW |