Chromium Code Reviews

Unified Diff: src/mips/builtins-mips.cc

Issue 1670143002: Visit the Optimized Code Map on first call rather than closure creation. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix mips64 rebase error. Created 4 years, 8 months ago
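
For orientation before the diff: per the issue title, this change moves the optimized-code-map lookup from closure creation to the first call of the function, by doing the lookup inside the CompileLazy builtin itself. The C++ sketch below restates the control flow that the new MIPS code emits. Every type and helper in it (Closure, CodeMapEntry, CodeMap, Outcome, CompileLazySketch, kNoOsrId) is a simplified stand-in invented for illustration, not a V8 class or API; only the branching structure is taken from the patch.

    // Illustrative only: simplified stand-in types, not V8's real classes.
    #include <cstddef>
    #include <vector>

    struct Closure {                 // stand-in for the JSFunction being called
      void* literals = nullptr;
      void* code_entry = nullptr;
    };

    struct CodeMapEntry {            // stand-in for one optimized-code-map entry
      void* context;                 // weak: native context, nullptr if cleared
      void* code;                    // weak: optimized code, nullptr if cleared
      void* literals;                // weak: literals array, nullptr if cleared
      int osr_ast_id;                // kNoOsrId for the entries this path uses
    };

    struct CodeMap {
      std::vector<CodeMapEntry> entries;
      void* shared_code = nullptr;   // the context-free kSharedCodeIndex slot
    };

    enum class Outcome { kOptimized, kSharedCode, kFullCode, kRuntime };

    constexpr int kNoOsrId = -1;     // stands in for BailoutId::None()

    // Mirrors the flow of Generate_CompileLazy below; Outcome::kRuntime
    // corresponds to the gotta_call_runtime paths that end in a tail call
    // to Runtime::kCompileLazy.
    Outcome CompileLazySketch(Closure& closure, void* native_context,
                              CodeMap& map, void* shared_info_code,
                              bool shared_info_code_is_builtin) {
      // loop_top .. loop_bottom: walk the map from the newest entry backwards.
      for (std::size_t i = map.entries.size(); i-- > 0;) {
        const CodeMapEntry& e = map.entries[i];
        if (e.context != native_context || e.osr_ast_id != kNoOsrId) continue;
        if (e.literals == nullptr) return Outcome::kRuntime;  // gotta_call_runtime
        closure.literals = e.literals;      // "Save the literals in the closure."
        if (e.code != nullptr) {            // "Found literals and code."
          closure.code_entry = e.code;      // install, link into the optimized
          return Outcome::kOptimized;       // function list, and tail-call
        }
        // maybe_call_runtime: no per-context code; try the context-free slot.
        if (map.shared_code != nullptr) {
          closure.code_entry = map.shared_code;
          return Outcome::kSharedCode;
        }
        // try_shared: fall back to the SharedFunctionInfo's own code, unless
        // that is still the CompileLazy builtin itself.
        if (!shared_info_code_is_builtin) {
          closure.code_entry = shared_info_code;
          return Outcome::kFullCode;
        }
        return Outcome::kRuntime;           // gotta_call_runtime_no_stack
      }
      return Outcome::kRuntime;             // no matching entry at all
    }

The stack juggling in the real code (pushing a0/a3/a1 on entry and re-popping them on every exit path) only exists to keep the argument count, new target, and closure registers intact for the callee, as the register-state comment at the top of the builtin says; it has no analogue in this sketch.
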
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_MIPS

 #include "src/codegen.h"
 #include "src/debug/debug.h"
 #include "src/deoptimizer.h"
 #include "src/full-codegen/full-codegen.h"
(...skipping 1210 matching lines...)
   // This simulates the initial call to bytecode handlers in interpreter entry
   // trampoline. The return will never actually be taken, but our stack walker
   // uses this address to determine whether a frame is interpreted.
   __ li(ra, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));

   Generate_EnterBytecodeDispatch(masm);
 }


 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- a0 : argument count (preserved for callee)
+  //  -- a3 : new target (preserved for callee)
+  //  -- a1 : target function (preserved for callee)
+  // -----------------------------------
+  // First lookup code, maybe we don't need to compile!
+  Label gotta_call_runtime, gotta_call_runtime_no_stack;
+  Label maybe_call_runtime;
+  Label try_shared;
+  Label loop_top, loop_bottom;
+
+  Register argument_count = a0;
+  Register closure = a1;
+  Register new_target = a3;
+  __ push(argument_count);
+  __ push(new_target);
+  __ push(closure);
+
+  Register map = a0;
+  Register index = a2;
+  __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
+  __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
+  __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
+  __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));
+
+  // Find literals.
+  // a3 : native context
+  // a2 : length / index
+  // a0 : optimized code map
+  // stack[0] : new target
+  // stack[4] : closure
+  Register native_context = a3;
+  __ lw(native_context, NativeContextMemOperand());
+
+  __ bind(&loop_top);
+  Register temp = a1;
+  Register array_pointer = t1;
+
+  // Does the native context match?
+  __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(array_pointer, map, Operand(at));
+  __ lw(temp, FieldMemOperand(array_pointer,
+                              SharedFunctionInfo::OffsetToPreviousContext()));
+  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
+  __ Branch(&loop_bottom, ne, temp, Operand(native_context));
+  // OSR id set to none?
+  __ lw(temp, FieldMemOperand(array_pointer,
+                              SharedFunctionInfo::OffsetToPreviousOsrAstId()));
+  const int bailout_id = BailoutId::None().ToInt();
+  __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));
+  // Literals available?
+  __ lw(temp, FieldMemOperand(array_pointer,
+                              SharedFunctionInfo::OffsetToPreviousLiterals()));
+  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
+  __ JumpIfSmi(temp, &gotta_call_runtime);
+
+  // Save the literals in the closure.
+  __ lw(t0, MemOperand(sp, 0));
+  __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
+  __ push(index);
+  __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
+                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ pop(index);
+
+  // Code available?
+  Register entry = t0;
+  __ lw(entry,
+        FieldMemOperand(array_pointer,
+                        SharedFunctionInfo::OffsetToPreviousCachedCode()));
+  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
+  __ JumpIfSmi(entry, &maybe_call_runtime);
+
+  // Found literals and code. Get them into the closure and return.
+  __ pop(closure);
+  // Store code entry in the closure.
+  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
+
+  Label install_optimized_code_and_tailcall;
+  __ bind(&install_optimized_code_and_tailcall);
+  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
+  __ RecordWriteCodeEntryField(closure, entry, t1);
+
+  // Link the closure into the optimized function list.
+  // t0 : code entry
+  // a3 : native context
+  // a1 : closure
+  __ lw(t1,
+        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
+  __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
+  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
+                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  const int function_list_offset =
+      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
+  __ sw(closure,
+        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
+  // Save closure before the write barrier.
+  __ mov(t1, closure);
+  __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
+                            kRAHasNotBeenSaved, kDontSaveFPRegs);
+  __ mov(closure, t1);
+  __ pop(new_target);
+  __ pop(argument_count);
+  __ Jump(entry);
+
+  __ bind(&loop_bottom);
+  __ Subu(index, index,
+          Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
+  __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
+
+  // We found neither literals nor code.
+  __ jmp(&gotta_call_runtime);
+
+  __ bind(&maybe_call_runtime);
+  __ pop(closure);
+
+  // Last possibility. Check the context free optimized code map entry.
+  __ lw(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
+                                        SharedFunctionInfo::kSharedCodeIndex));
+  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
+  __ JumpIfSmi(entry, &try_shared);
+
+  // Store code entry in the closure.
+  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ jmp(&install_optimized_code_and_tailcall);
+
+  __ bind(&try_shared);
+  __ pop(new_target);
+  __ pop(argument_count);
+  // Is the full code valid?
+  __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
+  __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
+  __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset));
+  __ And(t1, t1, Operand(Code::KindField::kMask));
+  __ srl(t1, t1, Code::KindField::kShift);
+  __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN));
+  // Yes, install the full code.
+  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
+  __ RecordWriteCodeEntryField(closure, entry, t1);
+  __ Jump(entry);
+
+  __ bind(&gotta_call_runtime);
+  __ pop(closure);
+  __ pop(new_target);
+  __ pop(argument_count);
+  __ bind(&gotta_call_runtime_no_stack);
   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
 }
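
A small detail in the loop above that is easy to miss: the index stays a Smi the whole time. Assuming the usual 32-bit layout (Smis tagged by a 1-bit shift, so kSmiTagSize == 1, and 4-byte pointers, so kPointerSizeLog2 == 2), the single sll at loop_top shifts the tagged index left by kPointerSizeLog2 - kSmiTagSize == 1, which turns it directly into the byte offset index * kPointerSize without ever untagging it. The standalone check below only verifies that arithmetic; the constants are assumptions about the 32-bit MIPS configuration, not values read from the V8 headers.

    // Verifies the Smi-index-to-byte-offset shift used at loop_top, assuming
    // 32-bit Smis (value << 1) and 4-byte pointers. Not V8 code.
    #include <cassert>
    #include <cstdint>

    namespace {
    constexpr int kSmiTagSize = 1;       // assumption: 32-bit Smi tagging
    constexpr int kPointerSizeLog2 = 2;  // assumption: 4-byte pointers (MIPS32)
    constexpr uint32_t kPointerSize = 1u << kPointerSizeLog2;

    constexpr uint32_t SmiFromInt(uint32_t value) { return value << kSmiTagSize; }
    }  // namespace

    int main() {
      for (uint32_t index = 0; index < 1000; ++index) {
        uint32_t tagged = SmiFromInt(index);
        // The emitted instruction: __ sll(at, index, kPointerSizeLog2 - kSmiTagSize)
        uint32_t byte_offset = tagged << (kPointerSizeLog2 - kSmiTagSize);
        assert(byte_offset == index * kPointerSize);
      }
      return 0;
    }

The same linearity of Smi tagging is why the loop can compare against Smi::FromInt(2) and Smi::FromInt(1) and subtract Smi::FromInt(SharedFunctionInfo::kEntryLength) directly on the tagged values.
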


 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
   GenerateTailCallToReturnedCode(masm,
                                  Runtime::kCompileOptimized_NotConcurrent);
 }


(...skipping 1469 matching lines...)
   }
 }


 #undef __

 } // namespace internal
 } // namespace v8

 #endif // V8_TARGET_ARCH_MIPS
