OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1172 matching lines...) | |
1183 __ Branch(&done, ne, a0, Operand(zero_reg)); | 1183 __ Branch(&done, ne, a0, Operand(zero_reg)); |
1184 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); | 1184 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); |
1185 __ push(t2); | 1185 __ push(t2); |
1186 __ Addu(a0, a0, Operand(1)); | 1186 __ Addu(a0, a0, Operand(1)); |
1187 __ bind(&done); | 1187 __ bind(&done); |
1188 } | 1188 } |
1189 | 1189 |
1190 // 2. Get the function to call (passed as receiver) from the stack, check | 1190 // 2. Get the function to call (passed as receiver) from the stack, check |
1191 // if it is a function. | 1191 // if it is a function. |
1192 // a0: actual number of arguments | 1192 // a0: actual number of arguments |
1193 Label non_function; | 1193 Label slow, non_function; |
1194 __ sll(at, a0, kPointerSizeLog2); | 1194 __ sll(at, a0, kPointerSizeLog2); |
1195 __ addu(at, sp, at); | 1195 __ addu(at, sp, at); |
1196 __ lw(a1, MemOperand(at)); | 1196 __ lw(a1, MemOperand(at)); |
1197 __ And(at, a1, Operand(kSmiTagMask)); | 1197 __ And(at, a1, Operand(kSmiTagMask)); |
1198 __ Branch(&non_function, eq, at, Operand(zero_reg)); | 1198 __ Branch(&non_function, eq, at, Operand(zero_reg)); |
1199 __ GetObjectType(a1, a2, a2); | 1199 __ GetObjectType(a1, a2, a2); |
1200 __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_TYPE)); | 1200 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); |
1201 | 1201 |
1202 // 3a. Patch the first argument if necessary when calling a function. | 1202 // 3a. Patch the first argument if necessary when calling a function. |
1203 // a0: actual number of arguments | 1203 // a0: actual number of arguments |
1204 // a1: function | 1204 // a1: function |
1205 Label shift_arguments; | 1205 Label shift_arguments; |
1206 __ li(t0, Operand(0, RelocInfo::NONE)); // Indicate regular JS_FUNCTION. | |
1206 { Label convert_to_object, use_global_receiver, patch_receiver; | 1207 { Label convert_to_object, use_global_receiver, patch_receiver; |
1207 // Change context eagerly in case we need the global receiver. | 1208 // Change context eagerly in case we need the global receiver. |
1208 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 1209 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
1209 | 1210 |
1210 // Do not transform the receiver for strict mode functions. | 1211 // Do not transform the receiver for strict mode functions. |
1211 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 1212 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1212 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); | 1213 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); |
1213 __ And(t0, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1214 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + |
1214 kSmiTagSize))); | 1215 kSmiTagSize))); |
1215 __ Branch(&shift_arguments, ne, t0, Operand(zero_reg)); | 1216 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); |
1216 | 1217 |
1217 // Do not transform the receiver for native (Compilerhints already in a3). | 1218 // Do not transform the receiver for native (Compilerhints already in a3). |
1218 __ And(t0, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1219 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
1219 __ Branch(&shift_arguments, ne, t0, Operand(zero_reg)); | 1220 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); |
1220 | 1221 |
1221 // Compute the receiver in non-strict mode. | 1222 // Compute the receiver in non-strict mode. |
1222 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). | 1223 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). |
1223 __ sll(at, a0, kPointerSizeLog2); | 1224 __ sll(at, a0, kPointerSizeLog2); |
1224 __ addu(a2, sp, at); | 1225 __ addu(a2, sp, at); |
1225 __ lw(a2, MemOperand(a2, -kPointerSize)); | 1226 __ lw(a2, MemOperand(a2, -kPointerSize)); |
1226 // a0: actual number of arguments | 1227 // a0: actual number of arguments |
1227 // a1: function | 1228 // a1: function |
1228 // a2: first argument | 1229 // a2: first argument |
1229 __ JumpIfSmi(a2, &convert_to_object, t2); | 1230 __ JumpIfSmi(a2, &convert_to_object, t2); |
(...skipping 12 matching lines...) | |
1242 __ sll(a0, a0, kSmiTagSize); // Smi tagged. | 1243 __ sll(a0, a0, kSmiTagSize); // Smi tagged. |
1243 __ push(a0); | 1244 __ push(a0); |
1244 | 1245 |
1245 __ push(a2); | 1246 __ push(a2); |
1246 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1247 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
1247 __ mov(a2, v0); | 1248 __ mov(a2, v0); |
1248 | 1249 |
1249 __ pop(a0); | 1250 __ pop(a0); |
1250 __ sra(a0, a0, kSmiTagSize); // Un-tag. | 1251 __ sra(a0, a0, kSmiTagSize); // Un-tag. |
1251 __ LeaveInternalFrame(); | 1252 __ LeaveInternalFrame(); |
1252 // Restore the function to a1. | 1253 // Restore the function to a1, and the flag to t0. |
1253 __ sll(at, a0, kPointerSizeLog2); | 1254 __ sll(at, a0, kPointerSizeLog2); |
1254 __ addu(at, sp, at); | 1255 __ addu(at, sp, at); |
1255 __ lw(a1, MemOperand(at)); | 1256 __ lw(a1, MemOperand(at)); |
1257 __ li(t0, Operand(0, RelocInfo::NONE)); | |
1256 __ Branch(&patch_receiver); | 1258 __ Branch(&patch_receiver); |
1257 | 1259 |
1258 // Use the global receiver object from the called function as the | 1260 // Use the global receiver object from the called function as the |
1259 // receiver. | 1261 // receiver. |
1260 __ bind(&use_global_receiver); | 1262 __ bind(&use_global_receiver); |
1261 const int kGlobalIndex = | 1263 const int kGlobalIndex = |
1262 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 1264 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
1263 __ lw(a2, FieldMemOperand(cp, kGlobalIndex)); | 1265 __ lw(a2, FieldMemOperand(cp, kGlobalIndex)); |
1264 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); | 1266 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); |
1265 __ lw(a2, FieldMemOperand(a2, kGlobalIndex)); | 1267 __ lw(a2, FieldMemOperand(a2, kGlobalIndex)); |
1266 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); | 1268 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); |
1267 | 1269 |
1268 __ bind(&patch_receiver); | 1270 __ bind(&patch_receiver); |
1269 __ sll(at, a0, kPointerSizeLog2); | 1271 __ sll(at, a0, kPointerSizeLog2); |
1270 __ addu(a3, sp, at); | 1272 __ addu(a3, sp, at); |
1271 __ sw(a2, MemOperand(a3, -kPointerSize)); | 1273 __ sw(a2, MemOperand(a3, -kPointerSize)); |
1272 | 1274 |
1273 __ Branch(&shift_arguments); | 1275 __ Branch(&shift_arguments); |
1274 } | 1276 } |
1275 | 1277 |
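The two And/Branch tests near the top of this block read the compiler hints out of the SharedFunctionInfo, which stores them as a smi, so each flag index is shifted by kSmiTagSize before the single-bit mask is built. A minimal C++ sketch of the same predicate, assuming illustrative bit positions (the real ones are defined in SharedFunctionInfo and are not shown in this hunk):

#include <cstdint>

constexpr int kSmiTagSize = 1;             // 32-bit smi tag width
constexpr int kStrictModeFunctionBit = 2;  // illustrative position
constexpr int kNativeBit = 3;              // illustrative position

// True when the callee is strict-mode or native, i.e. the receiver must be
// passed through untouched and the transformation above is skipped.
bool ReceiverLeftUntouched(uint32_t compiler_hints_smi) {
  const uint32_t mask = (1u << (kStrictModeFunctionBit + kSmiTagSize)) |
                        (1u << (kNativeBit + kSmiTagSize));
  return (compiler_hints_smi & mask) != 0;
}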
1276 // 3b. Patch the first argument when calling a non-function. The | 1278 // 3b. Check for function proxy. |
1279 __ bind(&slow); | |
1280 __ li(t0, Operand(1, RelocInfo::NONE)); // Indicate function proxy. | |
1281 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE)); | |
1282 | |
1283 __ bind(&non_function); | |
1284 __ li(t0, Operand(2, RelocInfo::NONE)); // Indicate non-function. | |
1285 | |
1286 // 3c. Patch the first argument when calling a non-function. The | |
1277 // CALL_NON_FUNCTION builtin expects the non-function callee as | 1287 // CALL_NON_FUNCTION builtin expects the non-function callee as |
1278 // receiver, so overwrite the first argument which will ultimately | 1288 // receiver, so overwrite the first argument which will ultimately |
1279 // become the receiver. | 1289 // become the receiver. |
1280 // a0: actual number of arguments | 1290 // a0: actual number of arguments |
1281 // a1: function | 1291 // a1: function |
1282 __ bind(&non_function); | 1292 // t0: call type (0: JS function, 1: function proxy, 2: non-function) |
1283 // Restore the function in case it has been modified. | |
1284 __ sll(at, a0, kPointerSizeLog2); | 1293 __ sll(at, a0, kPointerSizeLog2); |
1285 __ addu(a2, sp, at); | 1294 __ addu(a2, sp, at); |
1286 __ sw(a1, MemOperand(a2, -kPointerSize)); | 1295 __ sw(a1, MemOperand(a2, -kPointerSize)); |
1287 // Clear a1 to indicate a non-function being called. | |
1288 __ mov(a1, zero_reg); | |
1289 | 1296 |
1290 // 4. Shift arguments and return address one slot down on the stack | 1297 // 4. Shift arguments and return address one slot down on the stack |
1291 // (overwriting the original receiver). Adjust argument count to make | 1298 // (overwriting the original receiver). Adjust argument count to make |
1292 // the original first argument the new receiver. | 1299 // the original first argument the new receiver. |
1293 // a0: actual number of arguments | 1300 // a0: actual number of arguments |
1294 // a1: function | 1301 // a1: function |
1302 // t0: call type (0: JS function, 1: function proxy, 2: non-function) | |
1295 __ bind(&shift_arguments); | 1303 __ bind(&shift_arguments); |
1296 { Label loop; | 1304 { Label loop; |
1297 // Calculate the copy start address (destination). Copy end address is sp. | 1305 // Calculate the copy start address (destination). Copy end address is sp. |
1298 __ sll(at, a0, kPointerSizeLog2); | 1306 __ sll(at, a0, kPointerSizeLog2); |
1299 __ addu(a2, sp, at); | 1307 __ addu(a2, sp, at); |
1300 | 1308 |
1301 __ bind(&loop); | 1309 __ bind(&loop); |
1302 __ lw(at, MemOperand(a2, -kPointerSize)); | 1310 __ lw(at, MemOperand(a2, -kPointerSize)); |
1303 __ sw(at, MemOperand(a2)); | 1311 __ sw(at, MemOperand(a2)); |
1304 __ Subu(a2, a2, Operand(kPointerSize)); | 1312 __ Subu(a2, a2, Operand(kPointerSize)); |
1305 __ Branch(&loop, ne, a2, Operand(sp)); | 1313 __ Branch(&loop, ne, a2, Operand(sp)); |
1306 // Adjust the actual number of arguments and remove the top element | 1314 // Adjust the actual number of arguments and remove the top element |
1307 // (which is a copy of the last argument). | 1315 // (which is a copy of the last argument). |
1308 __ Subu(a0, a0, Operand(1)); | 1316 __ Subu(a0, a0, Operand(1)); |
1309 __ Pop(); | 1317 __ Pop(); |
1310 } | 1318 } |
1311 | 1319 |
1312 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin. | 1320 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, |
1321 // or a function proxy via CALL_FUNCTION_PROXY. | |
1313 // a0: actual number of arguments | 1322 // a0: actual number of arguments |
1314 // a1: function | 1323 // a1: function |
1315 { Label function; | 1324 // t0: call type (0: JS function, 1: function proxy, 2: non-function) |
1316 __ Branch(&function, ne, a1, Operand(zero_reg)); | 1325 { Label function, non_proxy; |
1317 __ mov(a2, zero_reg); // expected arguments is 0 for CALL_NON_FUNCTION | 1326 __ Branch(&function, eq, t0, Operand(zero_reg)); |
1327 // Expected number of arguments is 0 for CALL_NON_FUNCTION. | |
1328 __ mov(a2, zero_reg); | |
1329 __ SetCallKind(t1, CALL_AS_METHOD); | |
1330 __ Branch(&non_proxy, ne, t0, Operand(1)); | |
1331 | |
1332 __ push(a1); // Re-add proxy object as additional argument. | |
1333 __ Addu(a0, a0, Operand(1)); | |
1334 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); | |
1335 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | |
1336 RelocInfo::CODE_TARGET); | |
1337 | |
1338 __ bind(&non_proxy); | |
1318 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); | 1339 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); |
1319 __ SetCallKind(t1, CALL_AS_METHOD); | |
1320 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 1340 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
1321 RelocInfo::CODE_TARGET); | 1341 RelocInfo::CODE_TARGET); |
1322 __ bind(&function); | 1342 __ bind(&function); |
1323 } | 1343 } |
1324 | 1344 |
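The value kept in t0 through sections 3a-5a is a three-way call type, and 5a dispatches on it before falling through to the JS-function path in 5b. A plain C++ restatement of that control flow, offered only as a sketch (the enum and function names are hypothetical; the three-way split is what mirrors the assembly above):

enum class CallType { kJSFunction = 0, kFunctionProxy = 1, kNonFunction = 2 };

void DispatchCall(CallType type) {
  switch (type) {
    case CallType::kJSFunction:
      // Fall through to 5b: compare expected vs. actual argument count and
      // tail-call the function's code, going through the adaptor if needed.
      break;
    case CallType::kFunctionProxy:
      // Re-push the proxy as an extra argument, load CALL_FUNCTION_PROXY and
      // tail-call it through the ArgumentsAdaptorTrampoline.
      break;
    case CallType::kNonFunction:
      // Expected argument count is 0; load CALL_NON_FUNCTION and tail-call it
      // through the ArgumentsAdaptorTrampoline (it raises the TypeError).
      break;
  }
}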
1325 // 5b. Get the code to call from the function and check that the number of | 1345 // 5b. Get the code to call from the function and check that the number of |
1326 // expected arguments matches what we're providing. If so, jump | 1346 // expected arguments matches what we're providing. If so, jump |
1327 // (tail-call) to the code in register edx without checking arguments. | 1347 // (tail-call) to the code in register edx without checking arguments. |
1328 // a0: actual number of arguments | 1348 // a0: actual number of arguments |
1329 // a1: function | 1349 // a1: function |
(...skipping 22 matching lines...) | |
1352 | 1372 |
1353 __ EnterInternalFrame(); | 1373 __ EnterInternalFrame(); |
1354 | 1374 |
1355 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. | 1375 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. |
1356 __ push(a0); | 1376 __ push(a0); |
1357 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. | 1377 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. |
1358 __ push(a0); | 1378 __ push(a0); |
1359 // Returns (in v0) number of arguments to copy to stack as Smi. | 1379 // Returns (in v0) number of arguments to copy to stack as Smi. |
1360 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 1380 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
1361 | 1381 |
1362 // Check the stack for overflow. We are not trying need to catch | 1382 // Check the stack for overflow. We are not trying to catch |
1363 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1383 // interruptions (e.g. debug break and preemption) here, so the "real stack |
1364 // limit" is checked. | 1384 // limit" is checked. |
1365 Label okay; | 1385 Label okay; |
1366 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 1386 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
1367 // Make a2 the space we have left. The stack might already be overflowed | 1387 // Make a2 the space we have left. The stack might already be overflowed |
1368 // here which will cause a2 to become negative. | 1388 // here which will cause a2 to become negative. |
1369 __ subu(a2, sp, a2); | 1389 __ subu(a2, sp, a2); |
1370 // Check if the arguments will overflow the stack. | 1390 // Check if the arguments will overflow the stack. |
1371 __ sll(t0, v0, kPointerSizeLog2 - kSmiTagSize); | 1391 __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize); |
1372 __ Branch(&okay, gt, a2, Operand(t0)); // Signed comparison. | 1392 __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison. |
1373 | 1393 |
1374 // Out of stack space. | 1394 // Out of stack space. |
1375 __ lw(a1, MemOperand(fp, kFunctionOffset)); | 1395 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
1376 __ push(a1); | 1396 __ push(a1); |
1377 __ push(v0); | 1397 __ push(v0); |
1378 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); | 1398 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); |
1379 // End of stack check. | 1399 // End of stack check. |
1380 | 1400 |
1381 // Push current limit and index. | 1401 // Push current limit and index. |
1382 __ bind(&okay); | 1402 __ bind(&okay); |
1383 __ push(v0); // Limit. | 1403 __ push(v0); // Limit. |
1384 __ mov(a1, zero_reg); // Initial index. | 1404 __ mov(a1, zero_reg); // Initial index. |
1385 __ push(a1); | 1405 __ push(a1); |
1386 | 1406 |
1407 // Get the receiver. | |
1408 __ lw(a0, MemOperand(fp, kRecvOffset)); | |
1409 | |
1410 // Check that the function is a JS function (otherwise it must be a proxy). | |
1411 Label push_receiver; | |
1412 __ lw(a1, MemOperand(fp, kFunctionOffset)); | |
1413 __ GetObjectType(a1, a2, a2); | |
1414 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); | |
1415 | |
1387 // Change context eagerly to get the right global object if necessary. | 1416 // Change context eagerly to get the right global object if necessary. |
1388 __ lw(a0, MemOperand(fp, kFunctionOffset)); | 1417 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
1389 __ lw(cp, FieldMemOperand(a0, JSFunction::kContextOffset)); | 1418 // Load the shared function info while the function is still in r1. |
Yang 2011/09/15 09:51:14: don't you mean a1 here?
Paul Lind 2011/09/15 15:12:11: Of course :-). Done.
 | |
1390 // Load the shared function info while the function is still in a0. | 1419 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
1391 __ lw(a1, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset)); | |
1392 | 1420 |
1393 // Compute the receiver. | 1421 // Compute the receiver. |
1394 Label call_to_object, use_global_receiver, push_receiver; | |
1395 __ lw(a0, MemOperand(fp, kRecvOffset)); | |
1396 | |
1397 // Do not transform the receiver for strict mode functions. | 1422 // Do not transform the receiver for strict mode functions. |
1398 __ lw(a2, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset)); | 1423 Label call_to_object, use_global_receiver; |
1399 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | 1424 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); |
1425 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + | |
1400 kSmiTagSize))); | 1426 kSmiTagSize))); |
1401 __ Branch(&push_receiver, ne, t0, Operand(zero_reg)); | 1427 __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); |
1402 | 1428 |
1403 // Do not transform the receiver for native (Compilerhints already in a2). | 1429 // Do not transform the receiver for native (Compilerhints already in a2). |
1404 __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); | 1430 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); |
1405 __ Branch(&push_receiver, ne, t0, Operand(zero_reg)); | 1431 __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); |
1406 | 1432 |
1407 // Compute the receiver in non-strict mode. | 1433 // Compute the receiver in non-strict mode. |
1408 __ And(t0, a0, Operand(kSmiTagMask)); | 1434 __ And(t3, a0, Operand(kSmiTagMask)); |
1409 __ Branch(&call_to_object, eq, t0, Operand(zero_reg)); | 1435 __ Branch(&call_to_object, eq, t3, Operand(zero_reg)); |
1410 __ LoadRoot(a1, Heap::kNullValueRootIndex); | 1436 __ LoadRoot(a1, Heap::kNullValueRootIndex); |
1411 __ Branch(&use_global_receiver, eq, a0, Operand(a1)); | 1437 __ Branch(&use_global_receiver, eq, a0, Operand(a1)); |
1412 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 1438 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
1413 __ Branch(&use_global_receiver, eq, a0, Operand(a2)); | 1439 __ Branch(&use_global_receiver, eq, a0, Operand(a2)); |
1414 | 1440 |
1415 // Check if the receiver is already a JavaScript object. | 1441 // Check if the receiver is already a JavaScript object. |
1416 // a0: receiver | 1442 // a0: receiver |
1417 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1443 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
1418 __ GetObjectType(a0, a1, a1); | 1444 __ GetObjectType(a0, a1, a1); |
1419 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); | 1445 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); |
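Taken together, the checks above implement the usual non-strict receiver coercion for Function.prototype.apply: strict and native callees get the receiver unchanged, smis and other non-spec-objects are boxed via TO_OBJECT, and null or undefined are replaced by the global receiver. A compact C++ sketch of those rules, with hypothetical stand-in types (Value, ToObject and GlobalReceiver are placeholders, not V8 API):

struct Value {
  bool is_smi() const;               // small integer, never a valid receiver
  bool is_null_or_undefined() const;
  bool is_spec_object() const;       // instance type >= FIRST_SPEC_OBJECT_TYPE
};

Value ToObject(const Value& v);      // stands in for Builtins::TO_OBJECT
Value GlobalReceiver();              // global receiver from the callee context

Value ComputeReceiver(const Value& receiver, bool strict_or_native) {
  if (strict_or_native) return receiver;                  // leave untouched
  if (receiver.is_smi()) return ToObject(receiver);       // box it
  if (receiver.is_null_or_undefined()) return GlobalReceiver();
  if (!receiver.is_spec_object()) return ToObject(receiver);
  return receiver;                                        // already an object
}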
(...skipping 40 matching lines...) | |
1460 // Use inline caching to access the arguments. | 1486 // Use inline caching to access the arguments. |
1461 __ lw(a0, MemOperand(fp, kIndexOffset)); | 1487 __ lw(a0, MemOperand(fp, kIndexOffset)); |
1462 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); | 1488 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); |
1463 __ sw(a0, MemOperand(fp, kIndexOffset)); | 1489 __ sw(a0, MemOperand(fp, kIndexOffset)); |
1464 | 1490 |
1465 // Test if the copy loop has finished copying all the elements from the | 1491 // Test if the copy loop has finished copying all the elements from the |
1466 // arguments object. | 1492 // arguments object. |
1467 __ bind(&entry); | 1493 __ bind(&entry); |
1468 __ lw(a1, MemOperand(fp, kLimitOffset)); | 1494 __ lw(a1, MemOperand(fp, kLimitOffset)); |
1469 __ Branch(&loop, ne, a0, Operand(a1)); | 1495 __ Branch(&loop, ne, a0, Operand(a1)); |
1496 | |
1470 // Invoke the function. | 1497 // Invoke the function. |
1498 Label call_proxy; | |
1471 ParameterCount actual(a0); | 1499 ParameterCount actual(a0); |
1472 __ sra(a0, a0, kSmiTagSize); | 1500 __ sra(a0, a0, kSmiTagSize); |
1473 __ lw(a1, MemOperand(fp, kFunctionOffset)); | 1501 __ lw(a1, MemOperand(fp, kFunctionOffset)); |
1502 __ GetObjectType(a1, a2, a2); | |
1503 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); | |
1504 | |
1474 __ InvokeFunction(a1, actual, CALL_FUNCTION, | 1505 __ InvokeFunction(a1, actual, CALL_FUNCTION, |
1475 NullCallWrapper(), CALL_AS_METHOD); | 1506 NullCallWrapper(), CALL_AS_METHOD); |
1476 | 1507 |
1477 // Tear down the internal frame and remove function, receiver and args. | 1508 // Tear down the internal frame and remove function, receiver and args. |
1478 __ LeaveInternalFrame(); | 1509 __ LeaveInternalFrame(); |
1479 __ Addu(sp, sp, Operand(3 * kPointerSize)); | 1510 |
1480 __ Ret(); | 1511 __ Ret(USE_DELAY_SLOT); |
1512 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. | |
1513 | |
1514 // Invoke the function proxy. | |
1515 __ bind(&call_proxy); | |
1516 __ push(a1); // Add function proxy as last argument. | |
1517 __ Addu(a0, a0, Operand(1)); | |
1518 __ li(a2, Operand(0, RelocInfo::NONE)); | |
1519 __ SetCallKind(t1, CALL_AS_METHOD); | |
1520 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); | |
1521 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | |
1522 RelocInfo::CODE_TARGET); | |
1523 | |
1524 __ LeaveInternalFrame(); | |
1525 | |
1526 __ Ret(USE_DELAY_SLOT); | |
1527 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. | |
1481 } | 1528 } |
1482 | 1529 |
1483 | 1530 |
1484 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | 1531 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
1485 __ sll(a0, a0, kSmiTagSize); | 1532 __ sll(a0, a0, kSmiTagSize); |
1486 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1533 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
1487 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); | 1534 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); |
1488 __ Addu(fp, sp, Operand(3 * kPointerSize)); | 1535 __ Addu(fp, sp, Operand(3 * kPointerSize)); |
1489 } | 1536 } |
1490 | 1537 |
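Reading the MultiPush above together with the final fp adjustment: ra lands at the highest address and a0 at the lowest (assuming MultiPush stores higher-numbered registers at higher addresses), and fp is then pointed at the saved-fp slot. A sketch of the resulting adaptor frame layout on a 32-bit target (the names are illustrative, not the actual V8 frame constants):

constexpr int kPointerSize = 4;

constexpr int kCallerPCOffset    =  1 * kPointerSize;  // saved ra
constexpr int kCallerFPOffset    =  0 * kPointerSize;  // saved fp; new fp points here
constexpr int kFrameMarkerOffset = -1 * kPointerSize;  // Smi ARGUMENTS_ADAPTOR sentinel
constexpr int kFunctionOffset    = -2 * kPointerSize;  // a1, the callee
constexpr int kLengthOffset      = -3 * kPointerSize;  // a0, the smi-tagged argc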
(...skipping 127 matching lines...) | |
1618 __ bind(&dont_adapt_arguments); | 1665 __ bind(&dont_adapt_arguments); |
1619 __ Jump(a3); | 1666 __ Jump(a3); |
1620 } | 1667 } |
1621 | 1668 |
1622 | 1669 |
1623 #undef __ | 1670 #undef __ |
1624 | 1671 |
1625 } } // namespace v8::internal | 1672 } } // namespace v8::internal |
1626 | 1673 |
1627 #endif // V8_TARGET_ARCH_MIPS | 1674 #endif // V8_TARGET_ARCH_MIPS |
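For reference, the sll/sra pairs by kSmiTagSize that appear throughout these builtins are the 32-bit smi encoding: the integer payload is shifted left by one so the low tag bit stays zero. A minimal sketch under that 32-bit layout:

#include <cstdint>

constexpr int kSmiTagSize = 1;
constexpr int kSmiTag = 0;

int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }  // __ sll(..., kSmiTagSize)
int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }    // __ sra(..., kSmiTagSize)

static_assert(kSmiTag == 0, "a tagged smi keeps a zero low bit");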