--- OLD
+++ NEW
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/code-stubs.h"
 
 #include <memory>
 
 #include "src/bailout-reason.h"
 #include "src/code-factory.h"
(...skipping 1186 matching lines...)
 #endif
 
   return BuildRegExpConstructResult(length, index, input);
 }
 
 
 Handle<Code> RegExpConstructResultStub::GenerateCode() {
   return DoGenerateCode(this);
 }
 
-
-template <>
-class CodeStubGraphBuilder<KeyedLoadGenericStub>
-    : public CodeStubGraphBuilderBase {
- public:
-  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
-      : CodeStubGraphBuilderBase(info, stub) {}
-
-  typedef KeyedLoadGenericStub::Descriptor Descriptor;
-
- protected:
-  virtual HValue* BuildCodeStub();
-
-  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
-                                   HValue* bit_field2,
-                                   ElementsKind kind);
-
-  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
-                            HValue* receiver,
-                            HValue* key,
-                            HValue* instance_type,
-                            HValue* bit_field2,
-                            ElementsKind kind);
-
-  KeyedLoadGenericStub* casted_stub() {
-    return static_cast<KeyedLoadGenericStub*>(stub());
-  }
-};
-
-
-void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
-    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
-    ElementsKind kind) {
-  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
-  HValue* kind_limit = Add<HConstant>(
-      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
-
-  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
-  if_builder->Then();
-}
-
-
-void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
-    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
-    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
-  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
-
-  IfBuilder js_array_check(this);
-  js_array_check.If<HCompareNumericAndBranch>(
-      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
-  js_array_check.Then();
-  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
-                                              true, kind,
-                                              LOAD, NEVER_RETURN_HOLE,
-                                              STANDARD_STORE));
-  js_array_check.Else();
-  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
-                                              false, kind,
-                                              LOAD, NEVER_RETURN_HOLE,
-                                              STANDARD_STORE));
-  js_array_check.End();
-}
-
-
-HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
-  HValue* receiver = GetParameter(Descriptor::kReceiver);
-  HValue* key = GetParameter(Descriptor::kName);
-  // Split into a smi/integer case and unique string case.
-  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
-                                                graph()->CreateBasicBlock());
-
-  BuildKeyedIndexCheck(key, &index_name_split_continuation);
-
-  IfBuilder index_name_split(this, &index_name_split_continuation);
-  index_name_split.Then();
-  {
-    // Key is an index (number)
-    key = Pop();
-
-    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
-                         (1 << Map::kHasIndexedInterceptor);
-    BuildJSObjectCheck(receiver, bit_field_mask);
-
-    HValue* map =
-        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
-
-    HValue* instance_type =
-        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
-
-    HValue* bit_field2 =
-        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
-
-    IfBuilder kind_if(this);
-    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
-                         FAST_HOLEY_ELEMENTS);
-
-    kind_if.Else();
-    {
-      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
-                           FAST_HOLEY_DOUBLE_ELEMENTS);
-    }
-    kind_if.Else();
-
-    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
-    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
-    {
-      HValue* elements = AddLoadElements(receiver);
-
-      HValue* hash = BuildElementIndexHash(key);
-
-      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
-    }
-    kind_if.Else();
-
-    // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
-    STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
-                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
-    BuildElementsKindLimitCheck(&kind_if, bit_field2,
-                                SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
-    // Non-strict elements are not handled.
-    Add<HDeoptimize>(DeoptimizeReason::kNonStrictElementsInKeyedLoadGenericStub,
-                     Deoptimizer::EAGER);
-    Push(graph()->GetConstant0());
-
-    kind_if.ElseDeopt(
-        DeoptimizeReason::kElementsKindUnhandledInKeyedLoadGenericStub);
-
-    kind_if.End();
-  }
-  index_name_split.Else();
-  {
-    // Key is a unique string.
-    key = Pop();
-
-    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
-                         (1 << Map::kHasNamedInterceptor);
-    BuildJSObjectCheck(receiver, bit_field_mask);
-
-    HIfContinuation continuation;
-    BuildTestForDictionaryProperties(receiver, &continuation);
-    IfBuilder if_dict_properties(this, &continuation);
-    if_dict_properties.Then();
-    {
-      // Key is string, properties are dictionary mode
-      BuildNonGlobalObjectCheck(receiver);
-
-      HValue* properties = Add<HLoadNamedField>(
-          receiver, nullptr, HObjectAccess::ForPropertiesPointer());
-
-      HValue* hash =
-          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());
-
-      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));
-
-      HValue* value =
-          BuildUncheckedDictionaryElementLoad(receiver, properties, key, hash);
-      Push(value);
-    }
-    if_dict_properties.Else();
-    {
-      // TODO(dcarney): don't use keyed lookup cache, but convert to use
-      // megamorphic stub cache.
-      UNREACHABLE();
-      // Key is string, properties are fast mode
-      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
-
-      ExternalReference cache_keys_ref =
-          ExternalReference::keyed_lookup_cache_keys(isolate());
-      HValue* cache_keys = Add<HConstant>(cache_keys_ref);
-
-      HValue* map =
-          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
-      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
-      base_index->ClearFlag(HValue::kCanOverflow);
-
-      HIfContinuation inline_or_runtime_continuation(
-          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
-      {
-        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
-        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
-             ++probe) {
-          IfBuilder* lookup_if = &lookup_ifs[probe];
-          lookup_if->Initialize(this);
-          int probe_base = probe * KeyedLookupCache::kEntryLength;
-          HValue* map_index = AddUncasted<HAdd>(
-              base_index,
-              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
-          map_index->ClearFlag(HValue::kCanOverflow);
-          HValue* key_index = AddUncasted<HAdd>(
-              base_index,
-              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
-          key_index->ClearFlag(HValue::kCanOverflow);
-          HValue* map_to_check =
-              Add<HLoadKeyed>(cache_keys, map_index, nullptr, nullptr,
-                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
-          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
-          lookup_if->And();
-          HValue* key_to_check =
-              Add<HLoadKeyed>(cache_keys, key_index, nullptr, nullptr,
-                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
-          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
-          lookup_if->Then();
-          {
-            ExternalReference cache_field_offsets_ref =
-                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
-            HValue* cache_field_offsets =
-                Add<HConstant>(cache_field_offsets_ref);
-            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
-            index->ClearFlag(HValue::kCanOverflow);
-            HValue* property_index =
-                Add<HLoadKeyed>(cache_field_offsets, index, nullptr, cache_keys,
-                                INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
-            Push(property_index);
-          }
-          lookup_if->Else();
-        }
-        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
-          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
-        }
-      }
-
-      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
-      inline_or_runtime.Then();
-      {
-        // Found a cached index, load property inline.
-        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
-      }
-      inline_or_runtime.Else();
-      {
-        // KeyedLookupCache miss; call runtime.
-        Add<HPushArguments>(receiver, key);
-        Push(Add<HCallRuntime>(
-            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
-      }
-      inline_or_runtime.End();
-    }
-    if_dict_properties.End();
-  }
-  index_name_split.End();
-
-  return Pop();
-}
-
-
-Handle<Code> KeyedLoadGenericStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
 }  // namespace internal
 }  // namespace v8