| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 278 matching lines...) |
| 289 static void GenerateMiss(MacroAssembler* masm); | 289 static void GenerateMiss(MacroAssembler* masm); |
| 290 static void GenerateMegamorphic(MacroAssembler* masm); | 290 static void GenerateMegamorphic(MacroAssembler* masm); |
| 291 static void GenerateNormal(MacroAssembler* masm); | 291 static void GenerateNormal(MacroAssembler* masm); |
| 292 | 292 |
| 293 // Specialized code generator routines. | 293 // Specialized code generator routines. |
| 294 static void GenerateArrayLength(MacroAssembler* masm); | 294 static void GenerateArrayLength(MacroAssembler* masm); |
| 295 static void GenerateStringLength(MacroAssembler* masm, | 295 static void GenerateStringLength(MacroAssembler* masm, |
| 296 bool support_wrappers); | 296 bool support_wrappers); |
| 297 static void GenerateFunctionPrototype(MacroAssembler* masm); | 297 static void GenerateFunctionPrototype(MacroAssembler* masm); |
| 298 | 298 |
| 299 // Clear the use of the inlined version. | |
| 300 static void ClearInlinedVersion(Address address); | |
| 301 | |
| 302 // The offset from the inlined patch site to the start of the | |
| 303 // inlined load instruction. It is architecture-dependent, and not | |
| 304 // used on ARM. | |
| 305 static const int kOffsetToLoadInstruction; | |
| 306 | |
| 307 private: | 299 private: |
| 308 // Update the inline cache and the global stub cache based on the | 300 // Update the inline cache and the global stub cache based on the |
| 309 // lookup result. | 301 // lookup result. |
| 310 void UpdateCaches(LookupResult* lookup, | 302 void UpdateCaches(LookupResult* lookup, |
| 311 State state, | 303 State state, |
| 312 Handle<Object> object, | 304 Handle<Object> object, |
| 313 Handle<String> name); | 305 Handle<String> name); |
| 314 | 306 |
| 315 // Stub accessors. | 307 // Stub accessors. |
| 316 Code* megamorphic_stub() { | 308 Code* megamorphic_stub() { |
| 317 return isolate()->builtins()->builtin( | 309 return isolate()->builtins()->builtin( |
| 318 Builtins::kLoadIC_Megamorphic); | 310 Builtins::kLoadIC_Megamorphic); |
| 319 } | 311 } |
| 320 static Code* initialize_stub() { | 312 static Code* initialize_stub() { |
| 321 return Isolate::Current()->builtins()->builtin( | 313 return Isolate::Current()->builtins()->builtin( |
| 322 Builtins::kLoadIC_Initialize); | 314 Builtins::kLoadIC_Initialize); |
| 323 } | 315 } |
| 324 Code* pre_monomorphic_stub() { | 316 Code* pre_monomorphic_stub() { |
| 325 return isolate()->builtins()->builtin( | 317 return isolate()->builtins()->builtin( |
| 326 Builtins::kLoadIC_PreMonomorphic); | 318 Builtins::kLoadIC_PreMonomorphic); |
| 327 } | 319 } |
| 328 | 320 |
| 329 static void Clear(Address address, Code* target); | 321 static void Clear(Address address, Code* target); |
| 330 | 322 |
| 331 static bool PatchInlinedLoad(Address address, Object* map, int index); | |
| 332 | |
| 333 static bool PatchInlinedContextualLoad(Address address, | |
| 334 Object* map, | |
| 335 Object* cell, | |
| 336 bool is_dont_delete); | |
| 337 | |
| 338 friend class IC; | 323 friend class IC; |
| 339 }; | 324 }; |
| 340 | 325 |
| 341 | 326 |
| 342 class KeyedLoadIC: public IC { | 327 class KeyedLoadIC: public IC { |
| 343 public: | 328 public: |
| 344 explicit KeyedLoadIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { | 329 explicit KeyedLoadIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { |
| 345 ASSERT(target()->is_keyed_load_stub()); | 330 ASSERT(target()->is_keyed_load_stub()); |
| 346 } | 331 } |
| 347 | 332 |
| 348 MUST_USE_RESULT MaybeObject* Load(State state, | 333 MUST_USE_RESULT MaybeObject* Load(State state, |
| 349 Handle<Object> object, | 334 Handle<Object> object, |
| 350 Handle<Object> key); | 335 Handle<Object> key); |
| 351 | 336 |
| 352 // Code generator routines. | 337 // Code generator routines. |
| 353 static void GenerateMiss(MacroAssembler* masm); | 338 static void GenerateMiss(MacroAssembler* masm); |
| 354 static void GenerateRuntimeGetProperty(MacroAssembler* masm); | 339 static void GenerateRuntimeGetProperty(MacroAssembler* masm); |
| 355 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } | 340 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } |
| 356 static void GeneratePreMonomorphic(MacroAssembler* masm) { | 341 static void GeneratePreMonomorphic(MacroAssembler* masm) { |
| 357 GenerateMiss(masm); | 342 GenerateMiss(masm); |
| 358 } | 343 } |
| 359 static void GenerateGeneric(MacroAssembler* masm); | 344 static void GenerateGeneric(MacroAssembler* masm); |
| 360 static void GenerateString(MacroAssembler* masm); | 345 static void GenerateString(MacroAssembler* masm); |
| 361 | 346 |
| 362 static void GenerateIndexedInterceptor(MacroAssembler* masm); | 347 static void GenerateIndexedInterceptor(MacroAssembler* masm); |
| 363 | 348 |
| 364 // Clear the use of the inlined version. | |
| 365 static void ClearInlinedVersion(Address address); | |
| 366 | |
| 367 // Bit mask tested against the map's bit field for the cases where | 349 // Bit mask tested against the map's bit field for the cases where |
| 368 // the generic stub should go into the slow case. | 350 // the generic stub should go into the slow case. |
| 369 // The access check must be done explicitly, since the generic stub does | 351 // The access check must be done explicitly, since the generic stub does |
| 370 // not perform map checks. | 352 // not perform map checks. |
| 371 static const int kSlowCaseBitFieldMask = | 353 static const int kSlowCaseBitFieldMask = |
| 372 (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); | 354 (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); |
| 373 | 355 |
| 374 private: | 356 private: |
| 375 // Update the inline cache. | 357 // Update the inline cache. |
| 376 void UpdateCaches(LookupResult* lookup, | 358 void UpdateCaches(LookupResult* lookup, |
| (...skipping 23 matching lines...) |
| 400 Builtins::kKeyedLoadIC_String); | 382 Builtins::kKeyedLoadIC_String); |
| 401 } | 383 } |
| 402 | 384 |
| 403 Code* indexed_interceptor_stub() { | 385 Code* indexed_interceptor_stub() { |
| 404 return isolate()->builtins()->builtin( | 386 return isolate()->builtins()->builtin( |
| 405 Builtins::kKeyedLoadIC_IndexedInterceptor); | 387 Builtins::kKeyedLoadIC_IndexedInterceptor); |
| 406 } | 388 } |
| 407 | 389 |
| 408 static void Clear(Address address, Code* target); | 390 static void Clear(Address address, Code* target); |
| 409 | 391 |
| 410 // Support for patching the map that is checked in an inlined | |
| 411 // version of keyed load. | |
| 412 static bool PatchInlinedLoad(Address address, Object* map); | |
| 413 | |
| 414 friend class IC; | 392 friend class IC; |
| 415 }; | 393 }; |
| 416 | 394 |
| 417 | 395 |
| 418 class StoreIC: public IC { | 396 class StoreIC: public IC { |
| 419 public: | 397 public: |
| 420 explicit StoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { | 398 explicit StoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { |
| 421 ASSERT(target()->is_store_stub()); | 399 ASSERT(target()->is_store_stub()); |
| 422 } | 400 } |
| 423 | 401 |
| 424 MUST_USE_RESULT MaybeObject* Store(State state, | 402 MUST_USE_RESULT MaybeObject* Store(State state, |
| 425 StrictModeFlag strict_mode, | 403 StrictModeFlag strict_mode, |
| 426 Handle<Object> object, | 404 Handle<Object> object, |
| 427 Handle<String> name, | 405 Handle<String> name, |
| 428 Handle<Object> value); | 406 Handle<Object> value); |
| 429 | 407 |
| 430 // Code generators for stub routines. Only called once at startup. | 408 // Code generators for stub routines. Only called once at startup. |
| 431 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } | 409 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } |
| 432 static void GenerateMiss(MacroAssembler* masm); | 410 static void GenerateMiss(MacroAssembler* masm); |
| 433 static void GenerateMegamorphic(MacroAssembler* masm, | 411 static void GenerateMegamorphic(MacroAssembler* masm, |
| 434 StrictModeFlag strict_mode); | 412 StrictModeFlag strict_mode); |
| 435 static void GenerateArrayLength(MacroAssembler* masm); | 413 static void GenerateArrayLength(MacroAssembler* masm); |
| 436 static void GenerateNormal(MacroAssembler* masm); | 414 static void GenerateNormal(MacroAssembler* masm); |
| 437 static void GenerateGlobalProxy(MacroAssembler* masm, | 415 static void GenerateGlobalProxy(MacroAssembler* masm, |
| 438 StrictModeFlag strict_mode); | 416 StrictModeFlag strict_mode); |
| 439 | 417 |
| 440 // Clear the use of an inlined version. | |
| 441 static void ClearInlinedVersion(Address address); | |
| 442 | |
| 443 // The offset from the inlined patch site to the start of the | |
| 444 // inlined store instruction. | |
| 445 static const int kOffsetToStoreInstruction; | |
| 446 | |
| 447 private: | 418 private: |
| 448 // Update the inline cache and the global stub cache based on the | 419 // Update the inline cache and the global stub cache based on the |
| 449 // lookup result. | 420 // lookup result. |
| 450 void UpdateCaches(LookupResult* lookup, | 421 void UpdateCaches(LookupResult* lookup, |
| 451 State state, | 422 State state, |
| 452 StrictModeFlag strict_mode, | 423 StrictModeFlag strict_mode, |
| 453 Handle<JSObject> receiver, | 424 Handle<JSObject> receiver, |
| 454 Handle<String> name, | 425 Handle<String> name, |
| 455 Handle<Object> value); | 426 Handle<Object> value); |
| 456 | 427 |
| (...skipping 25 matching lines...) |
| 482 return isolate()->builtins()->builtin( | 453 return isolate()->builtins()->builtin( |
| 483 Builtins::kStoreIC_GlobalProxy); | 454 Builtins::kStoreIC_GlobalProxy); |
| 484 } | 455 } |
| 485 Code* global_proxy_stub_strict() { | 456 Code* global_proxy_stub_strict() { |
| 486 return isolate()->builtins()->builtin( | 457 return isolate()->builtins()->builtin( |
| 487 Builtins::kStoreIC_GlobalProxy_Strict); | 458 Builtins::kStoreIC_GlobalProxy_Strict); |
| 488 } | 459 } |
| 489 | 460 |
| 490 static void Clear(Address address, Code* target); | 461 static void Clear(Address address, Code* target); |
| 491 | 462 |
| 492 // Support for patching the index and the map that is checked in an | |
| 493 // inlined version of the named store. | |
| 494 static bool PatchInlinedStore(Address address, Object* map, int index); | |
| 495 | |
| 496 friend class IC; | 463 friend class IC; |
| 497 }; | 464 }; |
| 498 | 465 |
| 499 | 466 |
| 500 class KeyedStoreIC: public IC { | 467 class KeyedStoreIC: public IC { |
| 501 public: | 468 public: |
| 502 explicit KeyedStoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { } | 469 explicit KeyedStoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { } |
| 503 | 470 |
| 504 MUST_USE_RESULT MaybeObject* Store(State state, | 471 MUST_USE_RESULT MaybeObject* Store(State state, |
| 505 StrictModeFlag strict_mode, | 472 StrictModeFlag strict_mode, |
| 506 Handle<Object> object, | 473 Handle<Object> object, |
| 507 Handle<Object> name, | 474 Handle<Object> name, |
| 508 Handle<Object> value); | 475 Handle<Object> value); |
| 509 | 476 |
| 510 // Code generators for stub routines. Only called once at startup. | 477 // Code generators for stub routines. Only called once at startup. |
| 511 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } | 478 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } |
| 512 static void GenerateMiss(MacroAssembler* masm); | 479 static void GenerateMiss(MacroAssembler* masm); |
| 513 static void GenerateRuntimeSetProperty(MacroAssembler* masm, | 480 static void GenerateRuntimeSetProperty(MacroAssembler* masm, |
| 514 StrictModeFlag strict_mode); | 481 StrictModeFlag strict_mode); |
| 515 static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode); | 482 static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode); |
| 516 | 483 |
| 517 // Clear the inlined version so the IC is always hit. | |
| 518 static void ClearInlinedVersion(Address address); | |
| 519 | |
| 520 // Restore the inlined version so the fast case can get hit. | |
| 521 static void RestoreInlinedVersion(Address address); | |
| 522 | |
| 523 private: | 484 private: |
| 524 // Update the inline cache. | 485 // Update the inline cache. |
| 525 void UpdateCaches(LookupResult* lookup, | 486 void UpdateCaches(LookupResult* lookup, |
| 526 State state, | 487 State state, |
| 527 StrictModeFlag strict_mode, | 488 StrictModeFlag strict_mode, |
| 528 Handle<JSObject> receiver, | 489 Handle<JSObject> receiver, |
| 529 Handle<String> name, | 490 Handle<String> name, |
| 530 Handle<Object> value); | 491 Handle<Object> value); |
| 531 | 492 |
| 532 void set_target(Code* code) { | 493 void set_target(Code* code) { |
| (...skipping 24 matching lines...) |
| 557 return isolate()->builtins()->builtin( | 518 return isolate()->builtins()->builtin( |
| 558 Builtins::kKeyedStoreIC_Generic); | 519 Builtins::kKeyedStoreIC_Generic); |
| 559 } | 520 } |
| 560 Code* generic_stub_strict() { | 521 Code* generic_stub_strict() { |
| 561 return isolate()->builtins()->builtin( | 522 return isolate()->builtins()->builtin( |
| 562 Builtins::kKeyedStoreIC_Generic_Strict); | 523 Builtins::kKeyedStoreIC_Generic_Strict); |
| 563 } | 524 } |
| 564 | 525 |
| 565 static void Clear(Address address, Code* target); | 526 static void Clear(Address address, Code* target); |
| 566 | 527 |
| 567 // Support for patching the map that is checked in an inlined | |
| 568 // version of keyed store. | |
| 569 // The address is the patch point for the IC call | |
| 570 // (Assembler::kCallTargetAddressOffset before the end of | |
| 571 // the call/return address). | |
| 572 // The map is the new map that the inlined code should check against. | |
| 573 static bool PatchInlinedStore(Address address, Object* map); | |
| 574 | |
| 575 friend class IC; | 528 friend class IC; |
| 576 }; | 529 }; |
| 577 | 530 |
| 578 | 531 |
| 579 // Type Recording BinaryOpIC, which records the types of the inputs and outputs. | 532 // Type Recording BinaryOpIC, which records the types of the inputs and outputs. |
| 580 class TRBinaryOpIC: public IC { | 533 class TRBinaryOpIC: public IC { |
| 581 public: | 534 public: |
| 582 | 535 |
| 583 enum TypeInfo { | 536 enum TypeInfo { |
| 584 UNINITIALIZED, | 537 UNINITIALIZED, |
| (...skipping 56 matching lines...) |
| 641 | 594 |
| 642 Token::Value op_; | 595 Token::Value op_; |
| 643 }; | 596 }; |
| 644 | 597 |
| 645 // Helper for TRBinaryOpIC and CompareIC. | 598 // Helper for TRBinaryOpIC and CompareIC. |
| 646 void PatchInlinedSmiCode(Address address); | 599 void PatchInlinedSmiCode(Address address); |
| 647 | 600 |
| 648 } } // namespace v8::internal | 601 } } // namespace v8::internal |
| 649 | 602 |
| 650 #endif // V8_IC_H_ | 603 #endif // V8_IC_H_ |
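For readers skimming the diff: KeyedLoadIC::kSlowCaseBitFieldMask survives this patch, and the idea behind it fits in a few lines. Below is a minimal sketch, not V8 source; the Map layout, the bit positions, and the NeedsSlowCase name are simplified stand-ins. The point is that one AND against a precomputed mask sends any map that needs an access check or has an indexed interceptor into the slow case, replacing one branch per flag.

#include <cstdint>
#include <cstdio>

// Stand-ins for V8's Map bit-field flag positions (illustrative values,
// not the real ones from src/objects.h).
enum MapBits {
  kIsAccessCheckNeeded = 1,
  kHasIndexedInterceptor = 2,
};

// Mirrors the shape of KeyedLoadIC::kSlowCaseBitFieldMask: an OR of the
// flag bits that force the generic stub into the slow case.
static const int kSlowCaseBitFieldMask =
    (1 << kIsAccessCheckNeeded) | (1 << kHasIndexedInterceptor);

struct Map {
  uint8_t bit_field;  // packed boolean flags, one per bit
};

// The generated generic stub does the moral equivalent of this test:
// a single AND against the receiver's map bit field covers both flags
// at once, since the stub itself performs no map checks.
bool NeedsSlowCase(const Map& map) {
  return (map.bit_field & kSlowCaseBitFieldMask) != 0;
}

int main() {
  Map plain = {0};
  Map guarded = {static_cast<uint8_t>(1 << kIsAccessCheckNeeded)};
  std::printf("plain: %d, guarded: %d\n",
              NeedsSlowCase(plain), NeedsSlowCase(guarded));
  return 0;
}

In the real header the flag positions come from Map::kIsAccessCheckNeeded and Map::kHasIndexedInterceptor, and the mask test is emitted into the generated keyed-load stub rather than written as C++ like this.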