OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 330 matching lines...) |
341 static void GenerateRuntimeGetProperty(MacroAssembler* masm); | 341 static void GenerateRuntimeGetProperty(MacroAssembler* masm); |
342 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } | 342 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } |
343 static void GeneratePreMonomorphic(MacroAssembler* masm) { | 343 static void GeneratePreMonomorphic(MacroAssembler* masm) { |
344 GenerateMiss(masm); | 344 GenerateMiss(masm); |
345 } | 345 } |
346 static void GenerateGeneric(MacroAssembler* masm); | 346 static void GenerateGeneric(MacroAssembler* masm); |
347 static void GenerateString(MacroAssembler* masm); | 347 static void GenerateString(MacroAssembler* masm); |
348 | 348 |
349 static void GenerateIndexedInterceptor(MacroAssembler* masm); | 349 static void GenerateIndexedInterceptor(MacroAssembler* masm); |
350 | 350 |
| 351 // Generator for loading bytes from a pixel array. |
| 352 static void GeneratePixelArray(MacroAssembler* masm); |
| 353 |
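Note on the new generator: the declaration above only adds the entry point; the generator itself emits machine code through MacroAssembler. As a rough sketch of the behavior it is expected to implement (the helper name and signature below are made up for illustration, not part of this change):

    // Illustrative C++ equivalent of a keyed load from a pixel array
    // (one byte per element, so the result always fits in a Smi).
    #include <stdint.h>

    static bool TryPixelArrayLoad(const uint8_t* pixels, int length,
                                  int index, int* result) {
      // Out-of-range indices are not handled here; the IC would fall back
      // to the generic or runtime path for those.
      if (index < 0 || index >= length) return false;
      // Load the byte and hand it back as a small integer.
      *result = pixels[index];
      return true;
    }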
351 // Clear the use of the inlined version. | 354 // Clear the use of the inlined version. |
352 static void ClearInlinedVersion(Address address); | 355 static void ClearInlinedVersion(Address address); |
353 | 356 |
354 // Bit mask to be tested against bit field for the cases when | 357 // Bit mask to be tested against bit field for the cases when |
355 // generic stub should go into slow case. | 358 // generic stub should go into slow case. |
356 // Access check is necessary explicitly since generic stub does not perform | 359 // Access check is necessary explicitly since generic stub does not perform |
357 // map checks. | 360 // map checks. |
358 static const int kSlowCaseBitFieldMask = | 361 static const int kSlowCaseBitFieldMask = |
359 (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); | 362 (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); |
360 | 363 |
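For context, the mask is meant to be ANDed against the receiver map's bit field; if either bit is set, the generic stub cannot proceed and must take the slow path. A minimal sketch, assuming the bit field is reachable through a bit_field() accessor (the surrounding control flow is hypothetical):

    // Hypothetical slow-case check for the generic keyed load stub: maps that
    // need an access check or have an indexed interceptor are not handled by
    // the generic code, which performs no map checks of its own.
    static bool MustGoSlow(Map* map) {
      return (map->bit_field() & KeyedLoadIC::kSlowCaseBitFieldMask) != 0;
    }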
(...skipping 14 matching lines...) |
375 static Code* generic_stub() { | 378 static Code* generic_stub() { |
376 return Builtins::builtin(Builtins::KeyedLoadIC_Generic); | 379 return Builtins::builtin(Builtins::KeyedLoadIC_Generic); |
377 } | 380 } |
378 static Code* pre_monomorphic_stub() { | 381 static Code* pre_monomorphic_stub() { |
379 return Builtins::builtin(Builtins::KeyedLoadIC_PreMonomorphic); | 382 return Builtins::builtin(Builtins::KeyedLoadIC_PreMonomorphic); |
380 } | 383 } |
381 static Code* string_stub() { | 384 static Code* string_stub() { |
382 return Builtins::builtin(Builtins::KeyedLoadIC_String); | 385 return Builtins::builtin(Builtins::KeyedLoadIC_String); |
383 } | 386 } |
384 | 387 |
| 388 static Code* pixel_array_stub() { |
| 389 return Builtins::builtin(Builtins::KeyedLoadIC_PixelArray); |
| 390 } |
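A rough sketch of how the IC update path might select this stub; the receiver check and the call to set_target() below are assumptions about the surrounding update logic (and ignore member visibility), not code from this change:

    // Hypothetical stub-selection step: once a keyed load is observed on a
    // receiver backed by a pixel array, retarget the call site to the
    // specialized builtin so later loads bypass the generic stub.
    void MaybeUsePixelArrayStub(KeyedLoadIC* ic, Object* receiver) {
      if (receiver->IsJSObject() &&
          JSObject::cast(receiver)->HasPixelElements()) {
        ic->set_target(KeyedLoadIC::pixel_array_stub());
      }
    }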
385 static Code* indexed_interceptor_stub() { | 391 static Code* indexed_interceptor_stub() { |
386 return Builtins::builtin(Builtins::KeyedLoadIC_IndexedInterceptor); | 392 return Builtins::builtin(Builtins::KeyedLoadIC_IndexedInterceptor); |
387 } | 393 } |
388 | 394 |
389 static void Clear(Address address, Code* target); | 395 static void Clear(Address address, Code* target); |
390 | 396 |
391 // Support for patching the map that is checked in an inlined | 397 // Support for patching the map that is checked in an inlined |
392 // version of keyed load. | 398 // version of keyed load. |
393 static bool PatchInlinedLoad(Address address, Object* map); | 399 static bool PatchInlinedLoad(Address address, Object* map); |
394 | 400 |
(...skipping 195 matching lines...) |
590 | 596 |
591 Token::Value op_; | 597 Token::Value op_; |
592 }; | 598 }; |
593 | 599 |
594 // Helper for TRBinaryOpIC and CompareIC. | 600 // Helper for TRBinaryOpIC and CompareIC. |
595 void PatchInlinedSmiCode(Address address); | 601 void PatchInlinedSmiCode(Address address); |
596 | 602 |
597 } } // namespace v8::internal | 603 } } // namespace v8::internal |
598 | 604 |
599 #endif // V8_IC_H_ | 605 #endif // V8_IC_H_ |