OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 329 matching lines...)
340 static void GenerateRuntimeGetProperty(MacroAssembler* masm); | 340 static void GenerateRuntimeGetProperty(MacroAssembler* masm); |
341 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } | 341 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } |
342 static void GeneratePreMonomorphic(MacroAssembler* masm) { | 342 static void GeneratePreMonomorphic(MacroAssembler* masm) { |
343 GenerateMiss(masm); | 343 GenerateMiss(masm); |
344 } | 344 } |
345 static void GenerateGeneric(MacroAssembler* masm); | 345 static void GenerateGeneric(MacroAssembler* masm); |
346 static void GenerateString(MacroAssembler* masm); | 346 static void GenerateString(MacroAssembler* masm); |
347 | 347 |
348 static void GenerateIndexedInterceptor(MacroAssembler* masm); | 348 static void GenerateIndexedInterceptor(MacroAssembler* masm); |
349 | 349 |
| 350 // Generator for loading bytes from a pixel array. |
| 351 static void GeneratePixelArray(MacroAssembler* masm); |
| 352 |
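(Editor's note: the new GeneratePixelArray generator and the KeyedLoadIC_PixelArray stub below are the substance of this change. As a rough, hedged illustration only — not V8's actual generated stub code or its MacroAssembler API — the following standalone C++ sketch shows what a pixel-array keyed load conceptually reduces to: bounds-check the integer key against an external byte buffer, load one byte, and return it as a small integer, otherwise miss to the slow path. All names here, such as PixelArrayLike and KeyedLoadPixel, are hypothetical.

#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for a pixel array: an external buffer of uint8
// values, as used for CanvasPixelArray-style backing stores.
struct PixelArrayLike {
  std::vector<uint8_t> external_pointer;  // raw byte storage
};

// Conceptually what the pixel-array keyed load does: if the key is a valid
// index, load one byte and return it as a small integer; otherwise report a
// miss so the caller can fall back to the generic (slow) path.
bool KeyedLoadPixel(const PixelArrayLike& elements, int32_t key,
                    int32_t* result) {
  if (key < 0 ||
      static_cast<size_t>(key) >= elements.external_pointer.size()) {
    return false;  // out of bounds: take the slow path
  }
  *result = elements.external_pointer[key];  // byte load, value is 0..255
  return true;
}

int main() {
  PixelArrayLike pixels{{10, 200, 255}};
  int32_t value = 0;
  if (KeyedLoadPixel(pixels, 1, &value)) {
    std::printf("pixels[1] = %d\n", static_cast<int>(value));
  }
  return 0;
}

End of editor's note.)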
350 // Clear the use of the inlined version. | 353 // Clear the use of the inlined version. |
351 static void ClearInlinedVersion(Address address); | 354 static void ClearInlinedVersion(Address address); |
352 | 355 |
353 // Bit mask to be tested against bit field for the cases when | 356 // Bit mask to be tested against bit field for the cases when |
354 // generic stub should go into slow case. | 357 // generic stub should go into slow case. |
355 // Access check is necessary explicitly since generic stub does not perform | 358 // Access check is necessary explicitly since generic stub does not perform |
356 // map checks. | 359 // map checks. |
357 static const int kSlowCaseBitFieldMask = | 360 static const int kSlowCaseBitFieldMask = |
358 (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); | 361 (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); |
359 | 362 |
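(Editor's note: kSlowCaseBitFieldMask is tested against a map's bit field so the generic stub can bail out when the receiver needs an access check or has an indexed interceptor, since the generic stub does no map check of its own. A minimal standalone C++ sketch of that mask test follows; the bit positions are placeholders for illustration, not the real values from V8's Map definition.

#include <cstdint>
#include <cstdio>

// Placeholder bit positions standing in for Map::kIsAccessCheckNeeded and
// Map::kHasIndexedInterceptor (assumed values, for illustration only).
constexpr int kIsAccessCheckNeededBit = 3;
constexpr int kHasIndexedInterceptorBit = 4;

constexpr uint32_t kSlowCaseBitFieldMask =
    (1u << kIsAccessCheckNeededBit) | (1u << kHasIndexedInterceptorBit);

// True when the generic stub should go to the slow case, i.e. when either
// "needs access check" or "has indexed interceptor" is set in the bit field.
bool NeedsSlowCase(uint32_t map_bit_field) {
  return (map_bit_field & kSlowCaseBitFieldMask) != 0;
}

int main() {
  uint32_t plain_object = 0;
  uint32_t interceptor_object = 1u << kHasIndexedInterceptorBit;
  std::printf("plain: %d, interceptor: %d\n",
              NeedsSlowCase(plain_object) ? 1 : 0,
              NeedsSlowCase(interceptor_object) ? 1 : 0);
  return 0;
}

End of editor's note.)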
(...skipping 14 matching lines...)
374 static Code* generic_stub() { | 377 static Code* generic_stub() { |
375 return Builtins::builtin(Builtins::KeyedLoadIC_Generic); | 378 return Builtins::builtin(Builtins::KeyedLoadIC_Generic); |
376 } | 379 } |
377 static Code* pre_monomorphic_stub() { | 380 static Code* pre_monomorphic_stub() { |
378 return Builtins::builtin(Builtins::KeyedLoadIC_PreMonomorphic); | 381 return Builtins::builtin(Builtins::KeyedLoadIC_PreMonomorphic); |
379 } | 382 } |
380 static Code* string_stub() { | 383 static Code* string_stub() { |
381 return Builtins::builtin(Builtins::KeyedLoadIC_String); | 384 return Builtins::builtin(Builtins::KeyedLoadIC_String); |
382 } | 385 } |
383 | 386 |
| 387 static Code* pixel_array_stub() { |
| 388 return Builtins::builtin(Builtins::KeyedLoadIC_PixelArray); |
| 389 } |
384 static Code* indexed_interceptor_stub() { | 390 static Code* indexed_interceptor_stub() { |
385 return Builtins::builtin(Builtins::KeyedLoadIC_IndexedInterceptor); | 391 return Builtins::builtin(Builtins::KeyedLoadIC_IndexedInterceptor); |
386 } | 392 } |
387 | 393 |
388 static void Clear(Address address, Code* target); | 394 static void Clear(Address address, Code* target); |
389 | 395 |
390 // Support for patching the map that is checked in an inlined | 396 // Support for patching the map that is checked in an inlined |
391 // version of keyed load. | 397 // version of keyed load. |
392 static bool PatchInlinedLoad(Address address, Object* map); | 398 static bool PatchInlinedLoad(Address address, Object* map); |
393 | 399 |
(...skipping 195 matching lines...)
589 | 595 |
590 Token::Value op_; | 596 Token::Value op_; |
591 }; | 597 }; |
592 | 598 |
593 // Helper for TRBinaryOpIC and CompareIC. | 599 // Helper for TRBinaryOpIC and CompareIC. |
594 void PatchInlinedSmiCode(Address address); | 600 void PatchInlinedSmiCode(Address address); |
595 | 601 |
596 } } // namespace v8::internal | 602 } } // namespace v8::internal |
597 | 603 |
598 #endif // V8_IC_H_ | 604 #endif // V8_IC_H_ |