OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 23 matching lines...) |
34 #include "debug.h" | 34 #include "debug.h" |
35 #include "runtime.h" | 35 #include "runtime.h" |
36 #include "serialize.h" | 36 #include "serialize.h" |
37 | 37 |
38 namespace v8 { | 38 namespace v8 { |
39 namespace internal { | 39 namespace internal { |
40 | 40 |
41 // ------------------------------------------------------------------------- | 41 // ------------------------------------------------------------------------- |
42 // MacroAssembler implementation. | 42 // MacroAssembler implementation. |
43 | 43 |
44 MacroAssembler::MacroAssembler(void* buffer, int size) | 44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) |
45 : Assembler(Isolate::Current(), buffer, size), | 45 : Assembler(arg_isolate, buffer, size), |
46 generating_stub_(false), | 46 generating_stub_(false), |
47 allow_stub_calls_(true), | 47 allow_stub_calls_(true) { |
48 code_object_(isolate()->heap()->undefined_value()) { | 48 if (isolate() != NULL) { |
| 49 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), |
| 50 isolate()); |
| 51 } |
49 } | 52 } |
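The constructor now takes the Isolate* explicitly instead of resolving Isolate::Current() itself, and a NULL isolate is tolerated by leaving code_object_ unset. A minimal caller-side sketch of the new signature (the buffer name and size below are illustrative, not part of this patch):

    // Hypothetical call site; assumes a live isolate and a raw code buffer.
    static const int kBufferSize = 4 * KB;
    byte buffer[kBufferSize];
    MacroAssembler masm(Isolate::Current(), buffer, kBufferSize);
    // Passing NULL is also accepted; code_object_ is then simply not initialized.
    MacroAssembler masm_no_isolate(NULL, buffer, kBufferSize);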
50 | 53 |
51 | 54 |
52 void MacroAssembler::RecordWriteHelper(Register object, | 55 void MacroAssembler::RecordWriteHelper(Register object, |
53 Register addr, | 56 Register addr, |
54 Register scratch) { | 57 Register scratch) { |
55 if (emit_debug_code()) { | 58 if (emit_debug_code()) { |
56 // Check that the object is not in new space. | 59 // Check that the object is not in new space. |
57 Label not_in_new_space; | 60 Label not_in_new_space; |
58 InNewSpace(object, scratch, not_equal, &not_in_new_space); | 61 InNewSpace(object, scratch, not_equal, &not_in_new_space); |
(...skipping 1962 matching lines...) |
2021 call(Operand(function)); | 2024 call(Operand(function)); |
2022 if (OS::ActivationFrameAlignment() != 0) { | 2025 if (OS::ActivationFrameAlignment() != 0) { |
2023 mov(esp, Operand(esp, num_arguments * kPointerSize)); | 2026 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
2024 } else { | 2027 } else { |
2025 add(Operand(esp), Immediate(num_arguments * kPointerSize)); | 2028 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
2026 } | 2029 } |
2027 } | 2030 } |
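The epilogue above picks between two clean-up strategies: when OS::ActivationFrameAlignment() is non-zero the stack was re-aligned before the call, so the caller's original esp has to be reloaded rather than recomputed; otherwise the argument slots are simply popped with an add. A hedged sketch of the matching set-up this relies on (an assumption about PrepareCallCFunction, not code shown in this diff):

    // Assumed shape of the prologue: align esp downwards and stash the
    // caller's esp just above the outgoing argument slots, so the epilogue's
    // mov(esp, Operand(esp, num_arguments * kPointerSize)) can restore it.
    if (OS::ActivationFrameAlignment() != 0) {
      mov(scratch, esp);                                     // remember caller esp
      sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
      and_(esp, -OS::ActivationFrameAlignment());            // align the stack
      mov(Operand(esp, num_arguments * kPointerSize), scratch);
    } else {
      sub(Operand(esp), Immediate(num_arguments * kPointerSize));
    }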
2028 | 2031 |
2029 | 2032 |
2030 CodePatcher::CodePatcher(byte* address, int size) | 2033 CodePatcher::CodePatcher(byte* address, int size) |
2031 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { | 2034 : address_(address), |
| 2035 size_(size), |
| 2036 masm_(Isolate::Current(), address, size + Assembler::kGap) { |
2032 // Create a new macro assembler pointing to the address of the code to patch. | 2037 // Create a new macro assembler pointing to the address of the code to patch. |
2033 // The size is adjusted with kGap in order for the assembler to generate size | 2038 // The size is adjusted with kGap in order for the assembler to generate size |
2034 // bytes of instructions without failing with buffer size constraints. | 2039 // bytes of instructions without failing with buffer size constraints. |
2035 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2040 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2036 } | 2041 } |
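CodePatcher's public two-argument constructor is unchanged; only the embedded MacroAssembler now receives Isolate::Current() explicitly. A hedged usage sketch (the masm() accessor and the patched instruction are illustrative assumptions, not shown in this diff):

    // Patch a single byte of existing code; the destructor asserts that
    // exactly `size` bytes were emitted and flushes the instruction cache.
    CodePatcher patcher(old_code_address, 1);  // hypothetical address, 1 byte
    patcher.masm()->int3();                    // assumed accessor on CodePatcher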
2037 | 2042 |
2038 | 2043 |
2039 CodePatcher::~CodePatcher() { | 2044 CodePatcher::~CodePatcher() { |
2040 // Indicate that code has changed. | 2045 // Indicate that code has changed. |
2041 CPU::FlushICache(address_, size_); | 2046 CPU::FlushICache(address_, size_); |
2042 | 2047 |
2043 // Check that the code was patched as expected. | 2048 // Check that the code was patched as expected. |
2044 ASSERT(masm_.pc_ == address_ + size_); | 2049 ASSERT(masm_.pc_ == address_ + size_); |
2045 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2050 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2046 } | 2051 } |
2047 | 2052 |
2048 | 2053 |
2049 } } // namespace v8::internal | 2054 } } // namespace v8::internal |
2050 | 2055 |
2051 #endif // V8_TARGET_ARCH_IA32 | 2056 #endif // V8_TARGET_ARCH_IA32 |