| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1970 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1981 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); | 1981 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); |
| 1982 and_(scratch1, kFlatAsciiStringMask); | 1982 and_(scratch1, kFlatAsciiStringMask); |
| 1983 and_(scratch2, kFlatAsciiStringMask); | 1983 and_(scratch2, kFlatAsciiStringMask); |
| 1984 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); | 1984 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); |
| 1985 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); | 1985 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); |
| 1986 j(not_equal, failure); | 1986 j(not_equal, failure); |
| 1987 } | 1987 } |
| 1988 | 1988 |
| 1989 | 1989 |
| 1990 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) { | 1990 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) { |
| 1991 // Reserve space for Isolate address which is always passed as last parameter | 1991 int frame_alignment = OS::ActivationFrameAlignment(); |
| 1992 num_arguments += 1; | 1992 if (frame_alignment != 0) { |
| 1993 | |
| 1994 int frameAlignment = OS::ActivationFrameAlignment(); | |
| 1995 if (frameAlignment != 0) { | |
| 1996 // Make stack end at alignment and make room for num_arguments words | 1993 // Make stack end at alignment and make room for num_arguments words |
| 1997 // and the original value of esp. | 1994 // and the original value of esp. |
| 1998 mov(scratch, esp); | 1995 mov(scratch, esp); |
| 1999 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize)); | 1996 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize)); |
| 2000 ASSERT(IsPowerOf2(frameAlignment)); | 1997 ASSERT(IsPowerOf2(frame_alignment)); |
| 2001 and_(esp, -frameAlignment); | 1998 and_(esp, -frame_alignment); |
| 2002 mov(Operand(esp, num_arguments * kPointerSize), scratch); | 1999 mov(Operand(esp, num_arguments * kPointerSize), scratch); |
| 2003 } else { | 2000 } else { |
| 2004 sub(Operand(esp), Immediate(num_arguments * kPointerSize)); | 2001 sub(Operand(esp), Immediate(num_arguments * kPointerSize)); |
| 2005 } | 2002 } |
| 2006 } | 2003 } |
| 2007 | 2004 |
| 2008 | 2005 |
| 2009 void MacroAssembler::CallCFunction(ExternalReference function, | 2006 void MacroAssembler::CallCFunction(ExternalReference function, |
| 2010 int num_arguments) { | 2007 int num_arguments) { |
| 2011 // Trashing eax is ok as it will be the return value. | 2008 // Trashing eax is ok as it will be the return value. |
| 2012 mov(Operand(eax), Immediate(function)); | 2009 mov(Operand(eax), Immediate(function)); |
| 2013 CallCFunction(eax, num_arguments); | 2010 CallCFunction(eax, num_arguments); |
| 2014 } | 2011 } |
| 2015 | 2012 |
| 2016 | 2013 |
| 2017 void MacroAssembler::CallCFunction(Register function, | 2014 void MacroAssembler::CallCFunction(Register function, |
| 2018 int num_arguments) { | 2015 int num_arguments) { |
| 2019 // Pass current isolate address as additional parameter. | |
| 2020 mov(Operand(esp, num_arguments * kPointerSize), | |
| 2021 Immediate(ExternalReference::isolate_address())); | |
| 2022 num_arguments += 1; | |
| 2023 | |
| 2024 // Check stack alignment. | 2016 // Check stack alignment. |
| 2025 if (emit_debug_code()) { | 2017 if (emit_debug_code()) { |
| 2026 CheckStackAlignment(); | 2018 CheckStackAlignment(); |
| 2027 } | 2019 } |
| 2028 | 2020 |
| 2029 call(Operand(function)); | 2021 call(Operand(function)); |
| 2030 if (OS::ActivationFrameAlignment() != 0) { | 2022 if (OS::ActivationFrameAlignment() != 0) { |
| 2031 mov(esp, Operand(esp, num_arguments * kPointerSize)); | 2023 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
| 2032 } else { | 2024 } else { |
| 2033 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t))); | 2025 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
| 2034 } | 2026 } |
| 2035 } | 2027 } |
| 2036 | 2028 |
| 2037 | 2029 |
| 2038 CodePatcher::CodePatcher(byte* address, int size) | 2030 CodePatcher::CodePatcher(byte* address, int size) |
| 2039 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { | 2031 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { |
| 2040 // Create a new macro assembler pointing to the address of the code to patch. | 2032 // Create a new macro assembler pointing to the address of the code to patch. |
| 2041 // The size is adjusted with kGap in order for the assembler to generate size | 2033 // The size is adjusted with kGap in order for the assembler to generate size |
| 2042 // bytes of instructions without failing with buffer size constraints. | 2034 // bytes of instructions without failing with buffer size constraints. |
| 2043 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2035 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2044 } | 2036 } |
| 2045 | 2037 |
| 2046 | 2038 |
| 2047 CodePatcher::~CodePatcher() { | 2039 CodePatcher::~CodePatcher() { |
| 2048 // Indicate that code has changed. | 2040 // Indicate that code has changed. |
| 2049 CPU::FlushICache(address_, size_); | 2041 CPU::FlushICache(address_, size_); |
| 2050 | 2042 |
| 2051 // Check that the code was patched as expected. | 2043 // Check that the code was patched as expected. |
| 2052 ASSERT(masm_.pc_ == address_ + size_); | 2044 ASSERT(masm_.pc_ == address_ + size_); |
| 2053 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2045 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2054 } | 2046 } |
| 2055 | 2047 |
| 2056 | 2048 |
| 2057 } } // namespace v8::internal | 2049 } } // namespace v8::internal |
| 2058 | 2050 |
| 2059 #endif // V8_TARGET_ARCH_IA32 | 2051 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |