OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 11 matching lines...) |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include <stdlib.h> | 28 #include <stdlib.h> |
29 | 29 |
30 #include "src/v8.h" | 30 #include "src/v8.h" |
31 #include "test/cctest/cctest.h" | 31 #include "test/cctest/cctest.h" |
| 32 #include "test/cctest/heap/heap-utils.h" |
32 | 33 |
33 #include "src/macro-assembler.h" | 34 #include "src/macro-assembler.h" |
34 | 35 |
35 #include "src/arm/macro-assembler-arm.h" | 36 #include "src/arm/macro-assembler-arm.h" |
36 #include "src/arm/simulator-arm.h" | 37 #include "src/arm/simulator-arm.h" |
37 | 38 |
38 | 39 |
39 using namespace v8::internal; | 40 using namespace v8::internal; |
40 | 41 |
41 typedef void* (*F)(int x, int y, int p2, int p3, int p4); | 42 typedef void* (*F)(int x, int y, int p2, int p3, int p4); |
(...skipping 177 matching lines...) |
219 CodeDesc desc; | 220 CodeDesc desc; |
220 masm->GetCode(&desc); | 221 masm->GetCode(&desc); |
221 Handle<Code> code = isolate->factory()->NewCode( | 222 Handle<Code> code = isolate->factory()->NewCode( |
222 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); | 223 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); |
223 | 224 |
224 // Call the function from C++. | 225 // Call the function from C++. |
225 F5 f = FUNCTION_CAST<F5>(code->entry()); | 226 F5 f = FUNCTION_CAST<F5>(code->entry()); |
226 CHECK(!CALL_GENERATED_CODE(isolate, f, 0, 0, 0, 0, 0)); | 227 CHECK(!CALL_GENERATED_CODE(isolate, f, 0, 0, 0, 0, 0)); |
227 } | 228 } |
228 | 229 |
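| // Assembles the code emitted by the given callback into the supplied buffer |
| // and appends a return, so the raw buffer can be called via FUNCTION_CAST. |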
| 230 template <typename Fun> |
| 231 void AssembleFunction(Isolate* isolate, size_t actual_size, byte* buffer, |
| 232 Fun f) { |
| 233 MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size), |
| 234 v8::internal::CodeObjectRequired::kYes); |
| 235 MacroAssembler* masm = &assembler; |
| 236 f(masm); |
| 237 __ bx(lr); |
| 238 |
| 239 CodeDesc desc; |
| 240 masm->GetCode(&desc); |
| 241 } |
| 242 |
| 243 TEST(AllocateMacrosNoGCRequired) { |
| 244 typedef intptr_t (*F0)(int p0, int p1, int p2, int p3, int p4); |
| 245 |
| 246 // Allocate an executable page of memory. |
| 247 size_t actual_size; |
| 248 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate( |
| 249 Assembler::kMinimalBufferSize, &actual_size, true)); |
| 250 CHECK(buffer); |
| 251 Isolate* isolate = CcTest::i_isolate(); |
| 252 HandleScope handles(isolate); |
| 253 |
| 254 AllocationFlags const kDoubleAligned = |
| 255 static_cast<AllocationFlags>(DOUBLE_ALIGNMENT); |
| 256 AllocationFlags const kNoAllocationFlags = |
| 257 static_cast<AllocationFlags>(NO_ALLOCATION_FLAGS); |
| 258 |
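| // Allocate() returns the new object in r0 as a tagged pointer; the macros |
| // below verify the tag and, for double-aligned requests, that the one-word |
| // alignment filler was written just before the object. |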
| 259 #define CHECK_TAGGED(result) CHECK_EQ((result) & kHeapObjectTag, 1) |
| 260 #define CHECK_DOUBLE_ALIGNED(result) \ |
| 261 do { \ |
| 262 CHECK_TAGGED((result)); \ |
| 263 CHECK_EQ((result) & kDoubleAlignmentMaskTagged, 0); \ |
| 264 /* Check that the filler was written in the correct place */ \ |
| 265 CHECK_EQ(*reinterpret_cast<v8::internal::Map**>( \ |
| 266 (result) - (kHeapObjectTag + kPointerSize)), \ |
| 267 *isolate->factory()->one_pointer_filler_map().location()); \ |
| 268 } while (false) |
| 269 |
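| // Clear the new space first so the allocations below have room and should |
| // never reach their gc_required bailout. |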
| 270 heap::GcAndSweep(isolate->heap(), AllocationSpace::NEW_SPACE); |
| 271 { |
| 272 AssembleFunction(isolate, actual_size, buffer, [](MacroAssembler* masm) { |
| 273 Label gc_required, success; |
| 274 __ Allocate(kPointerSize, r0, r1, r2, &gc_required, kNoAllocationFlags); |
| 275 __ jmp(&success); |
| 276 __ bind(&gc_required); |
| 277 __ Abort(kNoReason); |
| 278 __ bind(&success); |
| 279 }); |
| 280 F0 f = FUNCTION_CAST<F0>(buffer); |
| 281 intptr_t test_result = reinterpret_cast<intptr_t>( |
| 282 CALL_GENERATED_CODE(isolate, f, 0, 0, 0, 0, 0)); |
| 283 CHECK_TAGGED(test_result); |
| 284 } |
| 285 |
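| // Fixed-size, double-aligned allocation; the new-space top is deliberately |
| // misaligned so Allocate() has to emit the alignment filler. |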
| 286 { |
| 287 AssembleFunction(isolate, actual_size, buffer, [](MacroAssembler* masm) { |
| 288 Label gc_required, success; |
| 289 __ Allocate(kDoubleSize, r0, r1, r2, &gc_required, kDoubleAligned); |
| 290 __ jmp(&success); |
| 291 __ bind(&gc_required); |
| 292 __ Abort(kNoReason); |
| 293 __ bind(&success); |
| 294 }); |
| 295 heap::MakeSureNewSpaceTopIsNotDoubleAligned(isolate->heap()); |
| 296 F0 f = FUNCTION_CAST<F0>(buffer); |
| 297 intptr_t test_result = reinterpret_cast<intptr_t>( |
| 298 CALL_GENERATED_CODE(isolate, f, 0, 0, 0, 0, 0)); |
| 299 CHECK_DOUBLE_ALIGNED(test_result); |
| 300 } |
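| // Same check as the first block, but with the allocation size passed in a |
| // register (r3) instead of as an immediate. |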
| 301 { |
| 302 AssembleFunction(isolate, actual_size, buffer, [](MacroAssembler* masm) { |
| 303 Label gc_required, success; |
| 304 __ mov(r3, Operand(kPointerSize)); |
| 305 __ Allocate(r3, r0, r1, r2, &gc_required, kNoAllocationFlags); |
| 306 __ jmp(&success); |
| 307 __ bind(&gc_required); |
| 308 __ Abort(kNoReason); |
| 309 __ bind(&success); |
| 310 }); |
| 311 F0 f = FUNCTION_CAST<F0>(buffer); |
| 312 intptr_t test_result = reinterpret_cast<intptr_t>( |
| 313 CALL_GENERATED_CODE(isolate, f, 0, 0, 0, 0, 0)); |
| 314 CHECK_TAGGED(test_result); |
| 315 } |
| 316 |
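| // Double-aligned allocation with the size in a register, again forcing the |
| // alignment filler. |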
| 317 { |
| 318 AssembleFunction(isolate, actual_size, buffer, [](MacroAssembler* masm) { |
| 319 Label gc_required, success; |
| 320 __ mov(r3, Operand(kDoubleSize)); |
| 321 __ Allocate(r3, r0, r1, r2, &gc_required, kDoubleAligned); |
| 322 __ jmp(&success); |
| 323 __ bind(&gc_required); |
| 324 __ Abort(kNoReason); |
| 325 __ bind(&success); |
| 326 }); |
| 327 heap::MakeSureNewSpaceTopIsNotDoubleAligned(isolate->heap()); |
| 328 F0 f = FUNCTION_CAST<F0>(buffer); |
| 329 intptr_t test_result = reinterpret_cast<intptr_t>( |
| 330 CALL_GENERATED_CODE(isolate, f, 0, 0, 0, 0, 0)); |
| 331 CHECK_DOUBLE_ALIGNED(test_result); |
| 332 } |
| 333 |
| 334 #undef CHECK_TAGGED |
| 335 #undef CHECK_DOUBLE_ALIGNED |
| 336 } |
| 337 |
229 #undef __ | 338 #undef __ |