OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 247 matching lines...)
258 set_modrm(1, rsp); | 258 set_modrm(1, rsp); |
259 set_disp8(disp); | 259 set_disp8(disp); |
260 } else { | 260 } else { |
261 set_modrm(2, rsp); | 261 set_modrm(2, rsp); |
262 set_disp32(disp); | 262 set_disp32(disp); |
263 } | 263 } |
264 } | 264 } |
265 | 265 |
266 | 266 |
267 // ----------------------------------------------------------------------------- | 267 // ----------------------------------------------------------------------------- |
268 // Implementation of Assembler | 268 // Implementation of Assembler. |
269 | 269 |
270 #ifdef GENERATED_CODE_COVERAGE | 270 #ifdef GENERATED_CODE_COVERAGE |
271 static void InitCoverageLog(); | 271 static void InitCoverageLog(); |
272 #endif | 272 #endif |
273 | 273 |
274 byte* Assembler::spare_buffer_ = NULL; | 274 byte* Assembler::spare_buffer_ = NULL; |
275 | 275 |
276 Assembler::Assembler(void* buffer, int buffer_size) | 276 Assembler::Assembler(void* buffer, int buffer_size) |
277 : code_targets_(100) { | 277 : code_targets_(100) { |
278 if (buffer == NULL) { | 278 if (buffer == NULL) { |
279 // do our own buffer management | 279 // Do our own buffer management. |
280 if (buffer_size <= kMinimalBufferSize) { | 280 if (buffer_size <= kMinimalBufferSize) { |
281 buffer_size = kMinimalBufferSize; | 281 buffer_size = kMinimalBufferSize; |
282 | 282 |
283 if (spare_buffer_ != NULL) { | 283 if (spare_buffer_ != NULL) { |
284 buffer = spare_buffer_; | 284 buffer = spare_buffer_; |
285 spare_buffer_ = NULL; | 285 spare_buffer_ = NULL; |
286 } | 286 } |
287 } | 287 } |
288 if (buffer == NULL) { | 288 if (buffer == NULL) { |
289 buffer_ = NewArray<byte>(buffer_size); | 289 buffer_ = NewArray<byte>(buffer_size); |
290 } else { | 290 } else { |
291 buffer_ = static_cast<byte*>(buffer); | 291 buffer_ = static_cast<byte*>(buffer); |
292 } | 292 } |
293 buffer_size_ = buffer_size; | 293 buffer_size_ = buffer_size; |
294 own_buffer_ = true; | 294 own_buffer_ = true; |
295 } else { | 295 } else { |
296 // use externally provided buffer instead | 296 // Use externally provided buffer instead. |
297 ASSERT(buffer_size > 0); | 297 ASSERT(buffer_size > 0); |
298 buffer_ = static_cast<byte*>(buffer); | 298 buffer_ = static_cast<byte*>(buffer); |
299 buffer_size_ = buffer_size; | 299 buffer_size_ = buffer_size; |
300 own_buffer_ = false; | 300 own_buffer_ = false; |
301 } | 301 } |
302 | 302 |
303 // Clear the buffer in debug mode unless it was provided by the | 303 // Clear the buffer in debug mode unless it was provided by the |
304 // caller in which case we can't be sure it's okay to overwrite | 304 // caller, in which case we can't be sure it's okay to overwrite |
305 // existing code in it. | 305 // existing code in it. |
306 #ifdef DEBUG | 306 #ifdef DEBUG |
307 if (own_buffer_) { | 307 if (own_buffer_) { |
308 memset(buffer_, 0xCC, buffer_size); // int3 | 308 memset(buffer_, 0xCC, buffer_size); // int3 |
309 } | 309 } |
310 #endif | 310 #endif |
311 | 311 |
312 // setup buffer pointers | 312 // Setup buffer pointers. |
313 ASSERT(buffer_ != NULL); | 313 ASSERT(buffer_ != NULL); |
314 pc_ = buffer_; | 314 pc_ = buffer_; |
315 reloc_info_writer.Reposition(buffer_ + buffer_size, pc_); | 315 reloc_info_writer.Reposition(buffer_ + buffer_size, pc_); |
316 | 316 |
317 last_pc_ = NULL; | 317 last_pc_ = NULL; |
318 current_statement_position_ = RelocInfo::kNoPosition; | 318 current_statement_position_ = RelocInfo::kNoPosition; |
319 current_position_ = RelocInfo::kNoPosition; | 319 current_position_ = RelocInfo::kNoPosition; |
320 written_statement_position_ = current_statement_position_; | 320 written_statement_position_ = current_statement_position_; |
321 written_position_ = current_position_; | 321 written_position_ = current_position_; |
322 #ifdef GENERATED_CODE_COVERAGE | 322 #ifdef GENERATED_CODE_COVERAGE |
323 InitCoverageLog(); | 323 InitCoverageLog(); |
324 #endif | 324 #endif |
325 } | 325 } |
326 | 326 |
327 | 327 |
328 Assembler::~Assembler() { | 328 Assembler::~Assembler() { |
329 if (own_buffer_) { | 329 if (own_buffer_) { |
330 if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) { | 330 if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) { |
331 spare_buffer_ = buffer_; | 331 spare_buffer_ = buffer_; |
332 } else { | 332 } else { |
333 DeleteArray(buffer_); | 333 DeleteArray(buffer_); |
334 } | 334 } |
335 } | 335 } |
336 } | 336 } |
337 | 337 |
338 | 338 |
339 void Assembler::GetCode(CodeDesc* desc) { | 339 void Assembler::GetCode(CodeDesc* desc) { |
340 // finalize code | 340 // Finalize code (at this point overflow() may be true, but the gap ensures |
341 // (at this point overflow() may be true, but the gap ensures that | 341 // that we are still not overlapping instructions and relocation info). |
342 // we are still not overlapping instructions and relocation info) | 342 ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap. |
343 ASSERT(pc_ <= reloc_info_writer.pos()); // no overlap | 343 // Setup code descriptor. |
344 // setup desc | |
345 desc->buffer = buffer_; | 344 desc->buffer = buffer_; |
346 desc->buffer_size = buffer_size_; | 345 desc->buffer_size = buffer_size_; |
347 desc->instr_size = pc_offset(); | 346 desc->instr_size = pc_offset(); |
348 ASSERT(desc->instr_size > 0); // Zero-size code objects upset the system. | 347 ASSERT(desc->instr_size > 0); // Zero-size code objects upset the system. |
349 desc->reloc_size = | 348 desc->reloc_size = |
350 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos()); | 349 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos()); |
351 desc->origin = this; | 350 desc->origin = this; |
352 | 351 |
353 Counters::reloc_info_size.Increment(desc->reloc_size); | 352 Counters::reloc_info_size.Increment(desc->reloc_size); |
354 } | 353 } |
355 | 354 |
356 | 355 |
357 void Assembler::Align(int m) { | 356 void Assembler::Align(int m) { |
358 ASSERT(IsPowerOf2(m)); | 357 ASSERT(IsPowerOf2(m)); |
359 while ((pc_offset() & (m - 1)) != 0) { | 358 while ((pc_offset() & (m - 1)) != 0) { |
360 nop(); | 359 nop(); |
361 } | 360 } |
362 } | 361 } |
363 | 362 |
364 | 363 |
365 void Assembler::bind_to(Label* L, int pos) { | 364 void Assembler::bind_to(Label* L, int pos) { |
366 ASSERT(!L->is_bound()); // Label may only be bound once. | 365 ASSERT(!L->is_bound()); // Label may only be bound once. |
367 last_pc_ = NULL; | 366 last_pc_ = NULL; |
368 ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid. | 367 ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid. |
369 if (L->is_linked()) { | 368 if (L->is_linked()) { |
370 int current = L->pos(); | 369 int current = L->pos(); |
371 int next = long_at(current); | 370 int next = long_at(current); |
372 while (next != current) { | 371 while (next != current) { |
373 // relative address, relative to point after address | 372 // Relative address, relative to point after address. |
374 int imm32 = pos - (current + sizeof(int32_t)); | 373 int imm32 = pos - (current + sizeof(int32_t)); |
375 long_at_put(current, imm32); | 374 long_at_put(current, imm32); |
376 current = next; | 375 current = next; |
377 next = long_at(next); | 376 next = long_at(next); |
378 } | 377 } |
379 // Fix up last fixup on linked list. | 378 // Fix up last fixup on linked list. |
380 int last_imm32 = pos - (current + sizeof(int32_t)); | 379 int last_imm32 = pos - (current + sizeof(int32_t)); |
381 long_at_put(current, last_imm32); | 380 long_at_put(current, last_imm32); |
382 } | 381 } |
383 L->bind_to(pos); | 382 L->bind_to(pos); |
384 } | 383 } |
385 | 384 |
386 | 385 |
387 void Assembler::bind(Label* L) { | 386 void Assembler::bind(Label* L) { |
388 bind_to(L, pc_offset()); | 387 bind_to(L, pc_offset()); |
389 } | 388 } |
390 | 389 |
391 | 390 |
392 void Assembler::GrowBuffer() { | 391 void Assembler::GrowBuffer() { |
393 ASSERT(buffer_overflow()); // should not call this otherwise | 392 ASSERT(buffer_overflow()); |
394 if (!own_buffer_) FATAL("external code buffer is too small"); | 393 if (!own_buffer_) FATAL("external code buffer is too small"); |
395 | 394 |
396 // compute new buffer size | 395 // Compute new buffer size. |
397 CodeDesc desc; // the new buffer | 396 CodeDesc desc; // The new buffer. |
398 if (buffer_size_ < 4*KB) { | 397 if (buffer_size_ < 4*KB) { |
399 desc.buffer_size = 4*KB; | 398 desc.buffer_size = 4*KB; |
400 } else { | 399 } else { |
401 desc.buffer_size = 2*buffer_size_; | 400 desc.buffer_size = 2*buffer_size_; |
402 } | 401 } |
403 // Some internal data structures overflow for very large buffers, | 402 // Some internal data structures overflow for very large buffers, |
404 // they must ensure that kMaximalBufferSize is not too large. | 403 // so we must ensure that kMaximalBufferSize is not too large. |
405 if ((desc.buffer_size > kMaximalBufferSize) || | 404 if ((desc.buffer_size > kMaximalBufferSize) || |
406 (desc.buffer_size > Heap::MaxOldGenerationSize())) { | 405 (desc.buffer_size > Heap::MaxOldGenerationSize())) { |
407 V8::FatalProcessOutOfMemory("Assembler::GrowBuffer"); | 406 V8::FatalProcessOutOfMemory("Assembler::GrowBuffer"); |
408 } | 407 } |
409 | 408 |
410 // setup new buffer | 409 // Setup new buffer. |
411 desc.buffer = NewArray<byte>(desc.buffer_size); | 410 desc.buffer = NewArray<byte>(desc.buffer_size); |
412 desc.instr_size = pc_offset(); | 411 desc.instr_size = pc_offset(); |
413 desc.reloc_size = | 412 desc.reloc_size = |
414 static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos())); | 413 static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos())); |
415 | 414 |
416 // Clear the buffer in debug mode. Use 'int3' instructions to make | 415 // Clear the buffer in debug mode. Use 'int3' instructions to make |
417 // sure to get into problems if we ever run uninitialized code. | 416 // sure we get into problems if we ever run uninitialized code. |
418 #ifdef DEBUG | 417 #ifdef DEBUG |
419 memset(desc.buffer, 0xCC, desc.buffer_size); | 418 memset(desc.buffer, 0xCC, desc.buffer_size); |
420 #endif | 419 #endif |
421 | 420 |
422 // copy the data | 421 // Copy the data. |
423 intptr_t pc_delta = desc.buffer - buffer_; | 422 intptr_t pc_delta = desc.buffer - buffer_; |
424 intptr_t rc_delta = (desc.buffer + desc.buffer_size) - | 423 intptr_t rc_delta = (desc.buffer + desc.buffer_size) - |
425 (buffer_ + buffer_size_); | 424 (buffer_ + buffer_size_); |
426 memmove(desc.buffer, buffer_, desc.instr_size); | 425 memmove(desc.buffer, buffer_, desc.instr_size); |
427 memmove(rc_delta + reloc_info_writer.pos(), | 426 memmove(rc_delta + reloc_info_writer.pos(), |
428 reloc_info_writer.pos(), desc.reloc_size); | 427 reloc_info_writer.pos(), desc.reloc_size); |
429 | 428 |
430 // switch buffers | 429 // Switch buffers. |
431 if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) { | 430 if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) { |
432 spare_buffer_ = buffer_; | 431 spare_buffer_ = buffer_; |
433 } else { | 432 } else { |
434 DeleteArray(buffer_); | 433 DeleteArray(buffer_); |
435 } | 434 } |
436 buffer_ = desc.buffer; | 435 buffer_ = desc.buffer; |
437 buffer_size_ = desc.buffer_size; | 436 buffer_size_ = desc.buffer_size; |
438 pc_ += pc_delta; | 437 pc_ += pc_delta; |
439 if (last_pc_ != NULL) { | 438 if (last_pc_ != NULL) { |
440 last_pc_ += pc_delta; | 439 last_pc_ += pc_delta; |
441 } | 440 } |
442 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, | 441 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, |
443 reloc_info_writer.last_pc() + pc_delta); | 442 reloc_info_writer.last_pc() + pc_delta); |
444 | 443 |
445 // relocate runtime entries | 444 // Relocate runtime entries. |
446 for (RelocIterator it(desc); !it.done(); it.next()) { | 445 for (RelocIterator it(desc); !it.done(); it.next()) { |
447 RelocInfo::Mode rmode = it.rinfo()->rmode(); | 446 RelocInfo::Mode rmode = it.rinfo()->rmode(); |
448 if (rmode == RelocInfo::INTERNAL_REFERENCE) { | 447 if (rmode == RelocInfo::INTERNAL_REFERENCE) { |
449 intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc()); | 448 intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc()); |
450 if (*p != 0) { // 0 means uninitialized. | 449 if (*p != 0) { // 0 means uninitialized. |
451 *p += pc_delta; | 450 *p += pc_delta; |
452 } | 451 } |
453 } | 452 } |
454 } | 453 } |
455 | 454 |
456 ASSERT(!buffer_overflow()); | 455 ASSERT(!buffer_overflow()); |
457 } | 456 } |
458 | 457 |
459 | 458 |
460 void Assembler::emit_operand(int code, const Operand& adr) { | 459 void Assembler::emit_operand(int code, const Operand& adr) { |
461 ASSERT(is_uint3(code)); | 460 ASSERT(is_uint3(code)); |
462 const unsigned length = adr.len_; | 461 const unsigned length = adr.len_; |
463 ASSERT(length > 0); | 462 ASSERT(length > 0); |
464 | 463 |
465 // Emit updated ModR/M byte containing the given register. | 464 // Emit updated ModR/M byte containing the given register. |
466 ASSERT((adr.buf_[0] & 0x38) == 0); | 465 ASSERT((adr.buf_[0] & 0x38) == 0); |
467 pc_[0] = adr.buf_[0] | code << 3; | 466 pc_[0] = adr.buf_[0] | code << 3; |
468 | 467 |
469 // Emit the rest of the encoded operand. | 468 // Emit the rest of the encoded operand. |
470 for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i]; | 469 for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i]; |
471 pc_ += length; | 470 pc_ += length; |
472 } | 471 } |
473 | 472 |
474 | 473 |
475 // Assembler Instruction implementations | 474 // Assembler Instruction implementations. |
476 | 475 |
477 void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) { | 476 void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) { |
478 EnsureSpace ensure_space(this); | 477 EnsureSpace ensure_space(this); |
479 last_pc_ = pc_; | 478 last_pc_ = pc_; |
480 emit_rex_64(reg, op); | 479 emit_rex_64(reg, op); |
481 emit(opcode); | 480 emit(opcode); |
482 emit_operand(reg, op); | 481 emit_operand(reg, op); |
483 } | 482 } |
484 | 483 |
485 | 484 |
(...skipping 263 matching lines...)
749 emit_rex_64(src, dst); | 748 emit_rex_64(src, dst); |
750 emit(0x0F); | 749 emit(0x0F); |
751 emit(0xAB); | 750 emit(0xAB); |
752 emit_operand(src, dst); | 751 emit_operand(src, dst); |
753 } | 752 } |
754 | 753 |
755 | 754 |
756 void Assembler::call(Label* L) { | 755 void Assembler::call(Label* L) { |
757 EnsureSpace ensure_space(this); | 756 EnsureSpace ensure_space(this); |
758 last_pc_ = pc_; | 757 last_pc_ = pc_; |
759 // 1110 1000 #32-bit disp | 758 // 1110 1000 #32-bit disp. |
760 emit(0xE8); | 759 emit(0xE8); |
761 if (L->is_bound()) { | 760 if (L->is_bound()) { |
762 int offset = L->pos() - pc_offset() - sizeof(int32_t); | 761 int offset = L->pos() - pc_offset() - sizeof(int32_t); |
763 ASSERT(offset <= 0); | 762 ASSERT(offset <= 0); |
764 emitl(offset); | 763 emitl(offset); |
765 } else if (L->is_linked()) { | 764 } else if (L->is_linked()) { |
766 emitl(L->pos()); | 765 emitl(L->pos()); |
767 L->link_to(pc_offset() - sizeof(int32_t)); | 766 L->link_to(pc_offset() - sizeof(int32_t)); |
768 } else { | 767 } else { |
769 ASSERT(L->is_unused()); | 768 ASSERT(L->is_unused()); |
770 int32_t current = pc_offset(); | 769 int32_t current = pc_offset(); |
771 emitl(current); | 770 emitl(current); |
772 L->link_to(current); | 771 L->link_to(current); |
773 } | 772 } |
774 } | 773 } |
775 | 774 |
776 | 775 |
777 void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) { | 776 void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) { |
778 EnsureSpace ensure_space(this); | 777 EnsureSpace ensure_space(this); |
779 last_pc_ = pc_; | 778 last_pc_ = pc_; |
780 // 1110 1000 #32-bit disp | 779 // 1110 1000 #32-bit disp. |
781 emit(0xE8); | 780 emit(0xE8); |
782 emit_code_target(target, rmode); | 781 emit_code_target(target, rmode); |
783 } | 782 } |
784 | 783 |
785 | 784 |
786 void Assembler::call(Register adr) { | 785 void Assembler::call(Register adr) { |
787 EnsureSpace ensure_space(this); | 786 EnsureSpace ensure_space(this); |
788 last_pc_ = pc_; | 787 last_pc_ = pc_; |
789 // Opcode: FF /2 r64 | 788 // Opcode: FF /2 r64. |
790 if (adr.high_bit()) { | 789 if (adr.high_bit()) { |
791 emit_rex_64(adr); | 790 emit_rex_64(adr); |
792 } | 791 } |
793 emit(0xFF); | 792 emit(0xFF); |
794 emit_modrm(0x2, adr); | 793 emit_modrm(0x2, adr); |
795 } | 794 } |
796 | 795 |
797 | 796 |
798 void Assembler::call(const Operand& op) { | 797 void Assembler::call(const Operand& op) { |
799 EnsureSpace ensure_space(this); | 798 EnsureSpace ensure_space(this); |
800 last_pc_ = pc_; | 799 last_pc_ = pc_; |
801 // Opcode: FF /2 m64 | 800 // Opcode: FF /2 m64. |
802 emit_rex_64(op); | 801 emit_rex_64(op); |
803 emit(0xFF); | 802 emit(0xFF); |
804 emit_operand(2, op); | 803 emit_operand(2, op); |
805 } | 804 } |
806 | 805 |
807 | 806 |
808 void Assembler::clc() { | 807 void Assembler::clc() { |
809 EnsureSpace ensure_space(this); | 808 EnsureSpace ensure_space(this); |
810 last_pc_ = pc_; | 809 last_pc_ = pc_; |
811 emit(0xF8); | 810 emit(0xF8); |
(...skipping 10 matching lines...)
822 if (cc == always) { | 821 if (cc == always) { |
823 movq(dst, src); | 822 movq(dst, src); |
824 } else if (cc == never) { | 823 } else if (cc == never) { |
825 return; | 824 return; |
826 } | 825 } |
827 // No need to check CpuInfo for CMOV support, it's a required part of the | 826 // No need to check CpuInfo for CMOV support, it's a required part of the |
828 // 64-bit architecture. | 827 // 64-bit architecture. |
829 ASSERT(cc >= 0); // Use mov for unconditional moves. | 828 ASSERT(cc >= 0); // Use mov for unconditional moves. |
830 EnsureSpace ensure_space(this); | 829 EnsureSpace ensure_space(this); |
831 last_pc_ = pc_; | 830 last_pc_ = pc_; |
832 // Opcode: REX.W 0f 40 + cc /r | 831 // Opcode: REX.W 0f 40 + cc /r. |
833 emit_rex_64(dst, src); | 832 emit_rex_64(dst, src); |
834 emit(0x0f); | 833 emit(0x0f); |
835 emit(0x40 + cc); | 834 emit(0x40 + cc); |
836 emit_modrm(dst, src); | 835 emit_modrm(dst, src); |
837 } | 836 } |
838 | 837 |
839 | 838 |
840 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) { | 839 void Assembler::cmovq(Condition cc, Register dst, const Operand& src) { |
841 if (cc == always) { | 840 if (cc == always) { |
842 movq(dst, src); | 841 movq(dst, src); |
843 } else if (cc == never) { | 842 } else if (cc == never) { |
844 return; | 843 return; |
845 } | 844 } |
846 ASSERT(cc >= 0); | 845 ASSERT(cc >= 0); |
847 EnsureSpace ensure_space(this); | 846 EnsureSpace ensure_space(this); |
848 last_pc_ = pc_; | 847 last_pc_ = pc_; |
849 // Opcode: REX.W 0f 40 + cc /r | 848 // Opcode: REX.W 0f 40 + cc /r. |
850 emit_rex_64(dst, src); | 849 emit_rex_64(dst, src); |
851 emit(0x0f); | 850 emit(0x0f); |
852 emit(0x40 + cc); | 851 emit(0x40 + cc); |
853 emit_operand(dst, src); | 852 emit_operand(dst, src); |
854 } | 853 } |
855 | 854 |
856 | 855 |
857 void Assembler::cmovl(Condition cc, Register dst, Register src) { | 856 void Assembler::cmovl(Condition cc, Register dst, Register src) { |
858 if (cc == always) { | 857 if (cc == always) { |
859 movl(dst, src); | 858 movl(dst, src); |
860 } else if (cc == never) { | 859 } else if (cc == never) { |
861 return; | 860 return; |
862 } | 861 } |
863 ASSERT(cc >= 0); | 862 ASSERT(cc >= 0); |
864 EnsureSpace ensure_space(this); | 863 EnsureSpace ensure_space(this); |
865 last_pc_ = pc_; | 864 last_pc_ = pc_; |
866 // Opcode: 0f 40 + cc /r | 865 // Opcode: 0f 40 + cc /r. |
867 emit_optional_rex_32(dst, src); | 866 emit_optional_rex_32(dst, src); |
868 emit(0x0f); | 867 emit(0x0f); |
869 emit(0x40 + cc); | 868 emit(0x40 + cc); |
870 emit_modrm(dst, src); | 869 emit_modrm(dst, src); |
871 } | 870 } |
872 | 871 |
873 | 872 |
874 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) { | 873 void Assembler::cmovl(Condition cc, Register dst, const Operand& src) { |
875 if (cc == always) { | 874 if (cc == always) { |
876 movl(dst, src); | 875 movl(dst, src); |
877 } else if (cc == never) { | 876 } else if (cc == never) { |
878 return; | 877 return; |
879 } | 878 } |
880 ASSERT(cc >= 0); | 879 ASSERT(cc >= 0); |
881 EnsureSpace ensure_space(this); | 880 EnsureSpace ensure_space(this); |
882 last_pc_ = pc_; | 881 last_pc_ = pc_; |
883 // Opcode: 0f 40 + cc /r | 882 // Opcode: 0f 40 + cc /r. |
884 emit_optional_rex_32(dst, src); | 883 emit_optional_rex_32(dst, src); |
885 emit(0x0f); | 884 emit(0x0f); |
886 emit(0x40 + cc); | 885 emit(0x40 + cc); |
887 emit_operand(dst, src); | 886 emit_operand(dst, src); |
888 } | 887 } |
889 | 888 |
890 | 889 |
891 void Assembler::cmpb_al(Immediate imm8) { | 890 void Assembler::cmpb_al(Immediate imm8) { |
892 ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_)); | 891 ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_)); |
893 EnsureSpace ensure_space(this); | 892 EnsureSpace ensure_space(this); |
(...skipping 209 matching lines...)
1103 } | 1102 } |
1104 EnsureSpace ensure_space(this); | 1103 EnsureSpace ensure_space(this); |
1105 last_pc_ = pc_; | 1104 last_pc_ = pc_; |
1106 ASSERT(is_uint4(cc)); | 1105 ASSERT(is_uint4(cc)); |
1107 if (L->is_bound()) { | 1106 if (L->is_bound()) { |
1108 const int short_size = 2; | 1107 const int short_size = 2; |
1109 const int long_size = 6; | 1108 const int long_size = 6; |
1110 int offs = L->pos() - pc_offset(); | 1109 int offs = L->pos() - pc_offset(); |
1111 ASSERT(offs <= 0); | 1110 ASSERT(offs <= 0); |
1112 if (is_int8(offs - short_size)) { | 1111 if (is_int8(offs - short_size)) { |
1113 // 0111 tttn #8-bit disp | 1112 // 0111 tttn #8-bit disp. |
1114 emit(0x70 | cc); | 1113 emit(0x70 | cc); |
1115 emit((offs - short_size) & 0xFF); | 1114 emit((offs - short_size) & 0xFF); |
1116 } else { | 1115 } else { |
1117 // 0000 1111 1000 tttn #32-bit disp | 1116 // 0000 1111 1000 tttn #32-bit disp. |
1118 emit(0x0F); | 1117 emit(0x0F); |
1119 emit(0x80 | cc); | 1118 emit(0x80 | cc); |
1120 emitl(offs - long_size); | 1119 emitl(offs - long_size); |
1121 } | 1120 } |
1122 } else if (L->is_linked()) { | 1121 } else if (L->is_linked()) { |
1123 // 0000 1111 1000 tttn #32-bit disp | 1122 // 0000 1111 1000 tttn #32-bit disp. |
1124 emit(0x0F); | 1123 emit(0x0F); |
1125 emit(0x80 | cc); | 1124 emit(0x80 | cc); |
1126 emitl(L->pos()); | 1125 emitl(L->pos()); |
1127 L->link_to(pc_offset() - sizeof(int32_t)); | 1126 L->link_to(pc_offset() - sizeof(int32_t)); |
1128 } else { | 1127 } else { |
1129 ASSERT(L->is_unused()); | 1128 ASSERT(L->is_unused()); |
1130 emit(0x0F); | 1129 emit(0x0F); |
1131 emit(0x80 | cc); | 1130 emit(0x80 | cc); |
1132 int32_t current = pc_offset(); | 1131 int32_t current = pc_offset(); |
1133 emitl(current); | 1132 emitl(current); |
1134 L->link_to(current); | 1133 L->link_to(current); |
1135 } | 1134 } |
1136 } | 1135 } |
1137 | 1136 |
1138 | 1137 |
1139 void Assembler::j(Condition cc, | 1138 void Assembler::j(Condition cc, |
1140 Handle<Code> target, | 1139 Handle<Code> target, |
1141 RelocInfo::Mode rmode) { | 1140 RelocInfo::Mode rmode) { |
1142 EnsureSpace ensure_space(this); | 1141 EnsureSpace ensure_space(this); |
1143 last_pc_ = pc_; | 1142 last_pc_ = pc_; |
1144 ASSERT(is_uint4(cc)); | 1143 ASSERT(is_uint4(cc)); |
1145 // 0000 1111 1000 tttn #32-bit disp | 1144 // 0000 1111 1000 tttn #32-bit disp. |
1146 emit(0x0F); | 1145 emit(0x0F); |
1147 emit(0x80 | cc); | 1146 emit(0x80 | cc); |
1148 emit_code_target(target, rmode); | 1147 emit_code_target(target, rmode); |
1149 } | 1148 } |
1150 | 1149 |
1151 | 1150 |
1152 void Assembler::jmp(Label* L) { | 1151 void Assembler::jmp(Label* L) { |
1153 EnsureSpace ensure_space(this); | 1152 EnsureSpace ensure_space(this); |
1154 last_pc_ = pc_; | 1153 last_pc_ = pc_; |
1155 if (L->is_bound()) { | 1154 if (L->is_bound()) { |
1156 int offs = L->pos() - pc_offset() - 1; | 1155 int offs = L->pos() - pc_offset() - 1; |
1157 ASSERT(offs <= 0); | 1156 ASSERT(offs <= 0); |
1158 if (is_int8(offs - sizeof(int8_t))) { | 1157 if (is_int8(offs - sizeof(int8_t))) { |
1159 // 1110 1011 #8-bit disp | 1158 // 1110 1011 #8-bit disp. |
1160 emit(0xEB); | 1159 emit(0xEB); |
1161 emit((offs - sizeof(int8_t)) & 0xFF); | 1160 emit((offs - sizeof(int8_t)) & 0xFF); |
1162 } else { | 1161 } else { |
1163 // 1110 1001 #32-bit disp | 1162 // 1110 1001 #32-bit disp. |
1164 emit(0xE9); | 1163 emit(0xE9); |
1165 emitl(offs - sizeof(int32_t)); | 1164 emitl(offs - sizeof(int32_t)); |
1166 } | 1165 } |
1167 } else if (L->is_linked()) { | 1166 } else if (L->is_linked()) { |
1168 // 1110 1001 #32-bit disp | 1167 // 1110 1001 #32-bit disp. |
1169 emit(0xE9); | 1168 emit(0xE9); |
1170 emitl(L->pos()); | 1169 emitl(L->pos()); |
1171 L->link_to(pc_offset() - sizeof(int32_t)); | 1170 L->link_to(pc_offset() - sizeof(int32_t)); |
1172 } else { | 1171 } else { |
1173 // 1110 1001 #32-bit disp | 1172 // 1110 1001 #32-bit disp. |
1174 ASSERT(L->is_unused()); | 1173 ASSERT(L->is_unused()); |
1175 emit(0xE9); | 1174 emit(0xE9); |
1176 int32_t current = pc_offset(); | 1175 int32_t current = pc_offset(); |
1177 emitl(current); | 1176 emitl(current); |
1178 L->link_to(current); | 1177 L->link_to(current); |
1179 } | 1178 } |
1180 } | 1179 } |
1181 | 1180 |
1182 | 1181 |
1183 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) { | 1182 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) { |
1184 EnsureSpace ensure_space(this); | 1183 EnsureSpace ensure_space(this); |
1185 last_pc_ = pc_; | 1184 last_pc_ = pc_; |
1186 // 1110 1001 #32-bit disp | 1185 // 1110 1001 #32-bit disp. |
1187 emit(0xE9); | 1186 emit(0xE9); |
1188 emit_code_target(target, rmode); | 1187 emit_code_target(target, rmode); |
1189 } | 1188 } |
1190 | 1189 |
1191 | 1190 |
1192 void Assembler::jmp(Register target) { | 1191 void Assembler::jmp(Register target) { |
1193 EnsureSpace ensure_space(this); | 1192 EnsureSpace ensure_space(this); |
1194 last_pc_ = pc_; | 1193 last_pc_ = pc_; |
1195 // Opcode FF/4 r64 | 1194 // Opcode FF/4 r64. |
1196 if (target.high_bit()) { | 1195 if (target.high_bit()) { |
1197 emit_rex_64(target); | 1196 emit_rex_64(target); |
1198 } | 1197 } |
1199 emit(0xFF); | 1198 emit(0xFF); |
1200 emit_modrm(0x4, target); | 1199 emit_modrm(0x4, target); |
1201 } | 1200 } |
1202 | 1201 |
1203 | 1202 |
1204 void Assembler::jmp(const Operand& src) { | 1203 void Assembler::jmp(const Operand& src) { |
1205 EnsureSpace ensure_space(this); | 1204 EnsureSpace ensure_space(this); |
1206 last_pc_ = pc_; | 1205 last_pc_ = pc_; |
1207 // Opcode FF/4 m64 | 1206 // Opcode FF/4 m64. |
1208 emit_optional_rex_32(src); | 1207 emit_optional_rex_32(src); |
1209 emit(0xFF); | 1208 emit(0xFF); |
1210 emit_operand(0x4, src); | 1209 emit_operand(0x4, src); |
1211 } | 1210 } |
1212 | 1211 |
1213 | 1212 |
1214 void Assembler::lea(Register dst, const Operand& src) { | 1213 void Assembler::lea(Register dst, const Operand& src) { |
1215 EnsureSpace ensure_space(this); | 1214 EnsureSpace ensure_space(this); |
1216 last_pc_ = pc_; | 1215 last_pc_ = pc_; |
1217 emit_rex_64(dst, src); | 1216 emit_rex_64(dst, src); |
(...skipping 188 matching lines...)
1406 void Assembler::movq(const Operand& dst, Immediate value) { | 1405 void Assembler::movq(const Operand& dst, Immediate value) { |
1407 EnsureSpace ensure_space(this); | 1406 EnsureSpace ensure_space(this); |
1408 last_pc_ = pc_; | 1407 last_pc_ = pc_; |
1409 emit_rex_64(dst); | 1408 emit_rex_64(dst); |
1410 emit(0xC7); | 1409 emit(0xC7); |
1411 emit_operand(0, dst); | 1410 emit_operand(0, dst); |
1412 emit(value); | 1411 emit(value); |
1413 } | 1412 } |
1414 | 1413 |
1415 | 1414 |
1416 /* | 1415 // Loads the ip-relative location of the src label into the target location |
1417 * Loads the ip-relative location of the src label into the target | 1416 // (as a 32-bit offset sign extended to 64-bit). |
1418 * location (as a 32-bit offset sign extended to 64-bit). | |
1419 */ | |
1420 void Assembler::movl(const Operand& dst, Label* src) { | 1417 void Assembler::movl(const Operand& dst, Label* src) { |
1421 EnsureSpace ensure_space(this); | 1418 EnsureSpace ensure_space(this); |
1422 last_pc_ = pc_; | 1419 last_pc_ = pc_; |
1423 emit_optional_rex_32(dst); | 1420 emit_optional_rex_32(dst); |
1424 emit(0xC7); | 1421 emit(0xC7); |
1425 emit_operand(0, dst); | 1422 emit_operand(0, dst); |
1426 if (src->is_bound()) { | 1423 if (src->is_bound()) { |
1427 int offset = src->pos() - pc_offset() - sizeof(int32_t); | 1424 int offset = src->pos() - pc_offset() - sizeof(int32_t); |
1428 ASSERT(offset <= 0); | 1425 ASSERT(offset <= 0); |
1429 emitl(offset); | 1426 emitl(offset); |
(...skipping 569 matching lines...)
1999 emit(mask); | 1996 emit(mask); |
2000 } else { | 1997 } else { |
2001 emit_rex_64(dst); | 1998 emit_rex_64(dst); |
2002 emit(0xF7); | 1999 emit(0xF7); |
2003 emit_modrm(0, dst); | 2000 emit_modrm(0, dst); |
2004 emit(mask); | 2001 emit(mask); |
2005 } | 2002 } |
2006 } | 2003 } |
2007 | 2004 |
2008 | 2005 |
2009 // FPU instructions | 2006 // FPU instructions. |
2010 | 2007 |
2011 | 2008 |
2012 void Assembler::fld(int i) { | 2009 void Assembler::fld(int i) { |
2013 EnsureSpace ensure_space(this); | 2010 EnsureSpace ensure_space(this); |
2014 last_pc_ = pc_; | 2011 last_pc_ = pc_; |
2015 emit_farith(0xD9, 0xC0, i); | 2012 emit_farith(0xD9, 0xC0, i); |
2016 } | 2013 } |
2017 | 2014 |
2018 | 2015 |
2019 void Assembler::fld1() { | 2016 void Assembler::fld1() { |
(...skipping 350 matching lines...)
2370 } | 2367 } |
2371 | 2368 |
2372 | 2369 |
2373 void Assembler::emit_farith(int b1, int b2, int i) { | 2370 void Assembler::emit_farith(int b1, int b2, int i) { |
2374 ASSERT(is_uint8(b1) && is_uint8(b2)); // wrong opcode | 2371 ASSERT(is_uint8(b1) && is_uint8(b2)); // Wrong opcode. |
2375 ASSERT(is_uint3(i)); // illegal stack offset | 2372 ASSERT(is_uint3(i)); // Illegal stack offset. |
2376 emit(b1); | 2373 emit(b1); |
2377 emit(b2 + i); | 2374 emit(b2 + i); |
2378 } | 2375 } |
2379 | 2376 |
2380 // SSE 2 operations | 2377 // SSE 2 operations. |
2381 | 2378 |
2382 void Assembler::movsd(const Operand& dst, XMMRegister src) { | 2379 void Assembler::movsd(const Operand& dst, XMMRegister src) { |
2383 EnsureSpace ensure_space(this); | 2380 EnsureSpace ensure_space(this); |
2384 last_pc_ = pc_; | 2381 last_pc_ = pc_; |
2385 emit(0xF2); // double | 2382 emit(0xF2); // double |
2386 emit_optional_rex_32(src, dst); | 2383 emit_optional_rex_32(src, dst); |
2387 emit(0x0F); | 2384 emit(0x0F); |
2388 emit(0x11); // store | 2385 emit(0x11); // store |
2389 emit_sse_operand(src, dst); | 2386 emit_sse_operand(src, dst); |
2390 } | 2387 } |
(...skipping 129 matching lines...)
2520 | 2517 |
2521 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) { | 2518 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) { |
2522 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); | 2519 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); |
2523 } | 2520 } |
2524 | 2521 |
2525 void Assembler::emit_sse_operand(XMMRegister dst, Register src) { | 2522 void Assembler::emit_sse_operand(XMMRegister dst, Register src) { |
2526 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); | 2523 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); |
2527 } | 2524 } |
2528 | 2525 |
2529 | 2526 |
2530 // Relocation information implementations | 2527 // Relocation information implementations. |
2531 | 2528 |
2532 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2529 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
2533 ASSERT(rmode != RelocInfo::NONE); | 2530 ASSERT(rmode != RelocInfo::NONE); |
2534 // Don't record external references unless the heap will be serialized. | 2531 // Don't record external references unless the heap will be serialized. |
2535 if (rmode == RelocInfo::EXTERNAL_REFERENCE && | 2532 if (rmode == RelocInfo::EXTERNAL_REFERENCE && |
2536 !Serializer::enabled() && | 2533 !Serializer::enabled() && |
2537 !FLAG_debug_code) { | 2534 !FLAG_debug_code) { |
2538 return; | 2535 return; |
2539 } | 2536 } |
2540 RelocInfo rinfo(pc_, rmode, data); | 2537 RelocInfo rinfo(pc_, rmode, data); |
(...skipping 47 matching lines...)
2588 written_position_ = current_position_; | 2585 written_position_ = current_position_; |
2589 } | 2586 } |
2590 } | 2587 } |
2591 | 2588 |
2592 | 2589 |
2593 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask | | 2590 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask | |
2594 1 << RelocInfo::INTERNAL_REFERENCE | | 2591 1 << RelocInfo::INTERNAL_REFERENCE | |
2595 1 << RelocInfo::JS_RETURN; | 2592 1 << RelocInfo::JS_RETURN; |
2596 | 2593 |
2597 } } // namespace v8::internal | 2594 } } // namespace v8::internal |