OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 660 matching lines...)
671 } | 671 } |
672 } | 672 } |
673 | 673 |
674 | 674 |
675 void MacroAssembler::AssertNumber(Register object) { | 675 void MacroAssembler::AssertNumber(Register object) { |
676 if (emit_debug_code()) { | 676 if (emit_debug_code()) { |
677 Label ok; | 677 Label ok; |
678 JumpIfSmi(object, &ok); | 678 JumpIfSmi(object, &ok); |
679 cmp(FieldOperand(object, HeapObject::kMapOffset), | 679 cmp(FieldOperand(object, HeapObject::kMapOffset), |
680 isolate()->factory()->heap_number_map()); | 680 isolate()->factory()->heap_number_map()); |
681 Check(equal, kOperandNotANumber); | 681 Check(equal, "Operand not a number"); |
682 bind(&ok); | 682 bind(&ok); |
683 } | 683 } |
684 } | 684 } |
685 | 685 |
686 | 686 |
687 void MacroAssembler::AssertSmi(Register object) { | 687 void MacroAssembler::AssertSmi(Register object) { |
688 if (emit_debug_code()) { | 688 if (emit_debug_code()) { |
689 test(object, Immediate(kSmiTagMask)); | 689 test(object, Immediate(kSmiTagMask)); |
690 Check(equal, kOperandIsNotASmi); | 690 Check(equal, "Operand is not a smi"); |
691 } | 691 } |
692 } | 692 } |
693 | 693 |
694 | 694 |
695 void MacroAssembler::AssertString(Register object) { | 695 void MacroAssembler::AssertString(Register object) { |
696 if (emit_debug_code()) { | 696 if (emit_debug_code()) { |
697 test(object, Immediate(kSmiTagMask)); | 697 test(object, Immediate(kSmiTagMask)); |
698 Check(not_equal, kOperandIsASmiAndNotAString); | 698 Check(not_equal, "Operand is a smi and not a string"); |
699 push(object); | 699 push(object); |
700 mov(object, FieldOperand(object, HeapObject::kMapOffset)); | 700 mov(object, FieldOperand(object, HeapObject::kMapOffset)); |
701 CmpInstanceType(object, FIRST_NONSTRING_TYPE); | 701 CmpInstanceType(object, FIRST_NONSTRING_TYPE); |
702 pop(object); | 702 pop(object); |
703 Check(below, kOperandIsNotAString); | 703 Check(below, "Operand is not a string"); |
704 } | 704 } |
705 } | 705 } |
706 | 706 |
707 | 707 |
708 void MacroAssembler::AssertName(Register object) { | 708 void MacroAssembler::AssertName(Register object) { |
709 if (emit_debug_code()) { | 709 if (emit_debug_code()) { |
710 test(object, Immediate(kSmiTagMask)); | 710 test(object, Immediate(kSmiTagMask)); |
711 Check(not_equal, kOperandIsASmiAndNotAName); | 711 Check(not_equal, "Operand is a smi and not a name"); |
712 push(object); | 712 push(object); |
713 mov(object, FieldOperand(object, HeapObject::kMapOffset)); | 713 mov(object, FieldOperand(object, HeapObject::kMapOffset)); |
714 CmpInstanceType(object, LAST_NAME_TYPE); | 714 CmpInstanceType(object, LAST_NAME_TYPE); |
715 pop(object); | 715 pop(object); |
716 Check(below_equal, kOperandIsNotAName); | 716 Check(below_equal, "Operand is not a name"); |
717 } | 717 } |
718 } | 718 } |
719 | 719 |
720 | 720 |
721 void MacroAssembler::AssertNotSmi(Register object) { | 721 void MacroAssembler::AssertNotSmi(Register object) { |
722 if (emit_debug_code()) { | 722 if (emit_debug_code()) { |
723 test(object, Immediate(kSmiTagMask)); | 723 test(object, Immediate(kSmiTagMask)); |
724 Check(not_equal, kOperandIsASmi); | 724 Check(not_equal, "Operand is a smi"); |
725 } | 725 } |
726 } | 726 } |
727 | 727 |
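A note on the smi tests emitted by the Assert* helpers above: on ia32, V8 tags small integers (smis) with a low bit of 0, so `test(object, Immediate(kSmiTagMask))` sets the zero flag exactly when the operand is a smi. A minimal C++ sketch of the same predicate, assuming the usual ia32 tagging constants (kSmiTag == 0, kSmiTagMask == 1):

    #include <cstdint>

    // Sketch only: mirrors the tag test in AssertSmi/AssertNotSmi above.
    // Assumption: ia32 smi tagging, i.e. the low bit of a smi is clear.
    static inline bool IsSmi(intptr_t tagged_value) {
      const intptr_t kSmiTagMask = 1;  // low tag bit: 0 = smi, 1 = heap object
      return (tagged_value & kSmiTagMask) == 0;  // the 'equal' condition in Check()
    }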
728 | 728 |
729 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 729 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
730 push(ebp); | 730 push(ebp); |
731 mov(ebp, esp); | 731 mov(ebp, esp); |
732 push(esi); | 732 push(esi); |
733 push(Immediate(Smi::FromInt(type))); | 733 push(Immediate(Smi::FromInt(type))); |
734 push(Immediate(CodeObject())); | 734 push(Immediate(CodeObject())); |
735 if (emit_debug_code()) { | 735 if (emit_debug_code()) { |
736 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value())); | 736 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value())); |
737 Check(not_equal, kCodeObjectNotProperlyPatched); | 737 Check(not_equal, "code object not properly patched"); |
738 } | 738 } |
739 } | 739 } |
740 | 740 |
741 | 741 |
742 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 742 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
743 if (emit_debug_code()) { | 743 if (emit_debug_code()) { |
744 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset), | 744 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset), |
745 Immediate(Smi::FromInt(type))); | 745 Immediate(Smi::FromInt(type))); |
746 Check(equal, kStackFrameTypesMustMatch); | 746 Check(equal, "stack frame types must match"); |
747 } | 747 } |
748 leave(); | 748 leave(); |
749 } | 749 } |
750 | 750 |
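For orientation: EnterFrame pushes, in order, the caller's ebp, the context register esi, the frame-type marker, and the code object, and LeaveFrame validates the marker slot before tearing the frame down. A layout sketch relative to ebp, derived from the pushes above (assuming kPointerSize == 4):

    // Layout sketch (offsets follow from the push sequence in EnterFrame):
    //   ebp +  4 : return address (caller PC)
    //   ebp +  0 : saved caller ebp   (ebp is set right after push(ebp))
    //   ebp -  4 : context (esi)
    //   ebp -  8 : frame type marker, Smi::FromInt(type)  <- checked by LeaveFrame
    //   ebp - 12 : code object        <- compared against undefined_value above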
751 | 751 |
752 void MacroAssembler::EnterExitFramePrologue() { | 752 void MacroAssembler::EnterExitFramePrologue() { |
753 // Set up the frame structure on the stack. | 753 // Set up the frame structure on the stack. |
754 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); | 754 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); |
755 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); | 755 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); |
756 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); | 756 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); |
(...skipping 260 matching lines...)
1017 ASSERT(!holder_reg.is(scratch1)); | 1017 ASSERT(!holder_reg.is(scratch1)); |
1018 ASSERT(!holder_reg.is(scratch2)); | 1018 ASSERT(!holder_reg.is(scratch2)); |
1019 ASSERT(!scratch1.is(scratch2)); | 1019 ASSERT(!scratch1.is(scratch2)); |
1020 | 1020 |
1021 // Load current lexical context from the stack frame. | 1021 // Load current lexical context from the stack frame. |
1022 mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset)); | 1022 mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset)); |
1023 | 1023 |
1024 // When generating debug code, make sure the lexical context is set. | 1024 // When generating debug code, make sure the lexical context is set. |
1025 if (emit_debug_code()) { | 1025 if (emit_debug_code()) { |
1026 cmp(scratch1, Immediate(0)); | 1026 cmp(scratch1, Immediate(0)); |
1027 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext); | 1027 Check(not_equal, "we should not have an empty lexical context"); |
1028 } | 1028 } |
1029 // Load the native context of the current context. | 1029 // Load the native context of the current context. |
1030 int offset = | 1030 int offset = |
1031 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; | 1031 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; |
1032 mov(scratch1, FieldOperand(scratch1, offset)); | 1032 mov(scratch1, FieldOperand(scratch1, offset)); |
1033 mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset)); | 1033 mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset)); |
1034 | 1034 |
1035 // Check the context is a native context. | 1035 // Check the context is a native context. |
1036 if (emit_debug_code()) { | 1036 if (emit_debug_code()) { |
1037 // Read the first word and compare to native_context_map. | 1037 // Read the first word and compare to native_context_map. |
1038 cmp(FieldOperand(scratch1, HeapObject::kMapOffset), | 1038 cmp(FieldOperand(scratch1, HeapObject::kMapOffset), |
1039 isolate()->factory()->native_context_map()); | 1039 isolate()->factory()->native_context_map()); |
1040 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); | 1040 Check(equal, "JSGlobalObject::native_context should be a native context."); |
1041 } | 1041 } |
1042 | 1042 |
1043 // Check if both contexts are the same. | 1043 // Check if both contexts are the same. |
1044 cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); | 1044 cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); |
1045 j(equal, &same_contexts); | 1045 j(equal, &same_contexts); |
1046 | 1046 |
1047 // Compare security tokens, save holder_reg on the stack so we can use it | 1047 // Compare security tokens, save holder_reg on the stack so we can use it |
1048 // as a temporary register. | 1048 // as a temporary register. |
1049 // | 1049 // |
1050 // Check that the security token in the calling global object is | 1050 // Check that the security token in the calling global object is |
1051 // compatible with the security token in the receiving global | 1051 // compatible with the security token in the receiving global |
1052 // object. | 1052 // object. |
1053 mov(scratch2, | 1053 mov(scratch2, |
1054 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); | 1054 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); |
1055 | 1055 |
1056 // Check the context is a native context. | 1056 // Check the context is a native context. |
1057 if (emit_debug_code()) { | 1057 if (emit_debug_code()) { |
1058 cmp(scratch2, isolate()->factory()->null_value()); | 1058 cmp(scratch2, isolate()->factory()->null_value()); |
1059 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull); | 1059 Check(not_equal, "JSGlobalProxy::context() should not be null."); |
1060 | 1060 |
1061 // Read the first word and compare to native_context_map(), | 1061 // Read the first word and compare to native_context_map(), |
1062 cmp(FieldOperand(scratch2, HeapObject::kMapOffset), | 1062 cmp(FieldOperand(scratch2, HeapObject::kMapOffset), |
1063 isolate()->factory()->native_context_map()); | 1063 isolate()->factory()->native_context_map()); |
1064 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); | 1064 Check(equal, "JSGlobalObject::native_context should be a native context."); |
1065 } | 1065 } |
1066 | 1066 |
1067 int token_offset = Context::kHeaderSize + | 1067 int token_offset = Context::kHeaderSize + |
1068 Context::SECURITY_TOKEN_INDEX * kPointerSize; | 1068 Context::SECURITY_TOKEN_INDEX * kPointerSize; |
1069 mov(scratch1, FieldOperand(scratch1, token_offset)); | 1069 mov(scratch1, FieldOperand(scratch1, token_offset)); |
1070 cmp(scratch1, FieldOperand(scratch2, token_offset)); | 1070 cmp(scratch1, FieldOperand(scratch2, token_offset)); |
1071 j(not_equal, miss); | 1071 j(not_equal, miss); |
1072 | 1072 |
1073 bind(&same_contexts); | 1073 bind(&same_contexts); |
1074 } | 1074 } |
(...skipping 124 matching lines...)
1199 ExternalReference allocation_top = | 1199 ExternalReference allocation_top = |
1200 AllocationUtils::GetAllocationTopReference(isolate(), flags); | 1200 AllocationUtils::GetAllocationTopReference(isolate(), flags); |
1201 | 1201 |
1202 // Just return if allocation top is already known. | 1202 // Just return if allocation top is already known. |
1203 if ((flags & RESULT_CONTAINS_TOP) != 0) { | 1203 if ((flags & RESULT_CONTAINS_TOP) != 0) { |
1204 // No use of scratch if allocation top is provided. | 1204 // No use of scratch if allocation top is provided. |
1205 ASSERT(scratch.is(no_reg)); | 1205 ASSERT(scratch.is(no_reg)); |
1206 #ifdef DEBUG | 1206 #ifdef DEBUG |
1207 // Assert that result actually contains top on entry. | 1207 // Assert that result actually contains top on entry. |
1208 cmp(result, Operand::StaticVariable(allocation_top)); | 1208 cmp(result, Operand::StaticVariable(allocation_top)); |
1209 Check(equal, kUnexpectedAllocationTop); | 1209 Check(equal, "Unexpected allocation top"); |
1210 #endif | 1210 #endif |
1211 return; | 1211 return; |
1212 } | 1212 } |
1213 | 1213 |
1214 // Move address of new object to result. Use scratch register if available. | 1214 // Move address of new object to result. Use scratch register if available. |
1215 if (scratch.is(no_reg)) { | 1215 if (scratch.is(no_reg)) { |
1216 mov(result, Operand::StaticVariable(allocation_top)); | 1216 mov(result, Operand::StaticVariable(allocation_top)); |
1217 } else { | 1217 } else { |
1218 mov(scratch, Immediate(allocation_top)); | 1218 mov(scratch, Immediate(allocation_top)); |
1219 mov(result, Operand(scratch, 0)); | 1219 mov(result, Operand(scratch, 0)); |
1220 } | 1220 } |
1221 } | 1221 } |
1222 | 1222 |
1223 | 1223 |
1224 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, | 1224 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, |
1225 Register scratch, | 1225 Register scratch, |
1226 AllocationFlags flags) { | 1226 AllocationFlags flags) { |
1227 if (emit_debug_code()) { | 1227 if (emit_debug_code()) { |
1228 test(result_end, Immediate(kObjectAlignmentMask)); | 1228 test(result_end, Immediate(kObjectAlignmentMask)); |
1229 Check(zero, kUnalignedAllocationInNewSpace); | 1229 Check(zero, "Unaligned allocation in new space"); |
1230 } | 1230 } |
1231 | 1231 |
1232 ExternalReference allocation_top = | 1232 ExternalReference allocation_top = |
1233 AllocationUtils::GetAllocationTopReference(isolate(), flags); | 1233 AllocationUtils::GetAllocationTopReference(isolate(), flags); |
1234 | 1234 |
1235 // Update new top. Use scratch if available. | 1235 // Update new top. Use scratch if available. |
1236 if (scratch.is(no_reg)) { | 1236 if (scratch.is(no_reg)) { |
1237 mov(Operand::StaticVariable(allocation_top), result_end); | 1237 mov(Operand::StaticVariable(allocation_top), result_end); |
1238 } else { | 1238 } else { |
1239 mov(Operand(scratch, 0), result_end); | 1239 mov(Operand(scratch, 0), result_end); |
(...skipping 211 matching lines...)
1451 | 1451 |
1452 | 1452 |
1453 void MacroAssembler::UndoAllocationInNewSpace(Register object) { | 1453 void MacroAssembler::UndoAllocationInNewSpace(Register object) { |
1454 ExternalReference new_space_allocation_top = | 1454 ExternalReference new_space_allocation_top = |
1455 ExternalReference::new_space_allocation_top_address(isolate()); | 1455 ExternalReference::new_space_allocation_top_address(isolate()); |
1456 | 1456 |
1457 // Make sure the object has no tag before resetting top. | 1457 // Make sure the object has no tag before resetting top. |
1458 and_(object, Immediate(~kHeapObjectTagMask)); | 1458 and_(object, Immediate(~kHeapObjectTagMask)); |
1459 #ifdef DEBUG | 1459 #ifdef DEBUG |
1460 cmp(object, Operand::StaticVariable(new_space_allocation_top)); | 1460 cmp(object, Operand::StaticVariable(new_space_allocation_top)); |
1461 Check(below, kUndoAllocationOfNonAllocatedMemory); | 1461 Check(below, "Undo allocation of non allocated memory"); |
1462 #endif | 1462 #endif |
1463 mov(Operand::StaticVariable(new_space_allocation_top), object); | 1463 mov(Operand::StaticVariable(new_space_allocation_top), object); |
1464 } | 1464 } |
1465 | 1465 |
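UndoAllocationInNewSpace above clears the heap-object tag bits before comparing against, and rewinding, the allocation top, since the top pointer is an untagged address. A minimal sketch of the untagging step, treating the exact mask width as an assumption (conventionally kHeapObjectTag == 1 with a two-bit kHeapObjectTagMask == 3):

    #include <cstdint>

    // Sketch only: same effect as and_(object, Immediate(~kHeapObjectTagMask)).
    // Assumption: two tag bits, so the mask is 3.
    static inline intptr_t UntagHeapObject(intptr_t tagged) {
      const intptr_t kHeapObjectTagMask = 3;
      return tagged & ~kHeapObjectTagMask;  // raw allocation address
    }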
1466 | 1466 |
1467 void MacroAssembler::AllocateHeapNumber(Register result, | 1467 void MacroAssembler::AllocateHeapNumber(Register result, |
1468 Register scratch1, | 1468 Register scratch1, |
1469 Register scratch2, | 1469 Register scratch2, |
1470 Label* gc_required) { | 1470 Label* gc_required) { |
1471 // Allocate heap number in new space. | 1471 // Allocate heap number in new space. |
(...skipping 583 matching lines...)
2055 | 2055 |
2056 Label promote_scheduled_exception; | 2056 Label promote_scheduled_exception; |
2057 Label delete_allocated_handles; | 2057 Label delete_allocated_handles; |
2058 Label leave_exit_frame; | 2058 Label leave_exit_frame; |
2059 | 2059 |
2060 bind(&prologue); | 2060 bind(&prologue); |
2061 // No more valid handles (the result handle was the last one). Restore | 2061 // No more valid handles (the result handle was the last one). Restore |
2062 // previous handle scope. | 2062 // previous handle scope. |
2063 mov(Operand::StaticVariable(next_address), ebx); | 2063 mov(Operand::StaticVariable(next_address), ebx); |
2064 sub(Operand::StaticVariable(level_address), Immediate(1)); | 2064 sub(Operand::StaticVariable(level_address), Immediate(1)); |
2065 Assert(above_equal, kInvalidHandleScopeLevel); | 2065 Assert(above_equal, "Invalid HandleScope level"); |
2066 cmp(edi, Operand::StaticVariable(limit_address)); | 2066 cmp(edi, Operand::StaticVariable(limit_address)); |
2067 j(not_equal, &delete_allocated_handles); | 2067 j(not_equal, &delete_allocated_handles); |
2068 bind(&leave_exit_frame); | 2068 bind(&leave_exit_frame); |
2069 | 2069 |
2070 // Check if the function scheduled an exception. | 2070 // Check if the function scheduled an exception. |
2071 ExternalReference scheduled_exception_address = | 2071 ExternalReference scheduled_exception_address = |
2072 ExternalReference::scheduled_exception_address(isolate()); | 2072 ExternalReference::scheduled_exception_address(isolate()); |
2073 cmp(Operand::StaticVariable(scheduled_exception_address), | 2073 cmp(Operand::StaticVariable(scheduled_exception_address), |
2074 Immediate(isolate()->factory()->the_hole_value())); | 2074 Immediate(isolate()->factory()->the_hole_value())); |
2075 j(not_equal, &promote_scheduled_exception); | 2075 j(not_equal, &promote_scheduled_exception); |
(...skipping 21 matching lines...)
2097 | 2097 |
2098 cmp(return_value, isolate()->factory()->true_value()); | 2098 cmp(return_value, isolate()->factory()->true_value()); |
2099 j(equal, &ok, Label::kNear); | 2099 j(equal, &ok, Label::kNear); |
2100 | 2100 |
2101 cmp(return_value, isolate()->factory()->false_value()); | 2101 cmp(return_value, isolate()->factory()->false_value()); |
2102 j(equal, &ok, Label::kNear); | 2102 j(equal, &ok, Label::kNear); |
2103 | 2103 |
2104 cmp(return_value, isolate()->factory()->null_value()); | 2104 cmp(return_value, isolate()->factory()->null_value()); |
2105 j(equal, &ok, Label::kNear); | 2105 j(equal, &ok, Label::kNear); |
2106 | 2106 |
2107 Abort(kAPICallReturnedInvalidObject); | 2107 Abort("API call returned invalid object"); |
2108 | 2108 |
2109 bind(&ok); | 2109 bind(&ok); |
2110 #endif | 2110 #endif |
2111 | 2111 |
2112 LeaveApiExitFrame(); | 2112 LeaveApiExitFrame(); |
2113 ret(stack_space * kPointerSize); | 2113 ret(stack_space * kPointerSize); |
2114 | 2114 |
2115 bind(&promote_scheduled_exception); | 2115 bind(&promote_scheduled_exception); |
2116 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 2116 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); |
2117 | 2117 |
(...skipping 265 matching lines...)
2383 mov(dst, esi); | 2383 mov(dst, esi); |
2384 } | 2384 } |
2385 | 2385 |
2386 // We should not have found a with context by walking the context chain | 2386 // We should not have found a with context by walking the context chain |
2387 // (i.e., the static scope chain and runtime context chain do not agree). | 2387 // (i.e., the static scope chain and runtime context chain do not agree). |
2388 // A variable occurring in such a scope should have slot type LOOKUP and | 2388 // A variable occurring in such a scope should have slot type LOOKUP and |
2389 // not CONTEXT. | 2389 // not CONTEXT. |
2390 if (emit_debug_code()) { | 2390 if (emit_debug_code()) { |
2391 cmp(FieldOperand(dst, HeapObject::kMapOffset), | 2391 cmp(FieldOperand(dst, HeapObject::kMapOffset), |
2392 isolate()->factory()->with_context_map()); | 2392 isolate()->factory()->with_context_map()); |
2393 Check(not_equal, kVariableResolvedToWithContext); | 2393 Check(not_equal, "Variable resolved to with context."); |
2394 } | 2394 } |
2395 } | 2395 } |
2396 | 2396 |
2397 | 2397 |
2398 void MacroAssembler::LoadTransitionedArrayMapConditional( | 2398 void MacroAssembler::LoadTransitionedArrayMapConditional( |
2399 ElementsKind expected_kind, | 2399 ElementsKind expected_kind, |
2400 ElementsKind transitioned_kind, | 2400 ElementsKind transitioned_kind, |
2401 Register map_in_out, | 2401 Register map_in_out, |
2402 Register scratch, | 2402 Register scratch, |
2403 Label* no_map_match) { | 2403 Label* no_map_match) { |
(...skipping 66 matching lines...)
2470 | 2470 |
2471 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2471 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
2472 Register map) { | 2472 Register map) { |
2473 // Load the initial map. The global functions all have initial maps. | 2473 // Load the initial map. The global functions all have initial maps. |
2474 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2474 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
2475 if (emit_debug_code()) { | 2475 if (emit_debug_code()) { |
2476 Label ok, fail; | 2476 Label ok, fail; |
2477 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK); | 2477 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK); |
2478 jmp(&ok); | 2478 jmp(&ok); |
2479 bind(&fail); | 2479 bind(&fail); |
2480 Abort(kGlobalFunctionsMustHaveInitialMap); | 2480 Abort("Global functions must have initial map"); |
2481 bind(&ok); | 2481 bind(&ok); |
2482 } | 2482 } |
2483 } | 2483 } |
2484 | 2484 |
2485 | 2485 |
2486 // Store the value in register src in the safepoint register stack | 2486 // Store the value in register src in the safepoint register stack |
2487 // slot for register dst. | 2487 // slot for register dst. |
2488 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) { | 2488 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) { |
2489 mov(SafepointRegisterSlot(dst), src); | 2489 mov(SafepointRegisterSlot(dst), src); |
2490 } | 2490 } |
(...skipping 80 matching lines...)
2571 | 2571 |
2572 // The top-of-stack (tos) is 7 if there is one item pushed. | 2572 // The top-of-stack (tos) is 7 if there is one item pushed. |
2573 int tos = (8 - depth) % 8; | 2573 int tos = (8 - depth) % 8; |
2574 const int kTopMask = 0x3800; | 2574 const int kTopMask = 0x3800; |
2575 push(eax); | 2575 push(eax); |
2576 fwait(); | 2576 fwait(); |
2577 fnstsw_ax(); | 2577 fnstsw_ax(); |
2578 and_(eax, kTopMask); | 2578 and_(eax, kTopMask); |
2579 shr(eax, 11); | 2579 shr(eax, 11); |
2580 cmp(eax, Immediate(tos)); | 2580 cmp(eax, Immediate(tos)); |
2581 Check(equal, kUnexpectedFPUStackDepthAfterInstruction); | 2581 Check(equal, "Unexpected FPU stack depth after instruction"); |
2582 fnclex(); | 2582 fnclex(); |
2583 pop(eax); | 2583 pop(eax); |
2584 } | 2584 } |
2585 | 2585 |
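The mask-and-shift pair above isolates the x87 TOP field of the FPU status word: bits 11..13 hold the register-stack top, hence kTopMask == 0x3800 and the shift by 11. TOP counts down from 0 (wrapping modulo 8) as values are pushed, so one pushed item leaves TOP == 7, matching the comment. A small sketch of both sides of the comparison:

    #include <cstdint>

    // Sketch: expected TOP for a given logical stack depth, per the
    // tos = (8 - depth) % 8 computation above.
    static inline int ExpectedTop(int depth) { return (8 - depth) % 8; }

    // Extract TOP (bits 11..13) from an x87 status word, as the emitted
    // fnstsw_ax / and_ / shr sequence does.
    static inline int ExtractTop(uint16_t status_word) {
      return (status_word & 0x3800) >> 11;
    }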
2586 | 2586 |
2587 void MacroAssembler::Drop(int stack_elements) { | 2587 void MacroAssembler::Drop(int stack_elements) { |
2588 if (stack_elements > 0) { | 2588 if (stack_elements > 0) { |
2589 add(esp, Immediate(stack_elements * kPointerSize)); | 2589 add(esp, Immediate(stack_elements * kPointerSize)); |
2590 } | 2590 } |
2591 } | 2591 } |
(...skipping 62 matching lines...)
2654 Label skip; | 2654 Label skip; |
2655 j(NegateCondition(cc), &skip); | 2655 j(NegateCondition(cc), &skip); |
2656 pushfd(); | 2656 pushfd(); |
2657 DecrementCounter(counter, value); | 2657 DecrementCounter(counter, value); |
2658 popfd(); | 2658 popfd(); |
2659 bind(&skip); | 2659 bind(&skip); |
2660 } | 2660 } |
2661 } | 2661 } |
2662 | 2662 |
2663 | 2663 |
2664 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { | 2664 void MacroAssembler::Assert(Condition cc, const char* msg) { |
2665 if (emit_debug_code()) Check(cc, reason); | 2665 if (emit_debug_code()) Check(cc, msg); |
2666 } | 2666 } |
2667 | 2667 |
2668 | 2668 |
2669 void MacroAssembler::AssertFastElements(Register elements) { | 2669 void MacroAssembler::AssertFastElements(Register elements) { |
2670 if (emit_debug_code()) { | 2670 if (emit_debug_code()) { |
2671 Factory* factory = isolate()->factory(); | 2671 Factory* factory = isolate()->factory(); |
2672 Label ok; | 2672 Label ok; |
2673 cmp(FieldOperand(elements, HeapObject::kMapOffset), | 2673 cmp(FieldOperand(elements, HeapObject::kMapOffset), |
2674 Immediate(factory->fixed_array_map())); | 2674 Immediate(factory->fixed_array_map())); |
2675 j(equal, &ok); | 2675 j(equal, &ok); |
2676 cmp(FieldOperand(elements, HeapObject::kMapOffset), | 2676 cmp(FieldOperand(elements, HeapObject::kMapOffset), |
2677 Immediate(factory->fixed_double_array_map())); | 2677 Immediate(factory->fixed_double_array_map())); |
2678 j(equal, &ok); | 2678 j(equal, &ok); |
2679 cmp(FieldOperand(elements, HeapObject::kMapOffset), | 2679 cmp(FieldOperand(elements, HeapObject::kMapOffset), |
2680 Immediate(factory->fixed_cow_array_map())); | 2680 Immediate(factory->fixed_cow_array_map())); |
2681 j(equal, &ok); | 2681 j(equal, &ok); |
2682 Abort(kJSObjectWithFastElementsMapHasSlowElements); | 2682 Abort("JSObject with fast elements map has slow elements"); |
2683 bind(&ok); | 2683 bind(&ok); |
2684 } | 2684 } |
2685 } | 2685 } |
2686 | 2686 |
2687 | 2687 |
2688 void MacroAssembler::Check(Condition cc, BailoutReason reason) { | 2688 void MacroAssembler::Check(Condition cc, const char* msg) { |
2689 Label L; | 2689 Label L; |
2690 j(cc, &L); | 2690 j(cc, &L); |
2691 Abort(reason); | 2691 Abort(msg); |
2692 // will not return here | 2692 // will not return here |
2693 bind(&L); | 2693 bind(&L); |
2694 } | 2694 } |
2695 | 2695 |
2696 | 2696 |
2697 void MacroAssembler::CheckStackAlignment() { | 2697 void MacroAssembler::CheckStackAlignment() { |
2698 int frame_alignment = OS::ActivationFrameAlignment(); | 2698 int frame_alignment = OS::ActivationFrameAlignment(); |
2699 int frame_alignment_mask = frame_alignment - 1; | 2699 int frame_alignment_mask = frame_alignment - 1; |
2700 if (frame_alignment > kPointerSize) { | 2700 if (frame_alignment > kPointerSize) { |
2701 ASSERT(IsPowerOf2(frame_alignment)); | 2701 ASSERT(IsPowerOf2(frame_alignment)); |
2702 Label alignment_as_expected; | 2702 Label alignment_as_expected; |
2703 test(esp, Immediate(frame_alignment_mask)); | 2703 test(esp, Immediate(frame_alignment_mask)); |
2704 j(zero, &alignment_as_expected); | 2704 j(zero, &alignment_as_expected); |
2705 // Abort if stack is not aligned. | 2705 // Abort if stack is not aligned. |
2706 int3(); | 2706 int3(); |
2707 bind(&alignment_as_expected); | 2707 bind(&alignment_as_expected); |
2708 } | 2708 } |
2709 } | 2709 } |
2710 | 2710 |
2711 | 2711 |
2712 void MacroAssembler::Abort(BailoutReason reason) { | 2712 void MacroAssembler::Abort(const char* msg) { |
2713 // We want to pass the msg string like a smi to avoid GC | 2713 // We want to pass the msg string like a smi to avoid GC |
2714 // problems, however msg is not guaranteed to be aligned | 2714 // problems, however msg is not guaranteed to be aligned |
2715 // properly. Instead, we pass an aligned pointer that is | 2715 // properly. Instead, we pass an aligned pointer that is |
2716 // a proper v8 smi, but also pass the alignment difference | 2716 // a proper v8 smi, but also pass the alignment difference |
2717 // from the real pointer as a smi. | 2717 // from the real pointer as a smi. |
2718 const char* msg = GetBailoutReason(reason); | |
2719 intptr_t p1 = reinterpret_cast<intptr_t>(msg); | 2718 intptr_t p1 = reinterpret_cast<intptr_t>(msg); |
2720 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; | 2719 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; |
2721 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); | 2720 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); |
2722 #ifdef DEBUG | 2721 #ifdef DEBUG |
2723 if (msg != NULL) { | 2722 if (msg != NULL) { |
2724 RecordComment("Abort message: "); | 2723 RecordComment("Abort message: "); |
2725 RecordComment(msg); | 2724 RecordComment(msg); |
2726 } | 2725 } |
2727 #endif | 2726 #endif |
2728 | 2727 |
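The pointer-as-smi trick described in the comment at the top of Abort works because p0 is the message pointer rounded down to a smi-valid value, while the remainder p1 - p0 is a small integer that can be passed as a genuine smi, letting the runtime rebuild the original pointer; the pushes that hand both values to the abort runtime call sit in the elided lines below. A minimal round-trip sketch, assuming ia32 tagging (kSmiTag == 0, kSmiTagMask == 1):

    #include <cstdint>

    // Sketch only: encode/decode of the Abort() message pointer.
    // Assumption: kSmiTag == 0 and kSmiTagMask == 1, as on ia32.
    static inline const char* RoundTrip(const char* msg) {
      const intptr_t kSmiTagMask = 1;
      intptr_t p1 = reinterpret_cast<intptr_t>(msg);  // real, possibly unaligned
      intptr_t p0 = p1 & ~kSmiTagMask;                // smi-shaped, safe past the GC
      intptr_t delta = p1 - p0;                       // 0 or 1, smi-encodable
      return reinterpret_cast<const char*>(p0 + delta);  // == msg on the other side
    }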
(...skipping 383 matching lines...)
3112 // Value is a data object, and it is white. Mark it black. Since we know | 3111 // Value is a data object, and it is white. Mark it black. Since we know |
3113 // that the object is white we can make it black by flipping one bit. | 3112 // that the object is white we can make it black by flipping one bit. |
3114 or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); | 3113 or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); |
3115 | 3114 |
3116 and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask)); | 3115 and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask)); |
3117 add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), | 3116 add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), |
3118 length); | 3117 length); |
3119 if (emit_debug_code()) { | 3118 if (emit_debug_code()) { |
3120 mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); | 3119 mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); |
3121 cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset)); | 3120 cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset)); |
3122 Check(less_equal, kLiveBytesCountOverflowChunkSize); | 3121 Check(less_equal, "Live Bytes Count overflow chunk size"); |
3123 } | 3122 } |
3124 | 3123 |
3125 bind(&done); | 3124 bind(&done); |
3126 } | 3125 } |
3127 | 3126 |
3128 | 3127 |
3129 void MacroAssembler::EnumLength(Register dst, Register map) { | 3128 void MacroAssembler::EnumLength(Register dst, Register map) { |
3130 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); | 3129 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); |
3131 mov(dst, FieldOperand(map, Map::kBitField3Offset)); | 3130 mov(dst, FieldOperand(map, Map::kBitField3Offset)); |
3132 and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask))); | 3131 and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask))); |
(...skipping 54 matching lines...)
3187 j(greater, &no_memento_available); | 3186 j(greater, &no_memento_available); |
3188 cmp(MemOperand(scratch_reg, -AllocationMemento::kSize), | 3187 cmp(MemOperand(scratch_reg, -AllocationMemento::kSize), |
3189 Immediate(Handle<Map>(isolate()->heap()->allocation_memento_map()))); | 3188 Immediate(Handle<Map>(isolate()->heap()->allocation_memento_map()))); |
3190 bind(&no_memento_available); | 3189 bind(&no_memento_available); |
3191 } | 3190 } |
3192 | 3191 |
3193 | 3192 |
3194 } } // namespace v8::internal | 3193 } } // namespace v8::internal |
3195 | 3194 |
3196 #endif // V8_TARGET_ARCH_IA32 | 3195 #endif // V8_TARGET_ARCH_IA32 |