OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 28 matching lines...) Expand all Loading... |
39 | 39 |
// Set up a macro assembler that emits generated code into the given
// buffer.  Buffer management is delegated to the base Assembler; the
// remaining members start in their default state.
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),  // NOTE(review): presumably counts unresolved references -- confirm in header.
      generating_stub_(false),   // Not generating stub code until told otherwise.
      allow_stub_calls_(true),   // Stub calls are permitted by default.
      code_object_(Heap::undefined_value()) {  // No associated code object yet.
}
47 | 47 |
48 | 48 |
// Load the root-list entry at 'index' into 'destination'.  r13 is used
// here (and in PushRoot/CompareRoot) as the base pointer to the root
// array; entries are pointer-sized, hence the scale by kPointerSizeLog2.
void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index) {
  movq(destination, Operand(r13, index << kPointerSizeLog2));
}
| 53 |
| 54 |
// Push the root-list entry at 'index' onto the stack.  r13 holds the
// base address of the root array; entries are pointer-sized.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(r13, index << kPointerSizeLog2));
}
| 58 |
| 59 |
// Compare register 'with' against the root-list entry at 'index',
// setting the condition flags for a subsequent conditional jump/set.
// r13 holds the base address of the root array.
void MacroAssembler::CompareRoot(Register with,
                                 Heap::RootListIndex index) {
  cmpq(with, Operand(r13, index << kPointerSizeLog2));
}
| 64 |
49 | 65 |
50 static void RecordWriteHelper(MacroAssembler* masm, | 66 static void RecordWriteHelper(MacroAssembler* masm, |
51 Register object, | 67 Register object, |
52 Register addr, | 68 Register addr, |
53 Register scratch) { | 69 Register scratch) { |
54 Label fast; | 70 Label fast; |
55 | 71 |
56 // Compute the page start address from the heap object pointer, and reuse | 72 // Compute the page start address from the heap object pointer, and reuse |
57 // the 'object' register for it. | 73 // the 'object' register for it. |
58 ASSERT(is_int32(~Page::kPageAlignmentMask)); | 74 ASSERT(is_int32(~Page::kPageAlignmentMask)); |
(...skipping 210 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
269 void MacroAssembler::StubReturn(int argc) { | 285 void MacroAssembler::StubReturn(int argc) { |
270 ASSERT(argc >= 1 && generating_stub()); | 286 ASSERT(argc >= 1 && generating_stub()); |
271 ret((argc - 1) * kPointerSize); | 287 ret((argc - 1) * kPointerSize); |
272 } | 288 } |
273 | 289 |
274 | 290 |
275 void MacroAssembler::IllegalOperation(int num_arguments) { | 291 void MacroAssembler::IllegalOperation(int num_arguments) { |
276 if (num_arguments > 0) { | 292 if (num_arguments > 0) { |
277 addq(rsp, Immediate(num_arguments * kPointerSize)); | 293 addq(rsp, Immediate(num_arguments * kPointerSize)); |
278 } | 294 } |
279 movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT); | 295 LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
280 } | 296 } |
281 | 297 |
282 | 298 |
// Call the runtime routine identified by 'id' with 'num_arguments'
// arguments: resolve the id to its Runtime::Function descriptor and
// delegate to the Runtime::Function* overload.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
286 | 302 |
287 | 303 |
288 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | 304 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { |
289 // If the expected number of arguments of the runtime function is | 305 // If the expected number of arguments of the runtime function is |
(...skipping 331 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
621 Immediate(1 << Map::kHasNonInstancePrototype)); | 637 Immediate(1 << Map::kHasNonInstancePrototype)); |
622 j(not_zero, &non_instance); | 638 j(not_zero, &non_instance); |
623 | 639 |
624 // Get the prototype or initial map from the function. | 640 // Get the prototype or initial map from the function. |
625 movq(result, | 641 movq(result, |
626 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 642 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
627 | 643 |
628 // If the prototype or initial map is the hole, don't return it and | 644 // If the prototype or initial map is the hole, don't return it and |
629 // simply miss the cache instead. This will allow us to allocate a | 645 // simply miss the cache instead. This will allow us to allocate a |
630 // prototype object on-demand in the runtime system. | 646 // prototype object on-demand in the runtime system. |
631 Cmp(result, Factory::the_hole_value()); | 647 CompareRoot(result, Heap::kTheHoleValueRootIndex); |
632 j(equal, miss); | 648 j(equal, miss); |
633 | 649 |
634 // If the function does not have an initial map, we're done. | 650 // If the function does not have an initial map, we're done. |
635 Label done; | 651 Label done; |
636 CmpObjectType(result, MAP_TYPE, kScratchRegister); | 652 CmpObjectType(result, MAP_TYPE, kScratchRegister); |
637 j(not_equal, &done); | 653 j(not_equal, &done); |
638 | 654 |
639 // Get the prototype from the initial map. | 655 // Get the prototype from the initial map. |
640 movq(result, FieldOperand(result, Map::kPrototypeOffset)); | 656 movq(result, FieldOperand(result, Map::kPrototypeOffset)); |
641 jmp(&done); | 657 jmp(&done); |
(...skipping 533 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1175 // Compare security tokens. | 1191 // Compare security tokens. |
1176 // Check that the security token in the calling global object is | 1192 // Check that the security token in the calling global object is |
1177 // compatible with the security token in the receiving global | 1193 // compatible with the security token in the receiving global |
1178 // object. | 1194 // object. |
1179 | 1195 |
1180 // Check the context is a global context. | 1196 // Check the context is a global context. |
1181 if (FLAG_debug_code) { | 1197 if (FLAG_debug_code) { |
1182 // Preserve original value of holder_reg. | 1198 // Preserve original value of holder_reg. |
1183 push(holder_reg); | 1199 push(holder_reg); |
1184 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 1200 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
1185 Cmp(holder_reg, Factory::null_value()); | 1201 CompareRoot(holder_reg, Heap::kNullValueRootIndex); |
1186 Check(not_equal, "JSGlobalProxy::context() should not be null."); | 1202 Check(not_equal, "JSGlobalProxy::context() should not be null."); |
1187 | 1203 |
1188 // Read the first word and compare to global_context_map(), | 1204 // Read the first word and compare to global_context_map(), |
1189 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); | 1205 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); |
1190 Cmp(holder_reg, Factory::global_context_map()); | 1206 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex); |
1191 Check(equal, "JSGlobalObject::global_context should be a global context."); | 1207 Check(equal, "JSGlobalObject::global_context should be a global context."); |
1192 pop(holder_reg); | 1208 pop(holder_reg); |
1193 } | 1209 } |
1194 | 1210 |
1195 movq(kScratchRegister, | 1211 movq(kScratchRegister, |
1196 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 1212 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
1197 int token_offset = Context::kHeaderSize + | 1213 int token_offset = Context::kHeaderSize + |
1198 Context::SECURITY_TOKEN_INDEX * kPointerSize; | 1214 Context::SECURITY_TOKEN_INDEX * kPointerSize; |
1199 movq(scratch, FieldOperand(scratch, token_offset)); | 1215 movq(scratch, FieldOperand(scratch, token_offset)); |
1200 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); | 1216 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); |
(...skipping 149 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1350 movq(kScratchRegister, new_space_allocation_top); | 1366 movq(kScratchRegister, new_space_allocation_top); |
1351 #ifdef DEBUG | 1367 #ifdef DEBUG |
1352 cmpq(object, Operand(kScratchRegister, 0)); | 1368 cmpq(object, Operand(kScratchRegister, 0)); |
1353 Check(below, "Undo allocation of non allocated memory"); | 1369 Check(below, "Undo allocation of non allocated memory"); |
1354 #endif | 1370 #endif |
1355 movq(Operand(kScratchRegister, 0), object); | 1371 movq(Operand(kScratchRegister, 0), object); |
1356 } | 1372 } |
1357 | 1373 |
1358 | 1374 |
1359 } } // namespace v8::internal | 1375 } } // namespace v8::internal |
OLD | NEW |