Chromium Code Reviews

Side by Side Diff: src/ia32/macro-assembler-ia32.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 26 matching lines...)
37 37
38 namespace v8 { 38 namespace v8 {
39 namespace internal { 39 namespace internal {
40 40
41 // ------------------------------------------------------------------------- 41 // -------------------------------------------------------------------------
42 // MacroAssembler implementation. 42 // MacroAssembler implementation.
43 43
44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) 44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
45 : Assembler(arg_isolate, buffer, size), 45 : Assembler(arg_isolate, buffer, size),
46 generating_stub_(false), 46 generating_stub_(false),
47 allow_stub_calls_(true) { 47 allow_stub_calls_(true),
48 has_frame_(false) {
48 if (isolate() != NULL) { 49 if (isolate() != NULL) {
49 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), 50 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
50 isolate()); 51 isolate());
51 } 52 }
52 } 53 }
53 54
54 55
55 void MacroAssembler::RecordWriteHelper(Register object, 56 void MacroAssembler::InNewSpace(
56 Register addr, 57 Register object,
57 Register scratch) { 58 Register scratch,
58 if (emit_debug_code()) { 59 Condition cc,
59 // Check that the object is not in new space. 60 Label* condition_met,
60 Label not_in_new_space; 61 Label::Distance condition_met_distance) {
61 InNewSpace(object, scratch, not_equal, &not_in_new_space); 62 ASSERT(cc == equal || cc == not_equal);
62 Abort("new-space object passed to RecordWriteHelper"); 63 if (scratch.is(object)) {
63 bind(&not_in_new_space); 64 and_(scratch, Immediate(~Page::kPageAlignmentMask));
65 } else {
66 mov(scratch, Immediate(~Page::kPageAlignmentMask));
67 and_(scratch, object);
64 } 68 }
65 69 // Check that we can use a test_b.
66 // Compute the page start address from the heap object pointer, and reuse 70 ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
67 // the 'object' register for it. 71 ASSERT(MemoryChunk::IN_TO_SPACE < 8);
68 and_(object, ~Page::kPageAlignmentMask); 72 int mask = (1 << MemoryChunk::IN_FROM_SPACE)
69 73 | (1 << MemoryChunk::IN_TO_SPACE);
70 // Compute number of region covering addr. See Page::GetRegionNumberForAddress 74 // If non-zero, the page belongs to new-space.
71 // method for more details. 75 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
72 shr(addr, Page::kRegionSizeLog2); 76 static_cast<uint8_t>(mask));
73 and_(addr, Page::kPageAlignmentMask >> Page::kRegionSizeLog2); 77 j(cc, condition_met, condition_met_distance);
74
75 // Set dirty mark for region.
76 // Bit tests with a memory operand should be avoided on Intel processors,
77 // as they usually have long latency and multiple uops. We load the bit base
78 // operand to a register at first and store it back after bit set.
79 mov(scratch, Operand(object, Page::kDirtyFlagOffset));
80 bts(Operand(scratch), addr);
81 mov(Operand(object, Page::kDirtyFlagOffset), scratch);
82 } 78 }
83 79
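The new InNewSpace replaces the old start-address arithmetic with a page-flag test: mask the tagged pointer down to its page start, then test the page's flag byte. A minimal standalone C++ sketch of that test, with the constants assumed (the real values live in Page and MemoryChunk):

#include <cstdint>

constexpr uintptr_t kPageAlignmentMask = (1u << 20) - 1;  // assumed 1MB pages
constexpr size_t kFlagsOffset = 2 * sizeof(void*);        // assumed layout
constexpr int kInFromSpace = 3;                           // assumed flag bits;
constexpr int kInToSpace = 4;                             // both < 8, so a
                                                          // byte test suffices
bool InNewSpace(uintptr_t tagged_object) {
  uintptr_t page = tagged_object & ~kPageAlignmentMask;   // page start
  uint8_t flags = *reinterpret_cast<uint8_t*>(page + kFlagsOffset);
  return (flags & ((1 << kInFromSpace) | (1 << kInToSpace))) != 0;
}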
84 80
81 void MacroAssembler::RememberedSetHelper(
82 Register object, // Only used for debug checks.
83 Register addr,
84 Register scratch,
85 SaveFPRegsMode save_fp,
86 MacroAssembler::RememberedSetFinalAction and_then) {
87 Label done;
88 if (FLAG_debug_code) {
89 Label ok;
90 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
91 int3();
92 bind(&ok);
93 }
94 // Load store buffer top.
95 ExternalReference store_buffer =
96 ExternalReference::store_buffer_top(isolate());
97 mov(scratch, Operand::StaticVariable(store_buffer));
98 // Store pointer to buffer.
99 mov(Operand(scratch, 0), addr);
100 // Increment buffer top.
101 add(scratch, Immediate(kPointerSize));
102 // Write back new top of buffer.
103 mov(Operand::StaticVariable(store_buffer), scratch);
104 // Call stub on end of buffer.
105 // Check for end of buffer.
106 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
107 if (and_then == kReturnAtEnd) {
108 Label buffer_overflowed;
109 j(not_equal, &buffer_overflowed, Label::kNear);
110 ret(0);
111 bind(&buffer_overflowed);
112 } else {
113 ASSERT(and_then == kFallThroughAtEnd);
114 j(equal, &done, Label::kNear);
115 }
116 StoreBufferOverflowStub store_buffer_overflow =
117 StoreBufferOverflowStub(save_fp);
118 CallStub(&store_buffer_overflow);
119 if (and_then == kReturnAtEnd) {
120 ret(0);
121 } else {
122 ASSERT(and_then == kFallThroughAtEnd);
123 bind(&done);
124 }
125 }
126
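In C++ terms, RememberedSetHelper appends one slot address to the store buffer and calls the overflow stub when the new top crosses the overflow bit. A sketch under assumed names (the real buffer is placed so that exactly one address bit flips on overflow):

#include <cstdint>

uintptr_t store_buffer[1 << 12];              // toy buffer; V8 places the real
uintptr_t* store_buffer_top = store_buffer;   // one so an address bit flips
constexpr uintptr_t kOverflowBit = 1u << 16;  // exactly on overflow (assumed)

void StoreBufferOverflow() { store_buffer_top = store_buffer; }  // stand-in

void RememberSlot(uintptr_t* slot) {
  *store_buffer_top++ = reinterpret_cast<uintptr_t>(slot);  // append slot
  if (reinterpret_cast<uintptr_t>(store_buffer_top) & kOverflowBit) {
    StoreBufferOverflow();  // kReturnAtEnd vs. kFallThroughAtEnd only changes
  }                         // how control leaves afterwards
}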
127
85 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg, 128 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
86 XMMRegister scratch_reg, 129 XMMRegister scratch_reg,
87 Register result_reg) { 130 Register result_reg) {
88 Label done; 131 Label done;
89 ExternalReference zero_ref = ExternalReference::address_of_zero(); 132 ExternalReference zero_ref = ExternalReference::address_of_zero();
90 movdbl(scratch_reg, Operand::StaticVariable(zero_ref)); 133 movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
91 Set(result_reg, Immediate(0)); 134 Set(result_reg, Immediate(0));
92 ucomisd(input_reg, scratch_reg); 135 ucomisd(input_reg, scratch_reg);
93 j(below, &done, Label::kNear); 136 j(below, &done, Label::kNear);
94 ExternalReference half_ref = ExternalReference::address_of_one_half(); 137 ExternalReference half_ref = ExternalReference::address_of_one_half();
(...skipping 10 matching lines...)
105 void MacroAssembler::ClampUint8(Register reg) { 148 void MacroAssembler::ClampUint8(Register reg) {
106 Label done; 149 Label done;
107 test(reg, Immediate(0xFFFFFF00)); 150 test(reg, Immediate(0xFFFFFF00));
108 j(zero, &done, Label::kNear); 151 j(zero, &done, Label::kNear);
109 setcc(negative, reg); // 1 if negative, 0 if positive. 152 setcc(negative, reg); // 1 if negative, 0 if positive.
110 dec_b(reg); // 0 if negative, 255 if positive. 153 dec_b(reg); // 0 if negative, 255 if positive.
111 bind(&done); 154 bind(&done);
112 } 155 }
113 156
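ClampUint8 is worth unpacking: setcc(negative) materializes the sign as 0 or 1, and the byte decrement maps that to the two saturation values. The same logic in C++ (a sketch, not V8 API):

#include <cstdint>

uint8_t ClampUint8(int32_t v) {
  if ((v & 0xFFFFFF00) == 0) return static_cast<uint8_t>(v);  // already 0..255
  uint8_t sign = v < 0 ? 1 : 0;            // setcc(negative, reg)
  return static_cast<uint8_t>(sign - 1);   // dec_b: 1 -> 0x00, 0 -> 0xFF
}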
114 157
115 void MacroAssembler::InNewSpace(Register object, 158 void MacroAssembler::RecordWriteArray(Register object,
116 Register scratch, 159 Register value,
117 Condition cc, 160 Register index,
118 Label* branch, 161 SaveFPRegsMode save_fp,
119 Label::Distance branch_near) { 162 RememberedSetAction remembered_set_action,
120 ASSERT(cc == equal || cc == not_equal); 163 SmiCheck smi_check) {
121 if (Serializer::enabled()) { 164 // First, check if a write barrier is even needed. The tests below
122 // Can't do arithmetic on external references if it might get serialized. 165 // catch stores of Smis.
123 mov(scratch, Operand(object)); 166 Label done;
124 // The mask isn't really an address. We load it as an external reference in 167
125 // case the size of the new space is different between the snapshot maker 168 // Skip barrier if writing a smi.
126 // and the running system. 169 if (smi_check == INLINE_SMI_CHECK) {
127 and_(Operand(scratch), 170 ASSERT_EQ(0, kSmiTag);
128 Immediate(ExternalReference::new_space_mask(isolate()))); 171 test(value, Immediate(kSmiTagMask));
129 cmp(Operand(scratch), 172 j(zero, &done);
130 Immediate(ExternalReference::new_space_start(isolate()))); 173 }
131 j(cc, branch, branch_near); 174
132 } else { 175 // Array access: calculate the destination address in the same manner as
133 int32_t new_space_start = reinterpret_cast<int32_t>( 176 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
134 ExternalReference::new_space_start(isolate()).address()); 177 // into an array of words.
135 lea(scratch, Operand(object, -new_space_start)); 178 Register dst = index;
136 and_(scratch, isolate()->heap()->NewSpaceMask()); 179 lea(dst, Operand(object, index, times_half_pointer_size,
137 j(cc, branch, branch_near); 180 FixedArray::kHeaderSize - kHeapObjectTag));
181
182 RecordWrite(
183 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
184
185 bind(&done);
186
187 // Clobber clobbered input registers when running with the debug-code flag
188 // turned on to provoke errors.
189 if (emit_debug_code()) {
190 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
191 mov(index, Immediate(BitCast<int32_t>(kZapValue)));
138 } 192 }
139 } 193 }
140 194
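The lea in RecordWriteArray folds smi untagging into the address computation. A worked C++ equivalent for ia32 (FixedArray header size assumed to be two words):

#include <cstdint>

constexpr int kSmiTagSize = 1;           // smi = value << 1
constexpr int kPointerSize = 4;          // ia32
constexpr int kHeapObjectTag = 1;
constexpr int kFixedArrayHeaderSize = 8; // assumed: map + length

// index is a tagged smi, i.e. (element_index << kSmiTagSize).
uintptr_t ElementSlot(uintptr_t tagged_object, int32_t smi_index) {
  // times_half_pointer_size scales the smi by 2, so the smi's built-in factor
  // of 2 and the scale together give element_index * kPointerSize.
  return tagged_object + smi_index * (kPointerSize / 2)
         + kFixedArrayHeaderSize - kHeapObjectTag;
}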
141 195
142 void MacroAssembler::RecordWrite(Register object, 196 void MacroAssembler::RecordWriteField(
143 int offset, 197 Register object,
144 Register value, 198 int offset,
145 Register scratch) { 199 Register value,
200 Register dst,
201 SaveFPRegsMode save_fp,
202 RememberedSetAction remembered_set_action,
203 SmiCheck smi_check) {
146 // First, check if a write barrier is even needed. The tests below 204 // First, check if a write barrier is even needed. The tests below
147 // catch stores of Smis and stores into young gen. 205 // catch stores of Smis.
148 Label done; 206 Label done;
149 207
150 // Skip barrier if writing a smi. 208 // Skip barrier if writing a smi.
151 STATIC_ASSERT(kSmiTag == 0); 209 if (smi_check == INLINE_SMI_CHECK) {
152 JumpIfSmi(value, &done, Label::kNear); 210 JumpIfSmi(value, &done, Label::kNear);
211 }
153 212
154 InNewSpace(object, value, equal, &done, Label::kNear); 213 // Although the object register is tagged, the offset is relative to the start
214 // of the object, so the offset must be a multiple of kPointerSize.
215 ASSERT(IsAligned(offset, kPointerSize));
155 216
156 // The offset is relative to a tagged or untagged HeapObject pointer, 217 lea(dst, FieldOperand(object, offset));
157 // so either offset or offset + kHeapObjectTag must be a 218 if (emit_debug_code()) {
158 // multiple of kPointerSize. 219 Label ok;
159 ASSERT(IsAligned(offset, kPointerSize) || 220 test_b(dst, (1 << kPointerSizeLog2) - 1);
160 IsAligned(offset + kHeapObjectTag, kPointerSize)); 221 j(zero, &ok, Label::kNear);
222 int3();
223 bind(&ok);
224 }
161 225
162 Register dst = scratch; 226 RecordWrite(
163 if (offset != 0) { 227 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
164 lea(dst, Operand(object, offset));
165 } else {
166 // Array access: calculate the destination address in the same manner as
167 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
168 // into an array of words.
169 STATIC_ASSERT(kSmiTagSize == 1);
170 STATIC_ASSERT(kSmiTag == 0);
171 lea(dst, Operand(object, dst, times_half_pointer_size,
172 FixedArray::kHeaderSize - kHeapObjectTag));
173 }
174 RecordWriteHelper(object, dst, value);
175 228
176 bind(&done); 229 bind(&done);
177 230
178 // Clobber all input registers when running with the debug-code flag 231 // Clobber clobbered input registers when running with the debug-code flag
179 // turned on to provoke errors. 232 // turned on to provoke errors.
180 if (emit_debug_code()) { 233 if (emit_debug_code()) {
181 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
182 mov(value, Immediate(BitCast<int32_t>(kZapValue))); 234 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
183 mov(scratch, Immediate(BitCast<int32_t>(kZapValue))); 235 mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
184 } 236 }
185 } 237 }
186 238
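The debug block in RecordWriteField verifies the computed slot address is pointer-aligned; as plain C++:

#include <cstdint>

bool IsPointerAligned(uintptr_t addr) {
  constexpr int kPointerSizeLog2 = 2;                  // ia32
  return (addr & ((1 << kPointerSizeLog2) - 1)) == 0;  // test_b + j(zero)
}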
187 239
188 void MacroAssembler::RecordWrite(Register object, 240 void MacroAssembler::RecordWrite(Register object,
189 Register address, 241 Register address,
190 Register value) { 242 Register value,
243 SaveFPRegsMode fp_mode,
244 RememberedSetAction remembered_set_action,
245 SmiCheck smi_check) {
246 ASSERT(!object.is(value));
247 ASSERT(!object.is(address));
248 ASSERT(!value.is(address));
249 if (emit_debug_code()) {
250 AbortIfSmi(object);
251 }
252
253 if (remembered_set_action == OMIT_REMEMBERED_SET &&
254 !FLAG_incremental_marking) {
255 return;
256 }
257
258 if (FLAG_debug_code) {
259 Label ok;
260 cmp(value, Operand(address, 0));
261 j(equal, &ok, Label::kNear);
262 int3();
263 bind(&ok);
264 }
265
191 // First, check if a write barrier is even needed. The tests below 266 // First, check if a write barrier is even needed. The tests below
192 // catch stores of Smis and stores into young gen. 267 // catch stores of Smis and stores into young gen.
193 Label done; 268 Label done;
194 269
195 // Skip barrier if writing a smi. 270 if (smi_check == INLINE_SMI_CHECK) {
196 STATIC_ASSERT(kSmiTag == 0); 271 // Skip barrier if writing a smi.
197 JumpIfSmi(value, &done, Label::kNear); 272 JumpIfSmi(value, &done, Label::kNear);
273 }
198 274
199 InNewSpace(object, value, equal, &done); 275 CheckPageFlag(value,
276 value, // Used as scratch.
277 MemoryChunk::kPointersToHereAreInterestingMask,
278 zero,
279 &done,
280 Label::kNear);
281 CheckPageFlag(object,
282 value, // Used as scratch.
283 MemoryChunk::kPointersFromHereAreInterestingMask,
284 zero,
285 &done,
286 Label::kNear);
200 287
201 RecordWriteHelper(object, address, value); 288 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
289 CallStub(&stub);
202 290
203 bind(&done); 291 bind(&done);
204 292
205 // Clobber all input registers when running with the debug-code flag 293 // Clobber clobbered registers when running with the debug-code flag
206 // turned on to provoke errors. 294 // turned on to provoke errors.
207 if (emit_debug_code()) { 295 if (emit_debug_code()) {
208 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
209 mov(address, Immediate(BitCast<int32_t>(kZapValue))); 296 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
210 mov(value, Immediate(BitCast<int32_t>(kZapValue))); 297 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
211 } 298 }
212 } 299 }
213 300
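A sketch of the fast-path filter the rewritten RecordWrite performs before calling the stub (flag masks and layout assumed): both the value's page and the object's page must be "interesting" for the barrier to do any work.

#include <cstdint>

constexpr uintptr_t kPageAlignmentMask = (1u << 20) - 1;            // assumed
constexpr size_t kFlagsOffset = 2 * sizeof(void*);                  // assumed
constexpr uintptr_t kPointersToHereAreInterestingMask = 1u << 5;    // assumed
constexpr uintptr_t kPointersFromHereAreInterestingMask = 1u << 6;  // assumed

bool PageFlagSet(uintptr_t tagged_ptr, uintptr_t mask) {  // CheckPageFlag
  uintptr_t page = tagged_ptr & ~kPageAlignmentMask;
  return (*reinterpret_cast<uintptr_t*>(page + kFlagsOffset) & mask) != 0;
}

bool BarrierNeeded(uintptr_t object, uintptr_t value) {
  return PageFlagSet(value, kPointersToHereAreInterestingMask) &&
         PageFlagSet(object, kPointersFromHereAreInterestingMask);
}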
214 301
215 #ifdef ENABLE_DEBUGGER_SUPPORT 302 #ifdef ENABLE_DEBUGGER_SUPPORT
216 void MacroAssembler::DebugBreak() { 303 void MacroAssembler::DebugBreak() {
217 Set(eax, Immediate(0)); 304 Set(eax, Immediate(0));
218 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate()))); 305 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
219 CEntryStub ces(1); 306 CEntryStub ces(1);
220 call(ces.GetCode(), RelocInfo::DEBUG_BREAK); 307 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
221 } 308 }
222 #endif 309 #endif
223 310
224 311
225 void MacroAssembler::Set(Register dst, const Immediate& x) { 312 void MacroAssembler::Set(Register dst, const Immediate& x) {
226 if (x.is_zero()) { 313 if (x.is_zero()) {
227 xor_(dst, Operand(dst)); // Shorter than mov. 314 xor_(dst, dst); // Shorter than mov.
228 } else { 315 } else {
229 mov(dst, x); 316 mov(dst, x);
230 } 317 }
231 } 318 }
232 319
233 320
234 void MacroAssembler::Set(const Operand& dst, const Immediate& x) { 321 void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
235 mov(dst, x); 322 mov(dst, x);
236 } 323 }
237 324
(...skipping 42 matching lines...)
280 367
281 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { 368 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
282 cmpb(FieldOperand(map, Map::kInstanceTypeOffset), 369 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
283 static_cast<int8_t>(type)); 370 static_cast<int8_t>(type));
284 } 371 }
285 372
286 373
287 void MacroAssembler::CheckFastElements(Register map, 374 void MacroAssembler::CheckFastElements(Register map,
288 Label* fail, 375 Label* fail,
289 Label::Distance distance) { 376 Label::Distance distance) {
290 STATIC_ASSERT(FAST_ELEMENTS == 0); 377 STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
378 STATIC_ASSERT(FAST_ELEMENTS == 1);
291 cmpb(FieldOperand(map, Map::kBitField2Offset), 379 cmpb(FieldOperand(map, Map::kBitField2Offset),
292 Map::kMaximumBitField2FastElementValue); 380 Map::kMaximumBitField2FastElementValue);
293 j(above, fail, distance); 381 j(above, fail, distance);
294 } 382 }
295 383
296 384
385 void MacroAssembler::CheckFastObjectElements(Register map,
386 Label* fail,
387 Label::Distance distance) {
388 STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
389 STATIC_ASSERT(FAST_ELEMENTS == 1);
390 cmpb(FieldOperand(map, Map::kBitField2Offset),
391 Map::kMaximumBitField2FastSmiOnlyElementValue);
392 j(below_equal, fail, distance);
393 cmpb(FieldOperand(map, Map::kBitField2Offset),
394 Map::kMaximumBitField2FastElementValue);
395 j(above, fail, distance);
396 }
397
398
399 void MacroAssembler::CheckFastSmiOnlyElements(Register map,
400 Label* fail,
401 Label::Distance distance) {
402 STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
403 cmpb(FieldOperand(map, Map::kBitField2Offset),
404 Map::kMaximumBitField2FastSmiOnlyElementValue);
405 j(above, fail, distance);
406 }
407
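How the three CheckFast* predicates read Map::bit_field2, restated in C++ (the maxima here are assumed placeholder values): element kinds are ordered FAST_SMI_ONLY_ELEMENTS (0) < FAST_ELEMENTS (1) < everything else, so each predicate is one or two unsigned compares against a per-kind maximum.

#include <cstdint>

constexpr uint8_t kMaxBitField2FastSmiOnlyElementValue = 0x3F;  // assumed
constexpr uint8_t kMaxBitField2FastElementValue = 0x7F;         // assumed

bool HasFastElements(uint8_t bit_field2) {        // smi-only or fast
  return bit_field2 <= kMaxBitField2FastElementValue;
}
bool HasFastObjectElements(uint8_t bit_field2) {  // fast, but not smi-only
  return bit_field2 > kMaxBitField2FastSmiOnlyElementValue &&
         bit_field2 <= kMaxBitField2FastElementValue;
}
bool HasFastSmiOnlyElements(uint8_t bit_field2) {
  return bit_field2 <= kMaxBitField2FastSmiOnlyElementValue;
}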
408
409 void MacroAssembler::StoreNumberToDoubleElements(
410 Register maybe_number,
411 Register elements,
412 Register key,
413 Register scratch1,
414 XMMRegister scratch2,
415 Label* fail,
416 bool specialize_for_processor) {
417 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
418 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
419
420 CheckMap(maybe_number,
421 isolate()->factory()->heap_number_map(),
422 fail,
423 DONT_DO_SMI_CHECK);
424
425 // Double value, canonicalize NaN.
426 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
427 cmp(FieldOperand(maybe_number, offset),
428 Immediate(kNaNOrInfinityLowerBoundUpper32));
429 j(greater_equal, &maybe_nan, Label::kNear);
430
431 bind(&not_nan);
432 ExternalReference canonical_nan_reference =
433 ExternalReference::address_of_canonical_non_hole_nan();
434 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
435 CpuFeatures::Scope use_sse2(SSE2);
436 movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
437 bind(&have_double_value);
438 movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
439 scratch2);
440 } else {
441 fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
442 bind(&have_double_value);
443 fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
444 }
445 jmp(&done);
446
447 bind(&maybe_nan);
448 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
449 // it's an Infinity, and the non-NaN code path applies.
450 j(greater, &is_nan, Label::kNear);
451 cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
452 j(zero, &not_nan);
453 bind(&is_nan);
454 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
455 CpuFeatures::Scope use_sse2(SSE2);
456 movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
457 } else {
458 fld_d(Operand::StaticVariable(canonical_nan_reference));
459 }
460 jmp(&have_double_value, Label::kNear);
461
462 bind(&smi_value);
463 // Value is a smi. Convert to a double and store.
464 // Preserve original value.
465 mov(scratch1, maybe_number);
466 SmiUntag(scratch1);
467 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
468 CpuFeatures::Scope fscope(SSE2);
469 cvtsi2sd(scratch2, scratch1);
470 movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
471 scratch2);
472 } else {
473 push(scratch1);
474 fild_s(Operand(esp, 0));
475 pop(scratch1);
476 fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
477 }
478 bind(&done);
479 }
480
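StoreNumberToDoubleElements distinguishes NaN from Infinity by looking only at the raw bits, as IEEE-754 lays them out (kNaNOrInfinityLowerBoundUpper32 being the exponent-all-ones pattern 0x7FF00000). The test in portable C++:

#include <cstdint>
#include <cstring>

bool IsNaNBits(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  uint32_t upper = static_cast<uint32_t>(bits >> 32) & 0x7FFFFFFF;  // drop sign
  uint32_t lower = static_cast<uint32_t>(bits);
  if (upper < 0x7FF00000) return false;      // finite
  return upper != 0x7FF00000 || lower != 0;  // any mantissa bit set: NaN
}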
481
297 void MacroAssembler::CheckMap(Register obj, 482 void MacroAssembler::CheckMap(Register obj,
298 Handle<Map> map, 483 Handle<Map> map,
299 Label* fail, 484 Label* fail,
300 SmiCheckType smi_check_type) { 485 SmiCheckType smi_check_type) {
301 if (smi_check_type == DO_SMI_CHECK) { 486 if (smi_check_type == DO_SMI_CHECK) {
302 JumpIfSmi(obj, fail); 487 JumpIfSmi(obj, fail);
303 } 488 }
304 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map)); 489 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
305 j(not_equal, fail); 490 j(not_equal, fail);
306 } 491 }
(...skipping 31 matching lines...)
338 Label* fail) { 523 Label* fail) {
339 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 524 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
340 IsInstanceJSObjectType(map, scratch, fail); 525 IsInstanceJSObjectType(map, scratch, fail);
341 } 526 }
342 527
343 528
344 void MacroAssembler::IsInstanceJSObjectType(Register map, 529 void MacroAssembler::IsInstanceJSObjectType(Register map,
345 Register scratch, 530 Register scratch,
346 Label* fail) { 531 Label* fail) {
347 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset)); 532 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
348 sub(Operand(scratch), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 533 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
349 cmp(scratch, 534 cmp(scratch,
350 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 535 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
351 j(above, fail); 536 j(above, fail);
352 } 537 }
353 538
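IsInstanceJSObjectType uses the classic unsigned range check: subtract the lower bound, then compare once. In C++:

#include <cstdint>

bool InRange(uint32_t type, uint32_t first, uint32_t last) {
  // sub(scratch, Immediate(first)) followed by an unsigned "above" branch
  // tests first <= type <= last with a single comparison.
  return (type - first) <= (last - first);
}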
354 539
355 void MacroAssembler::FCmp() { 540 void MacroAssembler::FCmp() {
356 if (CpuFeatures::IsSupported(CMOV)) { 541 if (CpuFeatures::IsSupported(CMOV)) {
357 fucomip(); 542 fucomip();
358 ffree(0); 543 ffree(0);
(...skipping 36 matching lines...)
395 580
396 581
397 void MacroAssembler::AbortIfSmi(Register object) { 582 void MacroAssembler::AbortIfSmi(Register object) {
398 test(object, Immediate(kSmiTagMask)); 583 test(object, Immediate(kSmiTagMask));
399 Assert(not_equal, "Operand is a smi"); 584 Assert(not_equal, "Operand is a smi");
400 } 585 }
401 586
402 587
403 void MacroAssembler::EnterFrame(StackFrame::Type type) { 588 void MacroAssembler::EnterFrame(StackFrame::Type type) {
404 push(ebp); 589 push(ebp);
405 mov(ebp, Operand(esp)); 590 mov(ebp, esp);
406 push(esi); 591 push(esi);
407 push(Immediate(Smi::FromInt(type))); 592 push(Immediate(Smi::FromInt(type)));
408 push(Immediate(CodeObject())); 593 push(Immediate(CodeObject()));
409 if (emit_debug_code()) { 594 if (emit_debug_code()) {
410 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value())); 595 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
411 Check(not_equal, "code object not properly patched"); 596 Check(not_equal, "code object not properly patched");
412 } 597 }
413 } 598 }
414 599
415 600
416 void MacroAssembler::LeaveFrame(StackFrame::Type type) { 601 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
417 if (emit_debug_code()) { 602 if (emit_debug_code()) {
418 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset), 603 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
419 Immediate(Smi::FromInt(type))); 604 Immediate(Smi::FromInt(type)));
420 Check(equal, "stack frame types must match"); 605 Check(equal, "stack frame types must match");
421 } 606 }
422 leave(); 607 leave();
423 } 608 }
424 609
425 610
426 void MacroAssembler::EnterExitFramePrologue() { 611 void MacroAssembler::EnterExitFramePrologue() {
427 // Set up the frame structure on the stack. 612 // Set up the frame structure on the stack.
428 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); 613 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
429 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); 614 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
430 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); 615 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
431 push(ebp); 616 push(ebp);
432 mov(ebp, Operand(esp)); 617 mov(ebp, esp);
433 618
434 // Reserve room for entry stack pointer and push the code object. 619 // Reserve room for entry stack pointer and push the code object.
435 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); 620 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
436 push(Immediate(0)); // Saved entry sp, patched before call. 621 push(Immediate(0)); // Saved entry sp, patched before call.
437 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot. 622 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
438 623
439 // Save the frame pointer and the context in top. 624 // Save the frame pointer and the context in top.
440 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, 625 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
441 isolate()); 626 isolate());
442 ExternalReference context_address(Isolate::kContextAddress, 627 ExternalReference context_address(Isolate::kContextAddress,
443 isolate()); 628 isolate());
444 mov(Operand::StaticVariable(c_entry_fp_address), ebp); 629 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
445 mov(Operand::StaticVariable(context_address), esi); 630 mov(Operand::StaticVariable(context_address), esi);
446 } 631 }
447 632
448 633
449 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) { 634 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
450 // Optionally save all XMM registers. 635 // Optionally save all XMM registers.
451 if (save_doubles) { 636 if (save_doubles) {
452 CpuFeatures::Scope scope(SSE2); 637 CpuFeatures::Scope scope(SSE2);
453 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize; 638 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
454 sub(Operand(esp), Immediate(space)); 639 sub(esp, Immediate(space));
455 const int offset = -2 * kPointerSize; 640 const int offset = -2 * kPointerSize;
456 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { 641 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
457 XMMRegister reg = XMMRegister::from_code(i); 642 XMMRegister reg = XMMRegister::from_code(i);
458 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg); 643 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
459 } 644 }
460 } else { 645 } else {
461 sub(Operand(esp), Immediate(argc * kPointerSize)); 646 sub(esp, Immediate(argc * kPointerSize));
462 } 647 }
463 648
464 // Get the required frame alignment for the OS. 649 // Get the required frame alignment for the OS.
465 const int kFrameAlignment = OS::ActivationFrameAlignment(); 650 const int kFrameAlignment = OS::ActivationFrameAlignment();
466 if (kFrameAlignment > 0) { 651 if (kFrameAlignment > 0) {
467 ASSERT(IsPowerOf2(kFrameAlignment)); 652 ASSERT(IsPowerOf2(kFrameAlignment));
468 and_(esp, -kFrameAlignment); 653 and_(esp, -kFrameAlignment);
469 } 654 }
470 655
471 // Patch the saved entry sp. 656 // Patch the saved entry sp.
472 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp); 657 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
473 } 658 }
474 659
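The and_(esp, -kFrameAlignment) above is the standard power-of-two round-down; in C++:

#include <cstdint>
#include <cassert>

uintptr_t AlignDown(uintptr_t sp, uintptr_t alignment) {
  assert((alignment & (alignment - 1)) == 0);  // IsPowerOf2(kFrameAlignment)
  return sp & ~(alignment - 1);                // == sp & -alignment
}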
475 660
476 void MacroAssembler::EnterExitFrame(bool save_doubles) { 661 void MacroAssembler::EnterExitFrame(bool save_doubles) {
477 EnterExitFramePrologue(); 662 EnterExitFramePrologue();
478 663
479 // Set up argc and argv in callee-saved registers. 664 // Set up argc and argv in callee-saved registers.
480 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; 665 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
481 mov(edi, Operand(eax)); 666 mov(edi, eax);
482 lea(esi, Operand(ebp, eax, times_4, offset)); 667 lea(esi, Operand(ebp, eax, times_4, offset));
483 668
484 // Reserve space for argc, argv and isolate. 669 // Reserve space for argc, argv and isolate.
485 EnterExitFrameEpilogue(3, save_doubles); 670 EnterExitFrameEpilogue(3, save_doubles);
486 } 671 }
487 672
488 673
489 void MacroAssembler::EnterApiExitFrame(int argc) { 674 void MacroAssembler::EnterApiExitFrame(int argc) {
490 EnterExitFramePrologue(); 675 EnterExitFramePrologue();
491 EnterExitFrameEpilogue(argc, false); 676 EnterExitFrameEpilogue(argc, false);
(...skipping 33 matching lines...)
525 #endif 710 #endif
526 711
527 // Clear the top frame. 712 // Clear the top frame.
528 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, 713 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
529 isolate()); 714 isolate());
530 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0)); 715 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
531 } 716 }
532 717
533 718
534 void MacroAssembler::LeaveApiExitFrame() { 719 void MacroAssembler::LeaveApiExitFrame() {
535 mov(esp, Operand(ebp)); 720 mov(esp, ebp);
536 pop(ebp); 721 pop(ebp);
537 722
538 LeaveExitFrameEpilogue(); 723 LeaveExitFrameEpilogue();
539 } 724 }
540 725
541 726
542 void MacroAssembler::PushTryHandler(CodeLocation try_location, 727 void MacroAssembler::PushTryHandler(CodeLocation try_location,
543 HandlerType type) { 728 HandlerType type) {
544 // Adjust this code if not the case. 729 // Adjust this code if not the case.
545 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); 730 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
(...skipping 27 matching lines...)
573 mov(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress, 758 mov(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
574 isolate())), 759 isolate())),
575 esp); 760 esp);
576 } 761 }
577 762
578 763
579 void MacroAssembler::PopTryHandler() { 764 void MacroAssembler::PopTryHandler() {
580 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 765 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
581 pop(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress, 766 pop(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
582 isolate()))); 767 isolate())));
583 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); 768 add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
584 } 769 }
585 770
586 771
587 void MacroAssembler::Throw(Register value) { 772 void MacroAssembler::Throw(Register value) {
588 // Adjust this code if not the case. 773 // Adjust this code if not the case.
589 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); 774 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
590 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 775 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
591 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize); 776 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
592 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); 777 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
593 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize); 778 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
(...skipping 11 matching lines...)
605 // Restore next handler, context, and frame pointer; discard handler state. 790 // Restore next handler, context, and frame pointer; discard handler state.
606 pop(Operand::StaticVariable(handler_address)); 791 pop(Operand::StaticVariable(handler_address));
607 pop(esi); // Context. 792 pop(esi); // Context.
608 pop(ebp); // Frame pointer. 793 pop(ebp); // Frame pointer.
609 pop(edx); // State. 794 pop(edx); // State.
610 795
611 // If the handler is a JS frame, restore the context to the frame. 796 // If the handler is a JS frame, restore the context to the frame.
612 // (edx == ENTRY) == (ebp == 0) == (esi == 0), so we could test any 797 // (edx == ENTRY) == (ebp == 0) == (esi == 0), so we could test any
613 // of them. 798 // of them.
614 Label skip; 799 Label skip;
615 cmp(Operand(edx), Immediate(StackHandler::ENTRY)); 800 cmp(edx, Immediate(StackHandler::ENTRY));
616 j(equal, &skip, Label::kNear); 801 j(equal, &skip, Label::kNear);
617 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); 802 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
618 bind(&skip); 803 bind(&skip);
619 804
620 ret(0); 805 ret(0);
621 } 806 }
622 807
623 808
624 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, 809 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
625 Register value) { 810 Register value) {
(...skipping 63 matching lines...)
689 Label* miss) { 874 Label* miss) {
690 Label same_contexts; 875 Label same_contexts;
691 876
692 ASSERT(!holder_reg.is(scratch)); 877 ASSERT(!holder_reg.is(scratch));
693 878
694 // Load current lexical context from the stack frame. 879 // Load current lexical context from the stack frame.
695 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset)); 880 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
696 881
697 // When generating debug code, make sure the lexical context is set. 882 // When generating debug code, make sure the lexical context is set.
698 if (emit_debug_code()) { 883 if (emit_debug_code()) {
699 cmp(Operand(scratch), Immediate(0)); 884 cmp(scratch, Immediate(0));
700 Check(not_equal, "we should not have an empty lexical context"); 885 Check(not_equal, "we should not have an empty lexical context");
701 } 886 }
702 // Load the global context of the current context. 887 // Load the global context of the current context.
703 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 888 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
704 mov(scratch, FieldOperand(scratch, offset)); 889 mov(scratch, FieldOperand(scratch, offset));
705 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); 890 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
706 891
707 // Check the context is a global context. 892 // Check the context is a global context.
708 if (emit_debug_code()) { 893 if (emit_debug_code()) {
709 push(scratch); 894 push(scratch);
(...skipping 67 matching lines...)
777 962
778 Label done; 963 Label done;
779 964
780 // Compute the hash code from the untagged key. This must be kept in sync 965 // Compute the hash code from the untagged key. This must be kept in sync
781 // with ComputeIntegerHash in utils.h. 966 // with ComputeIntegerHash in utils.h.
782 // 967 //
783 // hash = ~hash + (hash << 15); 968 // hash = ~hash + (hash << 15);
784 mov(r1, r0); 969 mov(r1, r0);
785 not_(r0); 970 not_(r0);
786 shl(r1, 15); 971 shl(r1, 15);
787 add(r0, Operand(r1)); 972 add(r0, r1);
788 // hash = hash ^ (hash >> 12); 973 // hash = hash ^ (hash >> 12);
789 mov(r1, r0); 974 mov(r1, r0);
790 shr(r1, 12); 975 shr(r1, 12);
791 xor_(r0, Operand(r1)); 976 xor_(r0, r1);
792 // hash = hash + (hash << 2); 977 // hash = hash + (hash << 2);
793 lea(r0, Operand(r0, r0, times_4, 0)); 978 lea(r0, Operand(r0, r0, times_4, 0));
794 // hash = hash ^ (hash >> 4); 979 // hash = hash ^ (hash >> 4);
795 mov(r1, r0); 980 mov(r1, r0);
796 shr(r1, 4); 981 shr(r1, 4);
797 xor_(r0, Operand(r1)); 982 xor_(r0, r1);
798 // hash = hash * 2057; 983 // hash = hash * 2057;
799 imul(r0, r0, 2057); 984 imul(r0, r0, 2057);
800 // hash = hash ^ (hash >> 16); 985 // hash = hash ^ (hash >> 16);
801 mov(r1, r0); 986 mov(r1, r0);
802 shr(r1, 16); 987 shr(r1, 16);
803 xor_(r0, Operand(r1)); 988 xor_(r0, r1);
804 989
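// For reference, the sequence emitted above mirrors ComputeIntegerHash from
// utils.h (the comments quote it step by step); as plain C++:

#include <cstdint>

uint32_t ComputeIntegerHash(uint32_t hash) {
  hash = ~hash + (hash << 15);  // mov/not/shl/add
  hash = hash ^ (hash >> 12);   // mov/shr/xor
  hash = hash + (hash << 2);    // lea(r0, Operand(r0, r0, times_4, 0))
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;           // imul(r0, r0, 2057)
  hash = hash ^ (hash >> 16);
  return hash;
}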
805 // Compute capacity mask. 990 // Compute capacity mask.
806 mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset)); 991 mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset));
807 shr(r1, kSmiTagSize); // convert smi to int 992 shr(r1, kSmiTagSize); // convert smi to int
808 dec(r1); 993 dec(r1);
809 994
810 // Generate an unrolled loop that performs a few probes before giving up. 995 // Generate an unrolled loop that performs a few probes before giving up.
811 const int kProbes = 4; 996 const int kProbes = 4;
812 for (int i = 0; i < kProbes; i++) { 997 for (int i = 0; i < kProbes; i++) {
813 // Use r2 for index calculations and keep the hash intact in r0. 998 // Use r2 for index calculations and keep the hash intact in r0.
814 mov(r2, r0); 999 mov(r2, r0);
815 // Compute the masked index: (hash + i + i * i) & mask. 1000 // Compute the masked index: (hash + i + i * i) & mask.
816 if (i > 0) { 1001 if (i > 0) {
817 add(Operand(r2), Immediate(NumberDictionary::GetProbeOffset(i))); 1002 add(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
818 } 1003 }
819 and_(r2, Operand(r1)); 1004 and_(r2, r1);
820 1005
821 // Scale the index by multiplying by the entry size. 1006 // Scale the index by multiplying by the entry size.
822 ASSERT(NumberDictionary::kEntrySize == 3); 1007 ASSERT(NumberDictionary::kEntrySize == 3);
823 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3 1008 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
824 1009
825 // Check if the key matches. 1010 // Check if the key matches.
826 cmp(key, FieldOperand(elements, 1011 cmp(key, FieldOperand(elements,
827 r2, 1012 r2,
828 times_pointer_size, 1013 times_pointer_size,
829 NumberDictionary::kElementsStartOffset)); 1014 NumberDictionary::kElementsStartOffset));
(...skipping 35 matching lines...)
865 cmp(result, Operand::StaticVariable(new_space_allocation_top)); 1050 cmp(result, Operand::StaticVariable(new_space_allocation_top));
866 Check(equal, "Unexpected allocation top"); 1051 Check(equal, "Unexpected allocation top");
867 #endif 1052 #endif
868 return; 1053 return;
869 } 1054 }
870 1055
871 // Move address of new object to result. Use scratch register if available. 1056 // Move address of new object to result. Use scratch register if available.
872 if (scratch.is(no_reg)) { 1057 if (scratch.is(no_reg)) {
873 mov(result, Operand::StaticVariable(new_space_allocation_top)); 1058 mov(result, Operand::StaticVariable(new_space_allocation_top));
874 } else { 1059 } else {
875 mov(Operand(scratch), Immediate(new_space_allocation_top)); 1060 mov(scratch, Immediate(new_space_allocation_top));
876 mov(result, Operand(scratch, 0)); 1061 mov(result, Operand(scratch, 0));
877 } 1062 }
878 } 1063 }
879 1064
880 1065
881 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, 1066 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
882 Register scratch) { 1067 Register scratch) {
883 if (emit_debug_code()) { 1068 if (emit_debug_code()) {
884 test(result_end, Immediate(kObjectAlignmentMask)); 1069 test(result_end, Immediate(kObjectAlignmentMask));
885 Check(zero, "Unaligned allocation in new space"); 1070 Check(zero, "Unaligned allocation in new space");
(...skipping 38 matching lines...)
924 1109
925 Register top_reg = result_end.is_valid() ? result_end : result; 1110 Register top_reg = result_end.is_valid() ? result_end : result;
926 1111
927 // Calculate new top and bail out if new space is exhausted. 1112 // Calculate new top and bail out if new space is exhausted.
928 ExternalReference new_space_allocation_limit = 1113 ExternalReference new_space_allocation_limit =
929 ExternalReference::new_space_allocation_limit_address(isolate()); 1114 ExternalReference::new_space_allocation_limit_address(isolate());
930 1115
931 if (!top_reg.is(result)) { 1116 if (!top_reg.is(result)) {
932 mov(top_reg, result); 1117 mov(top_reg, result);
933 } 1118 }
934 add(Operand(top_reg), Immediate(object_size)); 1119 add(top_reg, Immediate(object_size));
935 j(carry, gc_required); 1120 j(carry, gc_required);
936 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit)); 1121 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
937 j(above, gc_required); 1122 j(above, gc_required);
938 1123
939 // Update allocation top. 1124 // Update allocation top.
940 UpdateAllocationTopHelper(top_reg, scratch); 1125 UpdateAllocationTopHelper(top_reg, scratch);
941 1126
942 // Tag result if requested. 1127 // Tag result if requested.
943 if (top_reg.is(result)) { 1128 if (top_reg.is(result)) {
944 if ((flags & TAG_OBJECT) != 0) { 1129 if ((flags & TAG_OBJECT) != 0) {
945 sub(Operand(result), Immediate(object_size - kHeapObjectTag)); 1130 sub(result, Immediate(object_size - kHeapObjectTag));
946 } else { 1131 } else {
947 sub(Operand(result), Immediate(object_size)); 1132 sub(result, Immediate(object_size));
948 } 1133 }
949 } else if ((flags & TAG_OBJECT) != 0) { 1134 } else if ((flags & TAG_OBJECT) != 0) {
950 add(Operand(result), Immediate(kHeapObjectTag)); 1135 add(result, Immediate(kHeapObjectTag));
951 } 1136 }
952 } 1137 }
953 1138
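Both AllocateInNewSpace variants implement the same bump-pointer fast path; a single-threaded C++ sketch (cell names assumed):

#include <cstddef>
#include <cstdint>

uintptr_t allocation_top;    // assumed: new_space_allocation_top cell
uintptr_t allocation_limit;  // assumed: new_space_allocation_limit cell
constexpr int kHeapObjectTag = 1;

// Returns 0 where the generated code would jump to gc_required.
uintptr_t Allocate(size_t object_size, bool tag_object) {
  uintptr_t result = allocation_top;
  uintptr_t new_top = result + object_size;
  if (new_top < result || new_top > allocation_limit) return 0;  // carry/limit
  allocation_top = new_top;  // UpdateAllocationTopHelper
  return tag_object ? result + kHeapObjectTag : result;  // TAG_OBJECT
}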
954 1139
955 void MacroAssembler::AllocateInNewSpace(int header_size, 1140 void MacroAssembler::AllocateInNewSpace(int header_size,
956 ScaleFactor element_size, 1141 ScaleFactor element_size,
957 Register element_count, 1142 Register element_count,
958 Register result, 1143 Register result,
959 Register result_end, 1144 Register result_end,
960 Register scratch, 1145 Register scratch,
(...skipping 17 matching lines...)
978 // Load address of new object into result. 1163 // Load address of new object into result.
979 LoadAllocationTopHelper(result, scratch, flags); 1164 LoadAllocationTopHelper(result, scratch, flags);
980 1165
981 // Calculate new top and bail out if new space is exhausted. 1166 // Calculate new top and bail out if new space is exhausted.
982 ExternalReference new_space_allocation_limit = 1167 ExternalReference new_space_allocation_limit =
983 ExternalReference::new_space_allocation_limit_address(isolate()); 1168 ExternalReference::new_space_allocation_limit_address(isolate());
984 1169
985 // We assume that element_count*element_size + header_size does not 1170 // We assume that element_count*element_size + header_size does not
986 // overflow. 1171 // overflow.
987 lea(result_end, Operand(element_count, element_size, header_size)); 1172 lea(result_end, Operand(element_count, element_size, header_size));
988 add(result_end, Operand(result)); 1173 add(result_end, result);
989 j(carry, gc_required); 1174 j(carry, gc_required);
990 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); 1175 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
991 j(above, gc_required); 1176 j(above, gc_required);
992 1177
993 // Tag result if requested. 1178 // Tag result if requested.
994 if ((flags & TAG_OBJECT) != 0) { 1179 if ((flags & TAG_OBJECT) != 0) {
995 lea(result, Operand(result, kHeapObjectTag)); 1180 lea(result, Operand(result, kHeapObjectTag));
996 } 1181 }
997 1182
998 // Update allocation top. 1183 // Update allocation top.
(...skipping 24 matching lines...)
1023 1208
1024 // Load address of new object into result. 1209 // Load address of new object into result.
1025 LoadAllocationTopHelper(result, scratch, flags); 1210 LoadAllocationTopHelper(result, scratch, flags);
1026 1211
1027 // Calculate new top and bail out if new space is exhausted. 1212 // Calculate new top and bail out if new space is exhausted.
1028 ExternalReference new_space_allocation_limit = 1213 ExternalReference new_space_allocation_limit =
1029 ExternalReference::new_space_allocation_limit_address(isolate()); 1214 ExternalReference::new_space_allocation_limit_address(isolate());
1030 if (!object_size.is(result_end)) { 1215 if (!object_size.is(result_end)) {
1031 mov(result_end, object_size); 1216 mov(result_end, object_size);
1032 } 1217 }
1033 add(result_end, Operand(result)); 1218 add(result_end, result);
1034 j(carry, gc_required); 1219 j(carry, gc_required);
1035 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); 1220 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
1036 j(above, gc_required); 1221 j(above, gc_required);
1037 1222
1038 // Tag result if requested. 1223 // Tag result if requested.
1039 if ((flags & TAG_OBJECT) != 0) { 1224 if ((flags & TAG_OBJECT) != 0) {
1040 lea(result, Operand(result, kHeapObjectTag)); 1225 lea(result, Operand(result, kHeapObjectTag));
1041 } 1226 }
1042 1227
1043 // Update allocation top. 1228 // Update allocation top.
1044 UpdateAllocationTopHelper(result_end, scratch); 1229 UpdateAllocationTopHelper(result_end, scratch);
1045 } 1230 }
1046 1231
1047 1232
1048 void MacroAssembler::UndoAllocationInNewSpace(Register object) { 1233 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1049 ExternalReference new_space_allocation_top = 1234 ExternalReference new_space_allocation_top =
1050 ExternalReference::new_space_allocation_top_address(isolate()); 1235 ExternalReference::new_space_allocation_top_address(isolate());
1051 1236
1052 // Make sure the object has no tag before resetting top. 1237 // Make sure the object has no tag before resetting top.
1053 and_(Operand(object), Immediate(~kHeapObjectTagMask)); 1238 and_(object, Immediate(~kHeapObjectTagMask));
1054 #ifdef DEBUG 1239 #ifdef DEBUG
1055 cmp(object, Operand::StaticVariable(new_space_allocation_top)); 1240 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1056 Check(below, "Undo allocation of non allocated memory"); 1241 Check(below, "Undo allocation of non allocated memory");
1057 #endif 1242 #endif
1058 mov(Operand::StaticVariable(new_space_allocation_top), object); 1243 mov(Operand::StaticVariable(new_space_allocation_top), object);
1059 } 1244 }
1060 1245
1061 1246
1062 void MacroAssembler::AllocateHeapNumber(Register result, 1247 void MacroAssembler::AllocateHeapNumber(Register result,
1063 Register scratch1, 1248 Register scratch1,
(...skipping 18 matching lines...)
1082 Register scratch1, 1267 Register scratch1,
1083 Register scratch2, 1268 Register scratch2,
1084 Register scratch3, 1269 Register scratch3,
1085 Label* gc_required) { 1270 Label* gc_required) {
1086 // Calculate the number of bytes needed for the characters in the string while 1271 // Calculate the number of bytes needed for the characters in the string while
1087 // observing object alignment. 1272 // observing object alignment.
1088 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); 1273 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1089 ASSERT(kShortSize == 2); 1274 ASSERT(kShortSize == 2);
1090 // scratch1 = length * 2 + kObjectAlignmentMask. 1275 // scratch1 = length * 2 + kObjectAlignmentMask.
1091 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask)); 1276 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1092 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask)); 1277 and_(scratch1, Immediate(~kObjectAlignmentMask));
1093 1278
1094 // Allocate two byte string in new space. 1279 // Allocate two byte string in new space.
1095 AllocateInNewSpace(SeqTwoByteString::kHeaderSize, 1280 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
1096 times_1, 1281 times_1,
1097 scratch1, 1282 scratch1,
1098 result, 1283 result,
1099 scratch2, 1284 scratch2,
1100 scratch3, 1285 scratch3,
1101 gc_required, 1286 gc_required,
1102 TAG_OBJECT); 1287 TAG_OBJECT);
(...skipping 13 matching lines...)
1116 Register length, 1301 Register length,
1117 Register scratch1, 1302 Register scratch1,
1118 Register scratch2, 1303 Register scratch2,
1119 Register scratch3, 1304 Register scratch3,
1120 Label* gc_required) { 1305 Label* gc_required) {
1121 // Calculate the number of bytes needed for the characters in the string while 1306 // Calculate the number of bytes needed for the characters in the string while
1122 // observing object alignment. 1307 // observing object alignment.
1123 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0); 1308 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
1124 mov(scratch1, length); 1309 mov(scratch1, length);
1125 ASSERT(kCharSize == 1); 1310 ASSERT(kCharSize == 1);
1126 add(Operand(scratch1), Immediate(kObjectAlignmentMask)); 1311 add(scratch1, Immediate(kObjectAlignmentMask));
1127 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask)); 1312 and_(scratch1, Immediate(~kObjectAlignmentMask));
1128 1313
1129 // Allocate ascii string in new space. 1314 // Allocate ascii string in new space.
1130 AllocateInNewSpace(SeqAsciiString::kHeaderSize, 1315 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
1131 times_1, 1316 times_1,
1132 scratch1, 1317 scratch1,
1133 result, 1318 result,
1134 scratch2, 1319 scratch2,
1135 scratch3, 1320 scratch3,
1136 gc_required, 1321 gc_required,
1137 TAG_OBJECT); 1322 TAG_OBJECT);
(...skipping 113 matching lines...)
1251 // have been tried here already, and this is fastest. 1436 // have been tried here already, and this is fastest.
1252 // A simpler loop is faster on small copies, but 30% slower on large ones. 1437 // A simpler loop is faster on small copies, but 30% slower on large ones.
1253 // The cld() instruction must have been emitted, to set the direction flag(), 1438 // The cld() instruction must have been emitted, to set the direction flag(),
1254 // before calling this function. 1439 // before calling this function.
1255 void MacroAssembler::CopyBytes(Register source, 1440 void MacroAssembler::CopyBytes(Register source,
1256 Register destination, 1441 Register destination,
1257 Register length, 1442 Register length,
1258 Register scratch) { 1443 Register scratch) {
1259 Label loop, done, short_string, short_loop; 1444 Label loop, done, short_string, short_loop;
1260 // Experimentation shows that the short string loop is faster if length < 10. 1445 // Experimentation shows that the short string loop is faster if length < 10.
1261 cmp(Operand(length), Immediate(10)); 1446 cmp(length, Immediate(10));
1262 j(less_equal, &short_string); 1447 j(less_equal, &short_string);
1263 1448
1264 ASSERT(source.is(esi)); 1449 ASSERT(source.is(esi));
1265 ASSERT(destination.is(edi)); 1450 ASSERT(destination.is(edi));
1266 ASSERT(length.is(ecx)); 1451 ASSERT(length.is(ecx));
1267 1452
1268 // Because source is 4-byte aligned in our uses of this function, 1453 // Because source is 4-byte aligned in our uses of this function,
1269 // we keep source aligned for the rep_movs call by copying the odd bytes 1454 // we keep source aligned for the rep_movs call by copying the odd bytes
1270 // at the end of the ranges. 1455 // at the end of the ranges.
1271 mov(scratch, Operand(source, length, times_1, -4)); 1456 mov(scratch, Operand(source, length, times_1, -4));
1272 mov(Operand(destination, length, times_1, -4), scratch); 1457 mov(Operand(destination, length, times_1, -4), scratch);
1273 mov(scratch, ecx); 1458 mov(scratch, ecx);
1274 shr(ecx, 2); 1459 shr(ecx, 2);
1275 rep_movs(); 1460 rep_movs();
1276 and_(Operand(scratch), Immediate(0x3)); 1461 and_(scratch, Immediate(0x3));
1277 add(destination, Operand(scratch)); 1462 add(destination, scratch);
1278 jmp(&done); 1463 jmp(&done);
1279 1464
1280 bind(&short_string); 1465 bind(&short_string);
1281 test(length, Operand(length)); 1466 test(length, length);
1282 j(zero, &done); 1467 j(zero, &done);
1283 1468
1284 bind(&short_loop); 1469 bind(&short_loop);
1285 mov_b(scratch, Operand(source, 0)); 1470 mov_b(scratch, Operand(source, 0));
1286 mov_b(Operand(destination, 0), scratch); 1471 mov_b(Operand(destination, 0), scratch);
1287 inc(source); 1472 inc(source);
1288 inc(destination); 1473 inc(destination);
1289 dec(length); 1474 dec(length);
1290 j(not_zero, &short_loop); 1475 j(not_zero, &short_loop);
1291 1476
1292 bind(&done); 1477 bind(&done);
1293 } 1478 }
1294 1479
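CopyBytes keeps the rep movs source aligned by copying the (possibly unaligned) last four bytes first, then moving whole dwords; restated in C++ (a sketch of the strategy, not the register protocol):

#include <cstdint>
#include <cstring>

void CopyBytes(const uint8_t* src, uint8_t* dst, size_t length) {
  if (length <= 10) {  // short strings: plain byte loop is faster
    for (size_t i = 0; i < length; ++i) dst[i] = src[i];
    return;
  }
  std::memcpy(dst + length - 4, src + length - 4, 4);  // odd tail bytes first
  std::memcpy(dst, src, (length / 4) * 4);             // shr(ecx, 2); rep_movs
}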
1295 1480
1481 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1482 Register end_offset,
1483 Register filler) {
1484 Label loop, entry;
1485 jmp(&entry);
1486 bind(&loop);
1487 mov(Operand(start_offset, 0), filler);
1488 add(start_offset, Immediate(kPointerSize));
1489 bind(&entry);
1490 cmp(start_offset, end_offset);
1491 j(less, &loop);
1492 }
1493
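The new InitializeFieldsWithFiller is a straight store loop over [start_offset, end_offset); the C++ equivalent:

#include <cstdint>

void InitializeFieldsWithFiller(uintptr_t* start, uintptr_t* end,
                                uintptr_t filler) {
  for (uintptr_t* p = start; p < end; ++p) *p = filler;  // one word per slot
}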
1494
1296 void MacroAssembler::NegativeZeroTest(Register result, 1495 void MacroAssembler::NegativeZeroTest(Register result,
1297 Register op, 1496 Register op,
1298 Label* then_label) { 1497 Label* then_label) {
1299 Label ok; 1498 Label ok;
1300 test(result, Operand(result)); 1499 test(result, result);
1301 j(not_zero, &ok); 1500 j(not_zero, &ok);
1302 test(op, Operand(op)); 1501 test(op, op);
1303 j(sign, then_label); 1502 j(sign, then_label);
1304 bind(&ok); 1503 bind(&ok);
1305 } 1504 }
1306 1505
1307 1506
1308 void MacroAssembler::NegativeZeroTest(Register result, 1507 void MacroAssembler::NegativeZeroTest(Register result,
1309 Register op1, 1508 Register op1,
1310 Register op2, 1509 Register op2,
1311 Register scratch, 1510 Register scratch,
1312 Label* then_label) { 1511 Label* then_label) {
1313 Label ok; 1512 Label ok;
1314 test(result, Operand(result)); 1513 test(result, result);
1315 j(not_zero, &ok); 1514 j(not_zero, &ok);
1316 mov(scratch, Operand(op1)); 1515 mov(scratch, op1);
1317 or_(scratch, Operand(op2)); 1516 or_(scratch, op2);
1318 j(sign, then_label); 1517 j(sign, then_label);
1319 bind(&ok); 1518 bind(&ok);
1320 } 1519 }
1321 1520
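Both NegativeZeroTest overloads guard the same hazard: an integer result of 0 must be treated as -0.0 when either operand was negative (e.g. -5 * 0). As C++:

#include <cstdint>

bool MightBeNegativeZero(int32_t result, int32_t op1, int32_t op2) {
  // or_(scratch, op2); j(sign, then_label): the sign bit of op1 | op2 is set
  // iff at least one operand is negative.
  return result == 0 && ((op1 | op2) < 0);
}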
1322 1521
1323 void MacroAssembler::TryGetFunctionPrototype(Register function, 1522 void MacroAssembler::TryGetFunctionPrototype(Register function,
1324 Register result, 1523 Register result,
1325 Register scratch, 1524 Register scratch,
1326 Label* miss) { 1525 Label* miss) {
1327 // Check that the receiver isn't a smi. 1526 // Check that the receiver isn't a smi.
1328 JumpIfSmi(function, miss); 1527 JumpIfSmi(function, miss);
1329 1528
1330 // Check that the function really is a function. 1529 // Check that the function really is a function.
1331 CmpObjectType(function, JS_FUNCTION_TYPE, result); 1530 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1332 j(not_equal, miss); 1531 j(not_equal, miss);
1333 1532
1334 // Make sure that the function has an instance prototype. 1533 // Make sure that the function has an instance prototype.
1335 Label non_instance; 1534 Label non_instance;
1336 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset)); 1535 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1337 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype)); 1536 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1338 j(not_zero, &non_instance); 1537 j(not_zero, &non_instance);
1339 1538
1340 // Get the prototype or initial map from the function. 1539 // Get the prototype or initial map from the function.
1341 mov(result, 1540 mov(result,
1342 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 1541 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1343 1542
1344 // If the prototype or initial map is the hole, don't return it and 1543 // If the prototype or initial map is the hole, don't return it and
1345 // simply miss the cache instead. This will allow us to allocate a 1544 // simply miss the cache instead. This will allow us to allocate a
1346 // prototype object on-demand in the runtime system. 1545 // prototype object on-demand in the runtime system.
1347 cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value())); 1546 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
1348 j(equal, miss); 1547 j(equal, miss);
1349 1548
1350 // If the function does not have an initial map, we're done. 1549 // If the function does not have an initial map, we're done.
1351 Label done; 1550 Label done;
1352 CmpObjectType(result, MAP_TYPE, scratch); 1551 CmpObjectType(result, MAP_TYPE, scratch);
1353 j(not_equal, &done); 1552 j(not_equal, &done);
1354 1553
1355 // Get the prototype from the initial map. 1554 // Get the prototype from the initial map.
1356 mov(result, FieldOperand(result, Map::kPrototypeOffset)); 1555 mov(result, FieldOperand(result, Map::kPrototypeOffset));
1357 jmp(&done); 1556 jmp(&done);
1358 1557
1359 // Non-instance prototype: Fetch prototype from constructor field 1558 // Non-instance prototype: Fetch prototype from constructor field
1360 // in initial map. 1559 // in initial map.
1361 bind(&non_instance); 1560 bind(&non_instance);
1362 mov(result, FieldOperand(result, Map::kConstructorOffset)); 1561 mov(result, FieldOperand(result, Map::kConstructorOffset));
1363 1562
1364 // All done. 1563 // All done.
1365 bind(&done); 1564 bind(&done);
1366 } 1565 }
1367 1566
1368 1567
1369 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { 1568 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
1370 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. 1569 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
1371 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); 1570 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
1372 } 1571 }
1373 1572
1374 1573
1375 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) { 1574 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
1376 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. 1575 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
1377 Object* result; 1576 Object* result;
1378 { MaybeObject* maybe_result = stub->TryGetCode(); 1577 { MaybeObject* maybe_result = stub->TryGetCode();
1379 if (!maybe_result->ToObject(&result)) return maybe_result; 1578 if (!maybe_result->ToObject(&result)) return maybe_result;
1380 } 1579 }
1381 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); 1580 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1382 return result; 1581 return result;
1383 } 1582 }
1384 1583
1385 1584
1386 void MacroAssembler::TailCallStub(CodeStub* stub) { 1585 void MacroAssembler::TailCallStub(CodeStub* stub) {
1387 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. 1586 ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
1388 jmp(stub->GetCode(), RelocInfo::CODE_TARGET); 1587 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1389 } 1588 }
1390 1589
1391 1590
1392 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) { 1591 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
1393 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1394 Object* result; 1592 Object* result;
1395 { MaybeObject* maybe_result = stub->TryGetCode(); 1593 { MaybeObject* maybe_result = stub->TryGetCode();
1396 if (!maybe_result->ToObject(&result)) return maybe_result; 1594 if (!maybe_result->ToObject(&result)) return maybe_result;
1397 } 1595 }
1398 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); 1596 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1399 return result; 1597 return result;
1400 } 1598 }
1401 1599
1402 1600
1403 void MacroAssembler::StubReturn(int argc) { 1601 void MacroAssembler::StubReturn(int argc) {
1404 ASSERT(argc >= 1 && generating_stub()); 1602 ASSERT(argc >= 1 && generating_stub());
1405 ret((argc - 1) * kPointerSize); 1603 ret((argc - 1) * kPointerSize);
1406 } 1604 }
1407 1605
1408 1606
1607 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1608 if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
1609 return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
1610 }
1611
1612
1409 void MacroAssembler::IllegalOperation(int num_arguments) { 1613 void MacroAssembler::IllegalOperation(int num_arguments) {
1410 if (num_arguments > 0) { 1614 if (num_arguments > 0) {
1411 add(Operand(esp), Immediate(num_arguments * kPointerSize)); 1615 add(esp, Immediate(num_arguments * kPointerSize));
1412 } 1616 }
1413 mov(eax, Immediate(isolate()->factory()->undefined_value())); 1617 mov(eax, Immediate(isolate()->factory()->undefined_value()));
1414 } 1618 }
1415 1619
1416 1620
1417 void MacroAssembler::IndexFromHash(Register hash, Register index) { 1621 void MacroAssembler::IndexFromHash(Register hash, Register index) {
1418 // The assert checks that the constants for the maximum number of digits 1622 // The assert checks that the constants for the maximum number of digits
1419 // for an array index cached in the hash field and the number of bits 1623 // for an array index cached in the hash field and the number of bits
1420 // reserved for it do not conflict. 1624 // reserved for it do not conflict.
1421 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < 1625 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
(...skipping 13 matching lines...)
1435 1639
1436 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { 1640 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1437 CallRuntime(Runtime::FunctionForId(id), num_arguments); 1641 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1438 } 1642 }
1439 1643
1440 1644
1441 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { 1645 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
1442 const Runtime::Function* function = Runtime::FunctionForId(id); 1646 const Runtime::Function* function = Runtime::FunctionForId(id);
1443 Set(eax, Immediate(function->nargs)); 1647 Set(eax, Immediate(function->nargs));
1444 mov(ebx, Immediate(ExternalReference(function, isolate()))); 1648 mov(ebx, Immediate(ExternalReference(function, isolate())));
1445 CEntryStub ces(1); 1649 CEntryStub ces(1, kSaveFPRegs);
1446 ces.SaveDoubles();
1447 CallStub(&ces); 1650 CallStub(&ces);
1448 } 1651 }
1449 1652
1450 1653
1451 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, 1654 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
1452 int num_arguments) { 1655 int num_arguments) {
1453 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); 1656 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
1454 } 1657 }
1455 1658
1456 1659
(...skipping 159 matching lines...)
1616 mov(eax, Operand(esi, 0)); 1819 mov(eax, Operand(esi, 0));
1617 } 1820 }
1618 1821
1619 Label empty_handle; 1822 Label empty_handle;
1620 Label prologue; 1823 Label prologue;
1621 Label promote_scheduled_exception; 1824 Label promote_scheduled_exception;
1622 Label delete_allocated_handles; 1825 Label delete_allocated_handles;
1623 Label leave_exit_frame; 1826 Label leave_exit_frame;
1624 1827
1625 // Check if the result handle holds 0. 1828 // Check if the result handle holds 0.
1626 test(eax, Operand(eax)); 1829 test(eax, eax);
1627 j(zero, &empty_handle); 1830 j(zero, &empty_handle);
1628 // It was non-zero. Dereference to get the result value. 1831 // It was non-zero. Dereference to get the result value.
1629 mov(eax, Operand(eax, 0)); 1832 mov(eax, Operand(eax, 0));
1630 bind(&prologue); 1833 bind(&prologue);
1631 // No more valid handles (the result handle was the last one). Restore 1834 // No more valid handles (the result handle was the last one). Restore
1632 // previous handle scope. 1835 // previous handle scope.
1633 mov(Operand::StaticVariable(next_address), ebx); 1836 mov(Operand::StaticVariable(next_address), ebx);
1634 sub(Operand::StaticVariable(level_address), Immediate(1)); 1837 sub(Operand::StaticVariable(level_address), Immediate(1));
1635 Assert(above_equal, "Invalid HandleScope level"); 1838 Assert(above_equal, "Invalid HandleScope level");
1636 cmp(edi, Operand::StaticVariable(limit_address)); 1839 cmp(edi, Operand::StaticVariable(limit_address));
(...skipping 20 matching lines...)
1657 jmp(&prologue); 1860 jmp(&prologue);
1658 1861
1659 // HandleScope limit has changed. Delete allocated extensions. 1862 // HandleScope limit has changed. Delete allocated extensions.
1660 ExternalReference delete_extensions = 1863 ExternalReference delete_extensions =
1661 ExternalReference::delete_handle_scope_extensions(isolate()); 1864 ExternalReference::delete_handle_scope_extensions(isolate());
1662 bind(&delete_allocated_handles); 1865 bind(&delete_allocated_handles);
1663 mov(Operand::StaticVariable(limit_address), edi); 1866 mov(Operand::StaticVariable(limit_address), edi);
1664 mov(edi, eax); 1867 mov(edi, eax);
1665 mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address())); 1868 mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
1666 mov(eax, Immediate(delete_extensions)); 1869 mov(eax, Immediate(delete_extensions));
1667 call(Operand(eax)); 1870 call(eax);
1668 mov(eax, edi); 1871 mov(eax, edi);
1669 jmp(&leave_exit_frame); 1872 jmp(&leave_exit_frame);
1670 1873
1671 return result; 1874 return result;
1672 } 1875 }
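
The tail of this function restores the HandleScope bookkeeping after the API call returns. A hedged model of that bookkeeping with invented stand-in types (the real data lives behind next_address, limit_address, and level_address):

struct HandleScopeData {
  void** next;   // next free handle slot (ebx holds the saved value)
  void** limit;  // end of the current handle block (edi holds the saved value)
  int level;     // nesting depth; asserted non-negative above
};

static void RestoreAfterApiCall(HandleScopeData* data,
                                void** saved_next, void** saved_limit) {
  data->next = saved_next;     // mov(Operand::StaticVariable(next_address), ebx)
  data->level -= 1;            // sub(Operand::StaticVariable(level_address), 1)
  if (data->limit != saved_limit) {
    // The API call grew the scope: free the extra blocks (the
    // delete_allocated_handles path above) before resetting the limit.
    data->limit = saved_limit;
  }
}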
1673 1876
1674 1877
1675 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) { 1878 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
1676 // Set the entry point and jump to the C entry runtime stub. 1879 // Set the entry point and jump to the C entry runtime stub.
1677 mov(ebx, Immediate(ext)); 1880 mov(ebx, Immediate(ext));
(...skipping 13 matching lines...)
1691 1894
1692 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { 1895 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
1693 // This macro takes the dst register to make the code more readable 1896 // This macro takes the dst register to make the code more readable
1694 // at the call sites. However, the dst register has to be ecx to 1897 // at the call sites. However, the dst register has to be ecx to
1695 // follow the calling convention, which requires the call type to be 1898 // follow the calling convention, which requires the call type to be
1696 // in ecx. 1899 // in ecx.
1697 ASSERT(dst.is(ecx)); 1900 ASSERT(dst.is(ecx));
1698 if (call_kind == CALL_AS_FUNCTION) { 1901 if (call_kind == CALL_AS_FUNCTION) {
1699 // Set to some non-zero smi by updating the least significant 1902 // Set to some non-zero smi by updating the least significant
1700 // byte. 1903 // byte.
1701 mov_b(Operand(dst), 1 << kSmiTagSize); 1904 mov_b(dst, 1 << kSmiTagSize);
1702 } else { 1905 } else {
1703 // Set to smi zero by clearing the register. 1906 // Set to smi zero by clearing the register.
1704 xor_(dst, Operand(dst)); 1907 xor_(dst, dst);
1705 } 1908 }
1706 } 1909 }
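
SetCallKind relies on ia32's smi encoding: with one tag bit, integer n is stored as n << 1 with a clear low bit. A minimal sketch of that assumption:

const int kSmiTagBits = 1;  // assumption: ia32 smi layout, tag value 0
static inline int ToSmi(int value) { return value << kSmiTagBits; }
// CALL_AS_FUNCTION writes ToSmi(1) == 2 (i.e. 1 << kSmiTagSize) into the low
// byte of ecx; CALL_AS_METHOD clears the register, which equals ToSmi(0).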
1707 1910
1708 1911
1709 void MacroAssembler::InvokePrologue(const ParameterCount& expected, 1912 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1710 const ParameterCount& actual, 1913 const ParameterCount& actual,
1711 Handle<Code> code_constant, 1914 Handle<Code> code_constant,
1712 const Operand& code_operand, 1915 const Operand& code_operand,
1713 Label* done, 1916 Label* done,
1714 InvokeFlag flag, 1917 InvokeFlag flag,
(...skipping 24 matching lines...)
1739 // Expected is in register, actual is immediate. This is the 1942 // Expected is in register, actual is immediate. This is the
1740 // case when we invoke function values without going through the 1943 // case when we invoke function values without going through the
1741 // IC mechanism. 1944 // IC mechanism.
1742 cmp(expected.reg(), actual.immediate()); 1945 cmp(expected.reg(), actual.immediate());
1743 j(equal, &invoke); 1946 j(equal, &invoke);
1744 ASSERT(expected.reg().is(ebx)); 1947 ASSERT(expected.reg().is(ebx));
1745 mov(eax, actual.immediate()); 1948 mov(eax, actual.immediate());
1746 } else if (!expected.reg().is(actual.reg())) { 1949 } else if (!expected.reg().is(actual.reg())) {
1747 // Both expected and actual are in (different) registers. This 1950 // Both expected and actual are in (different) registers. This
1748 // is the case when we invoke functions using call and apply. 1951 // is the case when we invoke functions using call and apply.
1749 cmp(expected.reg(), Operand(actual.reg())); 1952 cmp(expected.reg(), actual.reg());
1750 j(equal, &invoke); 1953 j(equal, &invoke);
1751 ASSERT(actual.reg().is(eax)); 1954 ASSERT(actual.reg().is(eax));
1752 ASSERT(expected.reg().is(ebx)); 1955 ASSERT(expected.reg().is(ebx));
1753 } 1956 }
1754 } 1957 }
1755 1958
1756 if (!definitely_matches) { 1959 if (!definitely_matches) {
1757 Handle<Code> adaptor = 1960 Handle<Code> adaptor =
1758 isolate()->builtins()->ArgumentsAdaptorTrampoline(); 1961 isolate()->builtins()->ArgumentsAdaptorTrampoline();
1759 if (!code_constant.is_null()) { 1962 if (!code_constant.is_null()) {
1760 mov(edx, Immediate(code_constant)); 1963 mov(edx, Immediate(code_constant));
1761 add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); 1964 add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1762 } else if (!code_operand.is_reg(edx)) { 1965 } else if (!code_operand.is_reg(edx)) {
1763 mov(edx, code_operand); 1966 mov(edx, code_operand);
1764 } 1967 }
1765 1968
1766 if (flag == CALL_FUNCTION) { 1969 if (flag == CALL_FUNCTION) {
1767 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET)); 1970 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
1768 SetCallKind(ecx, call_kind); 1971 SetCallKind(ecx, call_kind);
1769 call(adaptor, RelocInfo::CODE_TARGET); 1972 call(adaptor, RelocInfo::CODE_TARGET);
1770 call_wrapper.AfterCall(); 1973 call_wrapper.AfterCall();
1771 jmp(done, done_near); 1974 jmp(done, done_near);
1772 } else { 1975 } else {
1773 SetCallKind(ecx, call_kind); 1976 SetCallKind(ecx, call_kind);
1774 jmp(adaptor, RelocInfo::CODE_TARGET); 1977 jmp(adaptor, RelocInfo::CODE_TARGET);
1775 } 1978 }
1776 bind(&invoke); 1979 bind(&invoke);
1777 } 1980 }
1778 } 1981 }
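
InvokePrologue boils down to one decision: invoke the code directly when the argument counts are known to match, otherwise call or jump through the ArgumentsAdaptorTrampoline, which pads or truncates the arguments. A hedged sketch of that decision (the sentinel name mirrors SharedFunctionInfo's; its value here is an assumption):

const int kDontAdaptArgumentsSentinel = -1;  // assumed "accepts any count" marker
static inline bool NeedsArgumentsAdaptor(int expected, int actual) {
  if (expected == kDontAdaptArgumentsSentinel) return false;
  return expected != actual;  // mismatch: route through the adaptor trampoline
}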
1779 1982
1780 1983
1781 void MacroAssembler::InvokeCode(const Operand& code, 1984 void MacroAssembler::InvokeCode(const Operand& code,
1782 const ParameterCount& expected, 1985 const ParameterCount& expected,
1783 const ParameterCount& actual, 1986 const ParameterCount& actual,
1784 InvokeFlag flag, 1987 InvokeFlag flag,
1785 const CallWrapper& call_wrapper, 1988 const CallWrapper& call_wrapper,
1786 CallKind call_kind) { 1989 CallKind call_kind) {
1990 // You can't call a function without a valid frame.
1991 ASSERT(flag == JUMP_FUNCTION || has_frame());
1992
1787 Label done; 1993 Label done;
1788 InvokePrologue(expected, actual, Handle<Code>::null(), code, 1994 InvokePrologue(expected, actual, Handle<Code>::null(), code,
1789 &done, flag, Label::kNear, call_wrapper, 1995 &done, flag, Label::kNear, call_wrapper,
1790 call_kind); 1996 call_kind);
1791 if (flag == CALL_FUNCTION) { 1997 if (flag == CALL_FUNCTION) {
1792 call_wrapper.BeforeCall(CallSize(code)); 1998 call_wrapper.BeforeCall(CallSize(code));
1793 SetCallKind(ecx, call_kind); 1999 SetCallKind(ecx, call_kind);
1794 call(code); 2000 call(code);
1795 call_wrapper.AfterCall(); 2001 call_wrapper.AfterCall();
1796 } else { 2002 } else {
1797 ASSERT(flag == JUMP_FUNCTION); 2003 ASSERT(flag == JUMP_FUNCTION);
1798 SetCallKind(ecx, call_kind); 2004 SetCallKind(ecx, call_kind);
1799 jmp(code); 2005 jmp(code);
1800 } 2006 }
1801 bind(&done); 2007 bind(&done);
1802 } 2008 }
1803 2009
1804 2010
1805 void MacroAssembler::InvokeCode(Handle<Code> code, 2011 void MacroAssembler::InvokeCode(Handle<Code> code,
1806 const ParameterCount& expected, 2012 const ParameterCount& expected,
1807 const ParameterCount& actual, 2013 const ParameterCount& actual,
1808 RelocInfo::Mode rmode, 2014 RelocInfo::Mode rmode,
1809 InvokeFlag flag, 2015 InvokeFlag flag,
1810 const CallWrapper& call_wrapper, 2016 const CallWrapper& call_wrapper,
1811 CallKind call_kind) { 2017 CallKind call_kind) {
2018 // You can't call a function without a valid frame.
2019 ASSERT(flag == JUMP_FUNCTION || has_frame());
2020
1812 Label done; 2021 Label done;
1813 Operand dummy(eax); 2022 Operand dummy(eax, 0);
1814 InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear, 2023 InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
1815 call_wrapper, call_kind); 2024 call_wrapper, call_kind);
1816 if (flag == CALL_FUNCTION) { 2025 if (flag == CALL_FUNCTION) {
1817 call_wrapper.BeforeCall(CallSize(code, rmode)); 2026 call_wrapper.BeforeCall(CallSize(code, rmode));
1818 SetCallKind(ecx, call_kind); 2027 SetCallKind(ecx, call_kind);
1819 call(code, rmode); 2028 call(code, rmode);
1820 call_wrapper.AfterCall(); 2029 call_wrapper.AfterCall();
1821 } else { 2030 } else {
1822 ASSERT(flag == JUMP_FUNCTION); 2031 ASSERT(flag == JUMP_FUNCTION);
1823 SetCallKind(ecx, call_kind); 2032 SetCallKind(ecx, call_kind);
1824 jmp(code, rmode); 2033 jmp(code, rmode);
1825 } 2034 }
1826 bind(&done); 2035 bind(&done);
1827 } 2036 }
1828 2037
1829 2038
1830 void MacroAssembler::InvokeFunction(Register fun, 2039 void MacroAssembler::InvokeFunction(Register fun,
1831 const ParameterCount& actual, 2040 const ParameterCount& actual,
1832 InvokeFlag flag, 2041 InvokeFlag flag,
1833 const CallWrapper& call_wrapper, 2042 const CallWrapper& call_wrapper,
1834 CallKind call_kind) { 2043 CallKind call_kind) {
2044 // You can't call a function without a valid frame.
2045 ASSERT(flag == JUMP_FUNCTION || has_frame());
2046
1835 ASSERT(fun.is(edi)); 2047 ASSERT(fun.is(edi));
1836 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 2048 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1837 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); 2049 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1838 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); 2050 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
1839 SmiUntag(ebx); 2051 SmiUntag(ebx);
1840 2052
1841 ParameterCount expected(ebx); 2053 ParameterCount expected(ebx);
1842 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), 2054 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
1843 expected, actual, flag, call_wrapper, call_kind); 2055 expected, actual, flag, call_wrapper, call_kind);
1844 } 2056 }
1845 2057
1846 2058
1847 void MacroAssembler::InvokeFunction(JSFunction* function, 2059 void MacroAssembler::InvokeFunction(JSFunction* function,
1848 const ParameterCount& actual, 2060 const ParameterCount& actual,
1849 InvokeFlag flag, 2061 InvokeFlag flag,
1850 const CallWrapper& call_wrapper, 2062 const CallWrapper& call_wrapper,
1851 CallKind call_kind) { 2063 CallKind call_kind) {
2064 // You can't call a function without a valid frame.
2065 ASSERT(flag == JUMP_FUNCTION || has_frame());
2066
1852 ASSERT(function->is_compiled()); 2067 ASSERT(function->is_compiled());
1853 // Get the function and set up the context. 2068 // Get the function and set up the context.
1854 mov(edi, Immediate(Handle<JSFunction>(function))); 2069 mov(edi, Immediate(Handle<JSFunction>(function)));
1855 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); 2070 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1856 2071
1857 ParameterCount expected(function->shared()->formal_parameter_count()); 2072 ParameterCount expected(function->shared()->formal_parameter_count());
1858 if (V8::UseCrankshaft()) { 2073 if (V8::UseCrankshaft()) {
1859 // TODO(kasperl): For now, we always call indirectly through the 2074 // TODO(kasperl): For now, we always call indirectly through the
1860 // code field in the function to allow recompilation to take effect 2075 // code field in the function to allow recompilation to take effect
1861 // without changing any of the call sites. 2076 // without changing any of the call sites.
1862 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), 2077 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
1863 expected, actual, flag, call_wrapper, call_kind); 2078 expected, actual, flag, call_wrapper, call_kind);
1864 } else { 2079 } else {
1865 Handle<Code> code(function->code()); 2080 Handle<Code> code(function->code());
1866 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, 2081 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
1867 flag, call_wrapper, call_kind); 2082 flag, call_wrapper, call_kind);
1868 } 2083 }
1869 } 2084 }
1870 2085
1871 2086
1872 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, 2087 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
1873 InvokeFlag flag, 2088 InvokeFlag flag,
1874 const CallWrapper& call_wrapper) { 2089 const CallWrapper& call_wrapper) {
1875 // Calls are not allowed in some stubs. 2090 // You can't call a builtin without a valid frame.
1876 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); 2091 ASSERT(flag == JUMP_FUNCTION || has_frame());
1877 2092
1878 // Rely on the assertion to check that the number of provided 2093 // Rely on the assertion to check that the number of provided
1879 // arguments matches the expected number of arguments. Fake a 2094 // arguments matches the expected number of arguments. Fake a
1880 // parameter count to avoid emitting code to do the check. 2095 // parameter count to avoid emitting code to do the check.
1881 ParameterCount expected(0); 2096 ParameterCount expected(0);
1882 GetBuiltinFunction(edi, id); 2097 GetBuiltinFunction(edi, id);
1883 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), 2098 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
1884 expected, expected, flag, call_wrapper, CALL_AS_METHOD); 2099 expected, expected, flag, call_wrapper, CALL_AS_METHOD);
1885 } 2100 }
1886 2101
2102
1887 void MacroAssembler::GetBuiltinFunction(Register target, 2103 void MacroAssembler::GetBuiltinFunction(Register target,
1888 Builtins::JavaScript id) { 2104 Builtins::JavaScript id) {
1889 // Load the JavaScript builtin function from the builtins object. 2105 // Load the JavaScript builtin function from the builtins object.
1890 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); 2106 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1891 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset)); 2107 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
1892 mov(target, FieldOperand(target, 2108 mov(target, FieldOperand(target,
1893 JSBuiltinsObject::OffsetOfFunctionWithId(id))); 2109 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
1894 } 2110 }
1895 2111
2112
1896 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { 2113 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
1897 ASSERT(!target.is(edi)); 2114 ASSERT(!target.is(edi));
1898 // Load the JavaScript builtin function from the builtins object. 2115 // Load the JavaScript builtin function from the builtins object.
1899 GetBuiltinFunction(edi, id); 2116 GetBuiltinFunction(edi, id);
1900 // Load the code entry point from the function into the target register. 2117 // Load the code entry point from the function into the target register.
1901 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset)); 2118 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
1902 } 2119 }
1903 2120
1904 2121
1905 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { 2122 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
(...skipping 81 matching lines...)
1987 void MacroAssembler::Ret() { 2204 void MacroAssembler::Ret() {
1988 ret(0); 2205 ret(0);
1989 } 2206 }
1990 2207
1991 2208
1992 void MacroAssembler::Ret(int bytes_dropped, Register scratch) { 2209 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
1993 if (is_uint16(bytes_dropped)) { 2210 if (is_uint16(bytes_dropped)) {
1994 ret(bytes_dropped); 2211 ret(bytes_dropped);
1995 } else { 2212 } else {
1996 pop(scratch); 2213 pop(scratch);
1997 add(Operand(esp), Immediate(bytes_dropped)); 2214 add(esp, Immediate(bytes_dropped));
1998 push(scratch); 2215 push(scratch);
1999 ret(0); 2216 ret(0);
2000 } 2217 }
2001 } 2218 }
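
The fallback branch exists because x86's `ret imm16` encoding only holds a 16-bit byte count; for larger drops the return address is popped, esp is adjusted directly, and the address is pushed back for a plain ret. A sketch of the guard used above:

static inline bool IsUint16(int x) { return x >= 0 && x <= 0xFFFF; }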
2002 2219
2003 2220
2004 2221
2005 2222
2006 void MacroAssembler::Drop(int stack_elements) { 2223 void MacroAssembler::Drop(int stack_elements) {
2007 if (stack_elements > 0) { 2224 if (stack_elements > 0) {
2008 add(Operand(esp), Immediate(stack_elements * kPointerSize)); 2225 add(esp, Immediate(stack_elements * kPointerSize));
2009 } 2226 }
2010 } 2227 }
2011 2228
2012 2229
2013 void MacroAssembler::Move(Register dst, Register src) { 2230 void MacroAssembler::Move(Register dst, Register src) {
2014 if (!dst.is(src)) { 2231 if (!dst.is(src)) {
2015 mov(dst, src); 2232 mov(dst, src);
2016 } 2233 }
2017 } 2234 }
2018 2235
(...skipping 122 matching lines...)
2141 // from the real pointer as a smi. 2358 // from the real pointer as a smi.
2142 intptr_t p1 = reinterpret_cast<intptr_t>(msg); 2359 intptr_t p1 = reinterpret_cast<intptr_t>(msg);
2143 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; 2360 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
2144 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); 2361 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
2145 #ifdef DEBUG 2362 #ifdef DEBUG
2146 if (msg != NULL) { 2363 if (msg != NULL) {
2147 RecordComment("Abort message: "); 2364 RecordComment("Abort message: ");
2148 RecordComment(msg); 2365 RecordComment(msg);
2149 } 2366 }
2150 #endif 2367 #endif
2151 // Disable stub call restrictions to always allow calls to abort.
2152 AllowStubCallsScope allow_scope(this, true);
2153 2368
2154 push(eax); 2369 push(eax);
2155 push(Immediate(p0)); 2370 push(Immediate(p0));
2156 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); 2371 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
2157 CallRuntime(Runtime::kAbort, 2); 2372 // Disable stub call restrictions to always allow calls to abort.
2373 if (!has_frame_) {
2374 // We don't actually want to generate a pile of code for this, so just
2375 // claim there is a stack frame, without generating one.
2376 FrameScope scope(this, StackFrame::NONE);
2377 CallRuntime(Runtime::kAbort, 2);
2378 } else {
2379 CallRuntime(Runtime::kAbort, 2);
2380 }
2158 // will not return here 2381 // will not return here
2159 int3(); 2382 int3();
2160 } 2383 }
2161 2384
2162 2385
2163 void MacroAssembler::LoadInstanceDescriptors(Register map, 2386 void MacroAssembler::LoadInstanceDescriptors(Register map,
2164 Register descriptors) { 2387 Register descriptors) {
2165 mov(descriptors, 2388 mov(descriptors,
2166 FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); 2389 FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
2167 Label not_smi; 2390 Label not_smi;
2168 JumpIfNotSmi(descriptors, &not_smi); 2391 JumpIfNotSmi(descriptors, &not_smi);
2169 mov(descriptors, isolate()->factory()->empty_descriptor_array()); 2392 mov(descriptors, isolate()->factory()->empty_descriptor_array());
2170 bind(&not_smi); 2393 bind(&not_smi);
2171 } 2394 }
2172 2395
2173 2396
2174 void MacroAssembler::LoadPowerOf2(XMMRegister dst, 2397 void MacroAssembler::LoadPowerOf2(XMMRegister dst,
2175 Register scratch, 2398 Register scratch,
2176 int power) { 2399 int power) {
2177 ASSERT(is_uintn(power + HeapNumber::kExponentBias, 2400 ASSERT(is_uintn(power + HeapNumber::kExponentBias,
2178 HeapNumber::kExponentBits)); 2401 HeapNumber::kExponentBits));
2179 mov(scratch, Immediate(power + HeapNumber::kExponentBias)); 2402 mov(scratch, Immediate(power + HeapNumber::kExponentBias));
2180 movd(dst, Operand(scratch)); 2403 movd(dst, scratch);
2181 psllq(dst, HeapNumber::kMantissaBits); 2404 psllq(dst, HeapNumber::kMantissaBits);
2182 } 2405 }
2183 2406
2184 2407
2185 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( 2408 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
2186 Register instance_type, 2409 Register instance_type,
2187 Register scratch, 2410 Register scratch,
2188 Label* failure) { 2411 Label* failure) {
2189 if (!scratch.is(instance_type)) { 2412 if (!scratch.is(instance_type)) {
2190 mov(scratch, instance_type); 2413 mov(scratch, instance_type);
2191 } 2414 }
2192 and_(scratch, 2415 and_(scratch,
2193 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); 2416 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2194 cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag); 2417 cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
2195 j(not_equal, failure); 2418 j(not_equal, failure);
2196 } 2419 }
2197 2420
2198 2421
2199 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1, 2422 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
2200 Register object2, 2423 Register object2,
2201 Register scratch1, 2424 Register scratch1,
2202 Register scratch2, 2425 Register scratch2,
2203 Label* failure) { 2426 Label* failure) {
2204 // Check that both objects are not smis. 2427 // Check that both objects are not smis.
2205 STATIC_ASSERT(kSmiTag == 0); 2428 STATIC_ASSERT(kSmiTag == 0);
2206 mov(scratch1, Operand(object1)); 2429 mov(scratch1, object1);
2207 and_(scratch1, Operand(object2)); 2430 and_(scratch1, object2);
2208 JumpIfSmi(scratch1, failure); 2431 JumpIfSmi(scratch1, failure);
2209 2432
2210 // Load instance type for both strings. 2433 // Load instance type for both strings.
2211 mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset)); 2434 mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
2212 mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset)); 2435 mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
2213 movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset)); 2436 movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2214 movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset)); 2437 movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2215 2438
2216 // Check that both are flat ascii strings. 2439 // Check that both are flat ascii strings.
2217 const int kFlatAsciiStringMask = 2440 const int kFlatAsciiStringMask =
2218 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; 2441 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2219 const int kFlatAsciiStringTag = ASCII_STRING_TYPE; 2442 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
2220 // Interleave bits from both instance types and compare them in one check. 2443 // Interleave bits from both instance types and compare them in one check.
2221 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); 2444 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
2222 and_(scratch1, kFlatAsciiStringMask); 2445 and_(scratch1, kFlatAsciiStringMask);
2223 and_(scratch2, kFlatAsciiStringMask); 2446 and_(scratch2, kFlatAsciiStringMask);
2224 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); 2447 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
2225 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); 2448 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
2226 j(not_equal, failure); 2449 j(not_equal, failure);
2227 } 2450 }
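
The lea above packs both masked instance types into one register so a single cmp validates both strings. This works because kFlatAsciiStringMask and its copy shifted left by 3 share no bits (the ASSERT_EQ), so the addition cannot carry and behaves like a bitwise OR. A hedged model with assumed constant values:

#include <stdint.h>

const uint32_t kFlatAsciiMask = 0x87;  // assumed kIsNotString|kRepresentation|kEncoding
const uint32_t kFlatAsciiTag  = 0x04;  // assumed ASCII_STRING_TYPE under that mask

static inline bool BothFlatAscii(uint32_t type1, uint32_t type2) {
  uint32_t combined = (type1 & kFlatAsciiMask) + ((type2 & kFlatAsciiMask) << 3);
  return combined == (kFlatAsciiTag | (kFlatAsciiTag << 3));
}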
2228 2451
2229 2452
2230 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) { 2453 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
2231 int frame_alignment = OS::ActivationFrameAlignment(); 2454 int frame_alignment = OS::ActivationFrameAlignment();
2232 if (frame_alignment != 0) { 2455 if (frame_alignment != 0) {
2233 // Make stack end at alignment and make room for num_arguments words 2456 // Make stack end at alignment and make room for num_arguments words
2234 // and the original value of esp. 2457 // and the original value of esp.
2235 mov(scratch, esp); 2458 mov(scratch, esp);
2236 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize)); 2459 sub(esp, Immediate((num_arguments + 1) * kPointerSize));
2237 ASSERT(IsPowerOf2(frame_alignment)); 2460 ASSERT(IsPowerOf2(frame_alignment));
2238 and_(esp, -frame_alignment); 2461 and_(esp, -frame_alignment);
2239 mov(Operand(esp, num_arguments * kPointerSize), scratch); 2462 mov(Operand(esp, num_arguments * kPointerSize), scratch);
2240 } else { 2463 } else {
2241 sub(Operand(esp), Immediate(num_arguments * kPointerSize)); 2464 sub(esp, Immediate(num_arguments * kPointerSize));
2242 } 2465 }
2243 } 2466 }
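
The alignment dance above reserves the argument slots plus one extra word, rounds esp down, and stashes the original esp in the extra word so CallCFunction can restore it afterwards. A sketch of the pointer arithmetic, with the alignment passed in (OS::ActivationFrameAlignment varies by platform):

#include <stdint.h>

static uintptr_t AlignedOutgoingArgs(uintptr_t esp, int num_arguments,
                                     uintptr_t frame_alignment) {
  uintptr_t saved = esp;                 // mov(scratch, esp)
  esp -= (num_arguments + 1) * 4;        // args + saved esp; kPointerSize == 4
  esp &= ~(frame_alignment - 1);         // and_(esp, -frame_alignment)
  // Real code then stores `saved` at Operand(esp, num_arguments * kPointerSize).
  (void)saved;
  return esp;
}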
2244 2467
2245 2468
2246 void MacroAssembler::CallCFunction(ExternalReference function, 2469 void MacroAssembler::CallCFunction(ExternalReference function,
2247 int num_arguments) { 2470 int num_arguments) {
2248 // Trashing eax is ok as it will be the return value. 2471 // Trashing eax is ok as it will be the return value.
2249 mov(Operand(eax), Immediate(function)); 2472 mov(eax, Immediate(function));
2250 CallCFunction(eax, num_arguments); 2473 CallCFunction(eax, num_arguments);
2251 } 2474 }
2252 2475
2253 2476
2254 void MacroAssembler::CallCFunction(Register function, 2477 void MacroAssembler::CallCFunction(Register function,
2255 int num_arguments) { 2478 int num_arguments) {
2479 ASSERT(has_frame());
2256 // Check stack alignment. 2480 // Check stack alignment.
2257 if (emit_debug_code()) { 2481 if (emit_debug_code()) {
2258 CheckStackAlignment(); 2482 CheckStackAlignment();
2259 } 2483 }
2260 2484
2261 call(Operand(function)); 2485 call(function);
2262 if (OS::ActivationFrameAlignment() != 0) { 2486 if (OS::ActivationFrameAlignment() != 0) {
2263 mov(esp, Operand(esp, num_arguments * kPointerSize)); 2487 mov(esp, Operand(esp, num_arguments * kPointerSize));
2264 } else { 2488 } else {
2265 add(Operand(esp), Immediate(num_arguments * kPointerSize)); 2489 add(esp, Immediate(num_arguments * kPointerSize));
2266 } 2490 }
2267 } 2491 }
2268 2492
2269 2493
2494 bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
2495 if (r1.is(r2)) return true;
2496 if (r1.is(r3)) return true;
2497 if (r1.is(r4)) return true;
2498 if (r2.is(r3)) return true;
2499 if (r2.is(r4)) return true;
2500 if (r3.is(r4)) return true;
2501 return false;
2502 }
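
AreAliased is a pairwise distinctness check; the marking helpers below use it to assert that scratch registers don't overlap, e.g. ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx)). The same check over plain register codes, as a sketch:

static bool AnyAliased(const int* codes, int n) {
  for (int i = 0; i < n; i++)
    for (int j = i + 1; j < n; j++)
      if (codes[i] == codes[j]) return true;
  return false;
}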
2503
2504
2270 CodePatcher::CodePatcher(byte* address, int size) 2505 CodePatcher::CodePatcher(byte* address, int size)
2271 : address_(address), 2506 : address_(address),
2272 size_(size), 2507 size_(size),
2273 masm_(Isolate::Current(), address, size + Assembler::kGap) { 2508 masm_(Isolate::Current(), address, size + Assembler::kGap) {
2274 // Create a new macro assembler pointing to the address of the code to patch. 2509 // Create a new macro assembler pointing to the address of the code to patch.
2275 // The size is adjusted with kGap in order for the assembler to generate size 2510 // The size is adjusted with kGap in order for the assembler to generate size
2276 // bytes of instructions without failing with buffer size constraints. 2511 // bytes of instructions without failing with buffer size constraints.
2277 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2512 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2278 } 2513 }
2279 2514
2280 2515
2281 CodePatcher::~CodePatcher() { 2516 CodePatcher::~CodePatcher() {
2282 // Indicate that code has changed. 2517 // Indicate that code has changed.
2283 CPU::FlushICache(address_, size_); 2518 CPU::FlushICache(address_, size_);
2284 2519
2285 // Check that the code was patched as expected. 2520 // Check that the code was patched as expected.
2286 ASSERT(masm_.pc_ == address_ + size_); 2521 ASSERT(masm_.pc_ == address_ + size_);
2287 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2522 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2288 } 2523 }
2289 2524
2290 2525
2526 void MacroAssembler::CheckPageFlag(
2527 Register object,
2528 Register scratch,
2529 int mask,
2530 Condition cc,
2531 Label* condition_met,
2532 Label::Distance condition_met_distance) {
2533 ASSERT(cc == zero || cc == not_zero);
2534 if (scratch.is(object)) {
2535 and_(scratch, Immediate(~Page::kPageAlignmentMask));
2536 } else {
2537 mov(scratch, Immediate(~Page::kPageAlignmentMask));
2538 and_(scratch, object);
2539 }
2540 if (mask < (1 << kBitsPerByte)) {
2541 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
2542 static_cast<uint8_t>(mask));
2543 } else {
2544 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
2545 }
2546 j(cc, condition_met, condition_met_distance);
2547 }
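
CheckPageFlag exploits the fact that heap pages are aligned to their own size: masking off the low bits of any object address yields the MemoryChunk header, whose flags word can then be tested. A minimal sketch, assuming a 1MB page size (the exact size is an assumption here):

#include <stdint.h>

const uintptr_t kPageAlignmentMaskModel = (1u << 20) - 1;  // assumed 1MB pages

static inline uintptr_t ChunkHeaderOf(uintptr_t object) {
  return object & ~kPageAlignmentMaskModel;  // and_(scratch, ~kPageAlignmentMask)
}
// The flags word is then read at MemoryChunk::kFlagsOffset from this base;
// masks that fit in a byte use the shorter test_b encoding.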
2548
2549
2550 void MacroAssembler::JumpIfBlack(Register object,
2551 Register scratch0,
2552 Register scratch1,
2553 Label* on_black,
2554 Label::Distance on_black_near) {
2555 HasColor(object, scratch0, scratch1,
2556 on_black, on_black_near,
2557 1, 0); // kBlackBitPattern.
2558 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
2559 }
2560
2561
2562 void MacroAssembler::HasColor(Register object,
2563 Register bitmap_scratch,
2564 Register mask_scratch,
2565 Label* has_color,
2566 Label::Distance has_color_distance,
2567 int first_bit,
2568 int second_bit) {
2569 ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));
2570
2571 GetMarkBits(object, bitmap_scratch, mask_scratch);
2572
2573 Label other_color, word_boundary;
2574 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2575 j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
2576 add(mask_scratch, mask_scratch); // Shift left 1 by adding.
2577 j(zero, &word_boundary, Label::kNear);
2578 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2579 j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
2580 jmp(&other_color, Label::kNear);
2581
2582 bind(&word_boundary);
2583 test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
2584
2585 j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
2586 bind(&other_color);
2587 }
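
HasColor reads two adjacent bitmap bits per object; the patterns come from the ASSERTs here and in EnsureNotWhite below (white 00, black 10, grey 11, 01 impossible). A hedged decoder for that encoding:

enum MarkColor { WHITE, BLACK, GREY, IMPOSSIBLE };

static MarkColor DecodeColor(bool first_bit, bool second_bit) {
  if (!first_bit) return second_bit ? IMPOSSIBLE : WHITE;  // "01" / "00"
  return second_bit ? GREY : BLACK;                        // "11" / "10"
}
// JumpIfBlack above asks for first_bit == 1, second_bit == 0.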
2588
2589
2590 void MacroAssembler::GetMarkBits(Register addr_reg,
2591 Register bitmap_reg,
2592 Register mask_reg) {
2593 ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
2594 mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
2595 and_(bitmap_reg, addr_reg);
2596 mov(ecx, addr_reg);
2597 int shift =
2598 Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
2599 shr(ecx, shift);
2600 and_(ecx,
2601 (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));
2602
2603 add(bitmap_reg, ecx);
2604 mov(ecx, addr_reg);
2605 shr(ecx, kPointerSizeLog2);
2606 and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
2607 mov(mask_reg, Immediate(1));
2608 shl_cl(mask_reg);
2609 }
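
GetMarkBits converts an address into a (cell address, bit mask) pair in the page's marking bitmap: one mark bit per pointer-sized word, 32 bits per cell. A hedged model of the arithmetic with assumed ia32 constants (the real code adds MemoryChunk::kHeaderSize when dereferencing the cell):

#include <stdint.h>

const int kPointerSizeLog2Model = 2;              // 4-byte words on ia32
const int kBitsPerCellLog2Model = 5;              // 32 bits per bitmap cell
const uintptr_t kPageMaskModel = (1u << 20) - 1;  // assumed page size

static void GetMarkBitsModel(uintptr_t addr,
                             uintptr_t* cell_address, uint32_t* mask) {
  uintptr_t page = addr & ~kPageMaskModel;                        // page start
  uintptr_t word = (addr & kPageMaskModel) >> kPointerSizeLog2Model;
  *cell_address = page + (word >> kBitsPerCellLog2Model) * sizeof(uint32_t);
  *mask = 1u << (word & ((1u << kBitsPerCellLog2Model) - 1));     // shl_cl
}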
2610
2611
2612 void MacroAssembler::EnsureNotWhite(
2613 Register value,
2614 Register bitmap_scratch,
2615 Register mask_scratch,
2616 Label* value_is_white_and_not_data,
2617 Label::Distance distance) {
2618 ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
2619 GetMarkBits(value, bitmap_scratch, mask_scratch);
2620
2621 // If the value is black or grey we don't need to do anything.
2622 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
2623 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
2624 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
2625 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
2626
2627 Label done;
2628
2629 // Since both black and grey have a 1 in the first position and white does
2630 // not have a 1 there, we only need to check one bit.
2631 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2632 j(not_zero, &done, Label::kNear);
2633
2634 if (FLAG_debug_code) {
2635 // Check for impossible bit pattern.
2636 Label ok;
2637 push(mask_scratch);
2638 // shl. May overflow making the check conservative.
2639 add(mask_scratch, mask_scratch);
2640 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2641 j(zero, &ok, Label::kNear);
2642 int3();
2643 bind(&ok);
2644 pop(mask_scratch);
2645 }
2646
2647 // Value is white. We check whether it is data that doesn't need scanning.
2648 // Currently only checks for HeapNumber and non-cons strings.
2649 Register map = ecx; // Holds map while checking type.
2650 Register length = ecx; // Holds length of object after checking type.
2651 Label not_heap_number;
2652 Label is_data_object;
2653
2654 // Check for heap-number
2655 mov(map, FieldOperand(value, HeapObject::kMapOffset));
2656 cmp(map, FACTORY->heap_number_map());
2657 j(not_equal, &not_heap_number, Label::kNear);
2658 mov(length, Immediate(HeapNumber::kSize));
2659 jmp(&is_data_object, Label::kNear);
2660
2661 bind(&not_heap_number);
2662 // Check for strings.
2663 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
2664 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
2665 // If it's a string and it's not a cons string, then it's an object containing
2666 // no GC pointers.
2667 Register instance_type = ecx;
2668 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
2669 test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
2670 j(not_zero, value_is_white_and_not_data);
2671 // It's a non-indirect (non-cons and non-slice) string.
2672 // If it's external, the length is just ExternalString::kSize.
2673 // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
2674 Label not_external;
2675 // External strings are the only ones with the kExternalStringTag bit
2676 // set.
2677 ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
2678 ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
2679 test_b(instance_type, kExternalStringTag);
2680 j(zero, &not_external, Label::kNear);
2681 mov(length, Immediate(ExternalString::kSize));
2682 jmp(&is_data_object, Label::kNear);
2683
2684 bind(&not_external);
2685 // Sequential string, either ASCII or UC16.
2686 ASSERT(kAsciiStringTag == 0x04);
2687 and_(length, Immediate(kStringEncodingMask));
2688 xor_(length, Immediate(kStringEncodingMask));
2689 add(length, Immediate(0x04));
2690 // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
2691 // by 2. If we multiply the string length as smi by this, it still
2692 // won't overflow a 32-bit value.
2693 ASSERT_EQ(SeqAsciiString::kMaxSize, SeqTwoByteString::kMaxSize);
2694 ASSERT(SeqAsciiString::kMaxSize <=
2695 static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
2696 imul(length, FieldOperand(value, String::kLengthOffset));
2697 shr(length, 2 + kSmiTagSize + kSmiShiftSize);
2698 add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
2699 and_(length, Immediate(~kObjectAlignmentMask));
2700
2701 bind(&is_data_object);
2702 // Value is a data object, and it is white. Mark it black. Since we know
2703 // that the object is white we can make it black by flipping one bit.
2704 or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
2705
2706 and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
2707 add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
2708 length);
2709 if (FLAG_debug_code) {
2710 mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
2711 cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
2712 Check(less_equal, "Live Bytes Count overflow chunk size");
2713 }
2714
2715 bind(&done);
2716 }
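
The sequential-string branch above computes the object size without branching on the encoding: the encoding bit is turned into a char size pre-shifted by 2 (4 for ASCII, 8 for UC16), multiplied by the smi-tagged length, then shifted right by 3 (the 2 extra bits plus one smi tag bit) before rounding up to object alignment. A hedged scalar model with assumed constants:

#include <stdint.h>

const uint32_t kStringEncodingMaskModel = 0x04;  // assumed ASCII bit
const uint32_t kSeqHeaderSizeModel = 12;         // assumed SeqString::kHeaderSize
const uint32_t kObjectAlignmentMaskModel = 3;    // assumed 4-byte alignment

static uint32_t SeqStringSize(uint32_t instance_type, uint32_t length) {
  uint32_t char_size_shifted =
      ((instance_type & kStringEncodingMaskModel) ^ kStringEncodingMaskModel) + 4;
  uint32_t smi_length = length << 1;                       // smi-tagged length
  uint32_t bytes = (char_size_shifted * smi_length) >> 3;  // length * (1 or 2)
  return (bytes + kSeqHeaderSizeModel + kObjectAlignmentMaskModel) &
         ~kObjectAlignmentMaskModel;
}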
2717
2291 } } // namespace v8::internal 2718 } } // namespace v8::internal
2292 2719
2293 #endif // V8_TARGET_ARCH_IA32 2720 #endif // V8_TARGET_ARCH_IA32