| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 87 | 87 |
| 88 for (int i = Register::kNumRegisters - 1; i >= 0; i--) { | 88 for (int i = Register::kNumRegisters - 1; i >= 0; i--) { |
| 89 if (preserve[i]) pop(Register::from_code(i)); | 89 if (preserve[i]) pop(Register::from_code(i)); |
| 90 } | 90 } |
| 91 } | 91 } |
| 92 | 92 |
| 93 | 93 |
| 94 void MacroAssembler::RememberedSetHelper(Register addr, | 94 void MacroAssembler::RememberedSetHelper(Register addr, |
| 95 Register scratch, | 95 Register scratch, |
| 96 SaveFPRegsMode save_fp) { | 96 SaveFPRegsMode save_fp) { |
| 97 NearLabel done; | 97 Label done; |
| 98 // Load store buffer top. | 98 // Load store buffer top. |
| 99 ExternalReference store_buffer = | 99 ExternalReference store_buffer = |
| 100 ExternalReference::store_buffer_top(isolate()); | 100 ExternalReference::store_buffer_top(isolate()); |
| 101 mov(scratch, Operand::StaticVariable(store_buffer)); | 101 mov(scratch, Operand::StaticVariable(store_buffer)); |
| 102 // Store pointer to buffer. | 102 // Store pointer to buffer. |
| 103 mov(Operand(scratch, 0), addr); | 103 mov(Operand(scratch, 0), addr); |
| 104 // Increment buffer top. | 104 // Increment buffer top. |
| 105 add(Operand(scratch), Immediate(kPointerSize)); | 105 add(Operand(scratch), Immediate(kPointerSize)); |
| 106 // Write back new top of buffer. | 106 // Write back new top of buffer. |
| 107 mov(Operand::StaticVariable(store_buffer), scratch); | 107 mov(Operand::StaticVariable(store_buffer), scratch); |
| 108 // Check for end of buffer. | 108 // Check for end of buffer. |
| 109 // Call stub on end of buffer. | 109 // Call stub on end of buffer. |
| 110 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit)); | 110 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit)); |
| 111 j(equal, &done); | 111 j(equal, &done, Label::kNear); |
| 112 StoreBufferOverflowStub store_buffer_overflow = | 112 StoreBufferOverflowStub store_buffer_overflow = |
| 113 StoreBufferOverflowStub(save_fp); | 113 StoreBufferOverflowStub(save_fp); |
| 114 CallStub(&store_buffer_overflow); | 114 CallStub(&store_buffer_overflow); |
| 115 bind(&done); | 115 bind(&done); |
| 116 } | 116 } |
| 117 | 117 |
| 118 | 118 |
| 119 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg, |
| 120 XMMRegister scratch_reg, |
| 121 Register result_reg) { |
| 122 Label done; |
| 123 ExternalReference zero_ref = ExternalReference::address_of_zero(); |
| 124 movdbl(scratch_reg, Operand::StaticVariable(zero_ref)); |
| 125 Set(result_reg, Immediate(0)); |
| 126 ucomisd(input_reg, scratch_reg); |
| 127 j(below, &done, Label::kNear); |
| 128 ExternalReference half_ref = ExternalReference::address_of_one_half(); |
| 129 movdbl(scratch_reg, Operand::StaticVariable(half_ref)); |
| 130 addsd(scratch_reg, input_reg); |
| 131 cvttsd2si(result_reg, Operand(scratch_reg)); |
| 132 test(result_reg, Immediate(0xFFFFFF00)); |
| 133 j(zero, &done, Label::kNear); |
| 134 Set(result_reg, Immediate(255)); |
| 135 bind(&done); |
| 136 } |
| 137 |
| 138 |
| 139 void MacroAssembler::ClampUint8(Register reg) { |
| 140 Label done; |
| 141 test(reg, Immediate(0xFFFFFF00)); |
| 142 j(zero, &done, Label::kNear); |
| 143 setcc(negative, reg); // 1 if negative, 0 if positive. |
| 144 dec_b(reg); // 0 if negative, 255 if positive. |
| 145 bind(&done); |
| 146 } |
| 147 |
| 148 |
| 149 void MacroAssembler::InNewSpace(Register object, |
| 150 Register scratch, |
| 151 Condition cc, |
| 152 Label* branch, |
| 153 Label::Distance branch_near) { |
| 154 ASSERT(cc == equal || cc == not_equal); |
| 155 if (Serializer::enabled()) { |
| 156 // Can't do arithmetic on external references if it might get serialized. |
| 157 mov(scratch, Operand(object)); |
| 158 // The mask isn't really an address. We load it as an external reference in |
| 159 // case the size of the new space is different between the snapshot maker |
| 160 // and the running system. |
| 161 and_(Operand(scratch), |
| 162 Immediate(ExternalReference::new_space_mask(isolate()))); |
| 163 cmp(Operand(scratch), |
| 164 Immediate(ExternalReference::new_space_start(isolate()))); |
| 165 j(cc, branch, branch_near); |
| 166 } else { |
| 167 int32_t new_space_start = reinterpret_cast<int32_t>( |
| 168 ExternalReference::new_space_start(isolate()).address()); |
| 169 lea(scratch, Operand(object, -new_space_start)); |
| 170 and_(scratch, isolate()->heap()->NewSpaceMask()); |
| 171 j(cc, branch, branch_near); |
| 172 } |
| 173 } |
| 174 |
| 175 |
| 119 void MacroAssembler::RecordWriteArray(Register object, | 176 void MacroAssembler::RecordWriteArray(Register object, |
| 120 Register value, | 177 Register value, |
| 121 Register index, | 178 Register index, |
| 122 SaveFPRegsMode save_fp, | 179 SaveFPRegsMode save_fp, |
| 123 EmitRememberedSet emit_remembered_set, | 180 EmitRememberedSet emit_remembered_set, |
| 124 SmiCheck smi_check) { | 181 SmiCheck smi_check) { |
| 125 // First, check if a write barrier is even needed. The tests below | 182 // First, check if a write barrier is even needed. The tests below |
| 126 // catch stores of Smis. | 183 // catch stores of Smis. |
| 127 NearLabel done; | 184 Label done; |
| 128 | 185 |
| 129 // Skip barrier if writing a smi. | 186 // Skip barrier if writing a smi. |
| 130 if (smi_check == INLINE_SMI_CHECK) { | 187 if (smi_check == INLINE_SMI_CHECK) { |
| 131 ASSERT_EQ(0, kSmiTag); | 188 ASSERT_EQ(0, kSmiTag); |
| 132 test(value, Immediate(kSmiTagMask)); | 189 test(value, Immediate(kSmiTagMask)); |
| 133 j(zero, &done); | 190 j(zero, &done); |
| 134 } | 191 } |
| 135 | 192 |
| 136 // Array access: calculate the destination address in the same manner as | 193 // Array access: calculate the destination address in the same manner as |
| 137 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset | 194 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset |
| 138 // into an array of words. | 195 // into an array of words. |
| 139 ASSERT_EQ(1, kSmiTagSize); | 196 ASSERT_EQ(1, kSmiTagSize); |
| 140 ASSERT_EQ(0, kSmiTag); | 197 ASSERT_EQ(0, kSmiTag); |
| 198 test(value, Immediate(kSmiTagMask)); |
| 199 j(zero, &done, Label::kNear); |
| 141 Register dst = index; | 200 Register dst = index; |
| 142 lea(dst, Operand(object, index, times_half_pointer_size, | 201 lea(dst, Operand(object, index, times_half_pointer_size, |
| 143 FixedArray::kHeaderSize - kHeapObjectTag)); | 202 FixedArray::kHeaderSize - kHeapObjectTag)); |
| 144 | 203 |
| 145 RecordWrite(object, dst, value, save_fp, emit_remembered_set, OMIT_SMI_CHECK); | 204 RecordWrite(object, dst, value, save_fp, emit_remembered_set, OMIT_SMI_CHECK); |
| 146 | 205 |
| 147 bind(&done); | 206 bind(&done); |
| 148 | 207 |
| 149 // Clobber clobbered input registers when running with the debug-code flag | 208 // Clobber clobbered input registers when running with the debug-code flag |
| 150 // turned on to provoke errors. | 209 // turned on to provoke errors. |
| 151 if (emit_debug_code()) { | 210 if (emit_debug_code()) { |
| 152 mov(value, Immediate(BitCast<int32_t>(kZapValue))); | 211 mov(value, Immediate(BitCast<int32_t>(kZapValue))); |
| 153 mov(index, Immediate(BitCast<int32_t>(kZapValue))); | 212 mov(index, Immediate(BitCast<int32_t>(kZapValue))); |
| 154 } | 213 } |
| 155 } | 214 } |
| 156 | 215 |
| 157 | 216 |
| 158 void MacroAssembler::RecordWriteField( | 217 void MacroAssembler::RecordWriteField( |
| 159 Register object, | 218 Register object, |
| 160 int offset, | 219 int offset, |
| 161 Register value, | 220 Register value, |
| 162 Register dst, | 221 Register dst, |
| 163 SaveFPRegsMode save_fp, | 222 SaveFPRegsMode save_fp, |
| 164 EmitRememberedSet emit_remembered_set, | 223 EmitRememberedSet emit_remembered_set, |
| 165 SmiCheck smi_check) { | 224 SmiCheck smi_check) { |
| 166 // First, check if a write barrier is even needed. The tests below | 225 // First, check if a write barrier is even needed. The tests below |
| 167 // catch stores of Smis. | 226 // catch stores of Smis. |
| 168 NearLabel done; | 227 Label done; |
| 169 | 228 |
| 170 // Skip barrier if writing a smi. | 229 // Skip barrier if writing a smi. |
| 171 if (smi_check == INLINE_SMI_CHECK) { | 230 if (smi_check == INLINE_SMI_CHECK) { |
| 172 ASSERT_EQ(0, kSmiTag); | 231 ASSERT_EQ(0, kSmiTag); |
| 173 test(value, Immediate(kSmiTagMask)); | 232 test(value, Immediate(kSmiTagMask)); |
| 174 j(zero, &done); | 233 j(zero, &done, Label::kNear); |
| 175 } | 234 } |
| 176 | 235 |
| 177 // Although the object register is tagged, the offset is relative to the start | 236 // Although the object register is tagged, the offset is relative to the start |
| 178 // of the object, so the offset must be a multiple of kPointerSize. | 237 // of the object, so the offset must be a multiple of kPointerSize. |
| 179 ASSERT(IsAligned(offset, kPointerSize)); | 238 ASSERT(IsAligned(offset, kPointerSize)); |
| 180 | 239 |
| 181 lea(dst, FieldOperand(object, offset)); | 240 lea(dst, FieldOperand(object, offset)); |
| 182 if (emit_debug_code()) { | 241 if (emit_debug_code()) { |
| 183 NearLabel ok; | 242 Label ok; |
| 184 test_b(Operand(dst), (1 << kPointerSizeLog2) - 1); | 243 test_b(Operand(dst), (1 << kPointerSizeLog2) - 1); |
| 185 j(zero, &ok); | 244 j(zero, &ok, Label::kNear); |
| 186 int3(); | 245 int3(); |
| 187 bind(&ok); | 246 bind(&ok); |
| 188 } | 247 } |
| 189 | 248 |
| 190 RecordWrite(object, dst, value, save_fp, emit_remembered_set, OMIT_SMI_CHECK); | 249 RecordWrite(object, dst, value, save_fp, emit_remembered_set, OMIT_SMI_CHECK); |
| 191 | 250 |
| 192 bind(&done); | 251 bind(&done); |
| 193 | 252 |
| 194 // Clobber clobbered input registers when running with the debug-code flag | 253 // Clobber clobbered input registers when running with the debug-code flag |
| 195 // turned on to provoke errors. | 254 // turned on to provoke errors. |
| (...skipping 16 matching lines...) Expand all Loading... |
| 212 if (emit_debug_code()) { | 271 if (emit_debug_code()) { |
| 213 AbortIfSmi(object); | 272 AbortIfSmi(object); |
| 214 } | 273 } |
| 215 | 274 |
| 216 if (emit_remembered_set == OMIT_REMEMBERED_SET && | 275 if (emit_remembered_set == OMIT_REMEMBERED_SET && |
| 217 FLAG_incremental_marking == false) { | 276 FLAG_incremental_marking == false) { |
| 218 return; | 277 return; |
| 219 } | 278 } |
| 220 | 279 |
| 221 if (FLAG_debug_code) { | 280 if (FLAG_debug_code) { |
| 222 NearLabel ok; | 281 Label ok; |
| 223 cmp(value, Operand(address, 0)); | 282 cmp(value, Operand(address, 0)); |
| 224 j(equal, &ok); | 283 j(equal, &ok, Label::kNear); |
| 225 Abort("Registers did not match in write barrier"); | 284 int3(); |
| 226 bind(&ok); | 285 bind(&ok); |
| 227 } | 286 } |
| 228 | 287 |
| 229 // First, check if a write barrier is even needed. The tests below | 288 // First, check if a write barrier is even needed. The tests below |
| 230 // catch stores of Smis and stores into young gen. | 289 // catch stores of Smis and stores into young gen. |
| 231 NearLabel done; | 290 Label done; |
| 232 | 291 |
| 233 if (smi_check == INLINE_SMI_CHECK) { | 292 if (smi_check == INLINE_SMI_CHECK) { |
| 234 // Skip barrier if writing a smi. | 293 // Skip barrier if writing a smi. |
| 235 ASSERT_EQ(0, kSmiTag); | 294 ASSERT_EQ(0, kSmiTag); |
| 236 test(value, Immediate(kSmiTagMask)); | 295 test(value, Immediate(kSmiTagMask)); |
| 237 j(zero, &done); | 296 j(zero, &done, Label::kNear); |
| 238 } | 297 } |
| 239 | 298 |
| 240 CheckPageFlag(value, | 299 CheckPageFlag(value, |
| 241 value, // Used as scratch. | 300 value, // Used as scratch. |
| 242 MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING, | 301 MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING, |
| 243 zero, | 302 zero, |
| 244 &done); | 303 &done, |
| 304 Label::kNear); |
| 245 CheckPageFlag(object, | 305 CheckPageFlag(object, |
| 246 value, // Used as scratch. | 306 value, // Used as scratch. |
| 247 MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING, | 307 MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING, |
| 248 zero, | 308 zero, |
| 249 &done); | 309 &done, |
| 310 Label::kNear); |
| 250 | 311 |
| 251 RecordWriteStub stub(object, value, address, emit_remembered_set, fp_mode); | 312 RecordWriteStub stub(object, value, address, emit_remembered_set, fp_mode); |
| 252 CallStub(&stub); | 313 CallStub(&stub); |
| 253 | 314 |
| 254 bind(&done); | 315 bind(&done); |
| 255 | 316 |
| 256 // Clobber clobbered registers when running with the debug-code flag | 317 // Clobber clobbered registers when running with the debug-code flag |
| 257 // turned on to provoke errors. | 318 // turned on to provoke errors. |
| 258 if (emit_debug_code()) { | 319 if (emit_debug_code()) { |
| 259 mov(address, Immediate(BitCast<int32_t>(kZapValue))); | 320 mov(address, Immediate(BitCast<int32_t>(kZapValue))); |
| 260 mov(value, Immediate(BitCast<int32_t>(kZapValue))); | 321 mov(value, Immediate(BitCast<int32_t>(kZapValue))); |
| 261 } | 322 } |
| 262 } | 323 } |
| 263 | 324 |
| 264 | 325 |
| 265 #ifdef ENABLE_DEBUGGER_SUPPORT | 326 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 266 void MacroAssembler::DebugBreak() { | 327 void MacroAssembler::DebugBreak() { |
| 267 Set(eax, Immediate(0)); | 328 Set(eax, Immediate(0)); |
| 268 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate()))); | 329 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate()))); |
| 269 CEntryStub ces(1); | 330 CEntryStub ces(1); |
| 270 call(ces.GetCode(), RelocInfo::DEBUG_BREAK); | 331 call(ces.GetCode(), RelocInfo::DEBUG_BREAK); |
| 271 } | 332 } |
| 272 #endif | 333 #endif |
| 273 | 334 |
| 274 | 335 |
| 275 void MacroAssembler::Set(Register dst, const Immediate& x) { | 336 void MacroAssembler::Set(Register dst, const Immediate& x) { |
| 276 if (x.is_zero()) { | 337 if (x.is_zero()) { |
| 277 xor_(dst, Operand(dst)); // shorter than mov | 338 xor_(dst, Operand(dst)); // Shorter than mov. |
| 278 } else { | 339 } else { |
| 279 mov(dst, x); | 340 mov(dst, x); |
| 280 } | 341 } |
| 281 } | 342 } |
| 282 | 343 |
| 283 | 344 |
| 284 void MacroAssembler::Set(const Operand& dst, const Immediate& x) { | 345 void MacroAssembler::Set(const Operand& dst, const Immediate& x) { |
| 285 mov(dst, x); | 346 mov(dst, x); |
| 286 } | 347 } |
| 287 | 348 |
| 288 | 349 |
| 350 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) { |
| 351 static const int kMaxImmediateBits = 17; |
| 352 if (x.rmode_ != RelocInfo::NONE) return false; |
| 353 return !is_intn(x.x_, kMaxImmediateBits); |
| 354 } |
| 355 |
| 356 |
| 357 void MacroAssembler::SafeSet(Register dst, const Immediate& x) { |
| 358 if (IsUnsafeImmediate(x) && jit_cookie() != 0) { |
| 359 Set(dst, Immediate(x.x_ ^ jit_cookie())); |
| 360 xor_(dst, jit_cookie()); |
| 361 } else { |
| 362 Set(dst, x); |
| 363 } |
| 364 } |
| 365 |
| 366 |
| 367 void MacroAssembler::SafePush(const Immediate& x) { |
| 368 if (IsUnsafeImmediate(x) && jit_cookie() != 0) { |
| 369 push(Immediate(x.x_ ^ jit_cookie())); |
| 370 xor_(Operand(esp, 0), Immediate(jit_cookie())); |
| 371 } else { |
| 372 push(x); |
| 373 } |
| 374 } |
| 375 |
| 376 |
| 289 void MacroAssembler::CmpObjectType(Register heap_object, | 377 void MacroAssembler::CmpObjectType(Register heap_object, |
| 290 InstanceType type, | 378 InstanceType type, |
| 291 Register map) { | 379 Register map) { |
| 292 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset)); | 380 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset)); |
| 293 CmpInstanceType(map, type); | 381 CmpInstanceType(map, type); |
| 294 } | 382 } |
| 295 | 383 |
| 296 | 384 |
| 297 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { | 385 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { |
| 298 cmpb(FieldOperand(map, Map::kInstanceTypeOffset), | 386 cmpb(FieldOperand(map, Map::kInstanceTypeOffset), |
| 299 static_cast<int8_t>(type)); | 387 static_cast<int8_t>(type)); |
| 300 } | 388 } |
| 301 | 389 |
| 302 | 390 |
| 303 void MacroAssembler::CheckMap(Register obj, | 391 void MacroAssembler::CheckMap(Register obj, |
| 304 Handle<Map> map, | 392 Handle<Map> map, |
| 305 Label* fail, | 393 Label* fail, |
| 306 bool is_heap_object) { | 394 SmiCheckType smi_check_type) { |
| 307 if (!is_heap_object) { | 395 if (smi_check_type == DONT_DO_SMI_CHECK) { |
| 308 test(obj, Immediate(kSmiTagMask)); | 396 JumpIfSmi(obj, fail); |
| 309 j(zero, fail); | |
| 310 } | 397 } |
| 311 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map)); | 398 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map)); |
| 312 j(not_equal, fail); | 399 j(not_equal, fail); |
| 313 } | 400 } |
| 314 | 401 |
| 315 | 402 |
| 403 void MacroAssembler::DispatchMap(Register obj, |
| 404 Handle<Map> map, |
| 405 Handle<Code> success, |
| 406 SmiCheckType smi_check_type) { |
| 407 Label fail; |
| 408 if (smi_check_type == DONT_DO_SMI_CHECK) { |
| 409 JumpIfSmi(obj, &fail); |
| 410 } |
| 411 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map)); |
| 412 j(equal, success); |
| 413 |
| 414 bind(&fail); |
| 415 } |
| 416 |
| 417 |
| 316 Condition MacroAssembler::IsObjectStringType(Register heap_object, | 418 Condition MacroAssembler::IsObjectStringType(Register heap_object, |
| 317 Register map, | 419 Register map, |
| 318 Register instance_type) { | 420 Register instance_type) { |
| 319 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset)); | 421 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset)); |
| 320 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); | 422 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); |
| 321 ASSERT(kNotStringTag != 0); | 423 ASSERT(kNotStringTag != 0); |
| 322 test(instance_type, Immediate(kIsNotStringMask)); | 424 test(instance_type, Immediate(kIsNotStringMask)); |
| 323 return zero; | 425 return zero; |
| 324 } | 426 } |
| 325 | 427 |
| (...skipping 261 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 587 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 689 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 588 pop(Operand::StaticVariable(handler_address)); | 690 pop(Operand::StaticVariable(handler_address)); |
| 589 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); | 691 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); |
| 590 pop(ebp); | 692 pop(ebp); |
| 591 pop(edx); // Remove state. | 693 pop(edx); // Remove state. |
| 592 | 694 |
| 593 // Before returning we restore the context from the frame pointer if | 695 // Before returning we restore the context from the frame pointer if |
| 594 // not NULL. The frame pointer is NULL in the exception handler of | 696 // not NULL. The frame pointer is NULL in the exception handler of |
| 595 // a JS entry frame. | 697 // a JS entry frame. |
| 596 Set(esi, Immediate(0)); // Tentatively set context pointer to NULL. | 698 Set(esi, Immediate(0)); // Tentatively set context pointer to NULL. |
| 597 NearLabel skip; | 699 Label skip; |
| 598 cmp(ebp, 0); | 700 cmp(ebp, 0); |
| 599 j(equal, &skip, not_taken); | 701 j(equal, &skip, Label::kNear); |
| 600 mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 702 mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 601 bind(&skip); | 703 bind(&skip); |
| 602 | 704 |
| 603 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); | 705 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); |
| 604 ret(0); | 706 ret(0); |
| 605 } | 707 } |
| 606 | 708 |
| 607 | 709 |
| 608 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, | 710 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, |
| 609 Register value) { | 711 Register value) { |
| 610 // Adjust this code if not the case. | 712 // Adjust this code if not the case. |
| 611 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); | 713 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); |
| 612 | 714 |
| 613 // eax must hold the exception. | 715 // eax must hold the exception. |
| 614 if (!value.is(eax)) { | 716 if (!value.is(eax)) { |
| 615 mov(eax, value); | 717 mov(eax, value); |
| 616 } | 718 } |
| 617 | 719 |
| 618 // Drop sp to the top stack handler. | 720 // Drop sp to the top stack handler. |
| 619 ExternalReference handler_address(Isolate::k_handler_address, | 721 ExternalReference handler_address(Isolate::k_handler_address, |
| 620 isolate()); | 722 isolate()); |
| 621 mov(esp, Operand::StaticVariable(handler_address)); | 723 mov(esp, Operand::StaticVariable(handler_address)); |
| 622 | 724 |
| 623 // Unwind the handlers until the ENTRY handler is found. | 725 // Unwind the handlers until the ENTRY handler is found. |
| 624 NearLabel loop, done; | 726 Label loop, done; |
| 625 bind(&loop); | 727 bind(&loop); |
| 626 // Load the type of the current stack handler. | 728 // Load the type of the current stack handler. |
| 627 const int kStateOffset = StackHandlerConstants::kStateOffset; | 729 const int kStateOffset = StackHandlerConstants::kStateOffset; |
| 628 cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY)); | 730 cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY)); |
| 629 j(equal, &done); | 731 j(equal, &done, Label::kNear); |
| 630 // Fetch the next handler in the list. | 732 // Fetch the next handler in the list. |
| 631 const int kNextOffset = StackHandlerConstants::kNextOffset; | 733 const int kNextOffset = StackHandlerConstants::kNextOffset; |
| 632 mov(esp, Operand(esp, kNextOffset)); | 734 mov(esp, Operand(esp, kNextOffset)); |
| 633 jmp(&loop); | 735 jmp(&loop); |
| 634 bind(&done); | 736 bind(&done); |
| 635 | 737 |
| 636 // Set the top handler address to next handler past the current ENTRY handler. | 738 // Set the top handler address to next handler past the current ENTRY handler. |
| 637 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 739 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 638 pop(Operand::StaticVariable(handler_address)); | 740 pop(Operand::StaticVariable(handler_address)); |
| 639 | 741 |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 690 push(scratch); | 792 push(scratch); |
| 691 // Read the first word and compare to global_context_map. | 793 // Read the first word and compare to global_context_map. |
| 692 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 794 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 693 cmp(scratch, isolate()->factory()->global_context_map()); | 795 cmp(scratch, isolate()->factory()->global_context_map()); |
| 694 Check(equal, "JSGlobalObject::global_context should be a global context."); | 796 Check(equal, "JSGlobalObject::global_context should be a global context."); |
| 695 pop(scratch); | 797 pop(scratch); |
| 696 } | 798 } |
| 697 | 799 |
| 698 // Check if both contexts are the same. | 800 // Check if both contexts are the same. |
| 699 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 801 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
| 700 j(equal, &same_contexts, taken); | 802 j(equal, &same_contexts); |
| 701 | 803 |
| 702 // Compare security tokens, save holder_reg on the stack so we can use it | 804 // Compare security tokens, save holder_reg on the stack so we can use it |
| 703 // as a temporary register. | 805 // as a temporary register. |
| 704 // | 806 // |
| 705 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | 807 // TODO(119): avoid push(holder_reg)/pop(holder_reg) |
| 706 push(holder_reg); | 808 push(holder_reg); |
| 707 // Check that the security token in the calling global object is | 809 // Check that the security token in the calling global object is |
| 708 // compatible with the security token in the receiving global | 810 // compatible with the security token in the receiving global |
| 709 // object. | 811 // object. |
| 710 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 812 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
| 711 | 813 |
| 712 // Check the context is a global context. | 814 // Check the context is a global context. |
| 713 if (emit_debug_code()) { | 815 if (emit_debug_code()) { |
| 714 cmp(holder_reg, isolate()->factory()->null_value()); | 816 cmp(holder_reg, isolate()->factory()->null_value()); |
| 715 Check(not_equal, "JSGlobalProxy::context() should not be null."); | 817 Check(not_equal, "JSGlobalProxy::context() should not be null."); |
| 716 | 818 |
| 717 push(holder_reg); | 819 push(holder_reg); |
| 718 // Read the first word and compare to global_context_map(), | 820 // Read the first word and compare to global_context_map(), |
| 719 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); | 821 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); |
| 720 cmp(holder_reg, isolate()->factory()->global_context_map()); | 822 cmp(holder_reg, isolate()->factory()->global_context_map()); |
| 721 Check(equal, "JSGlobalObject::global_context should be a global context."); | 823 Check(equal, "JSGlobalObject::global_context should be a global context."); |
| 722 pop(holder_reg); | 824 pop(holder_reg); |
| 723 } | 825 } |
| 724 | 826 |
| 725 int token_offset = Context::kHeaderSize + | 827 int token_offset = Context::kHeaderSize + |
| 726 Context::SECURITY_TOKEN_INDEX * kPointerSize; | 828 Context::SECURITY_TOKEN_INDEX * kPointerSize; |
| 727 mov(scratch, FieldOperand(scratch, token_offset)); | 829 mov(scratch, FieldOperand(scratch, token_offset)); |
| 728 cmp(scratch, FieldOperand(holder_reg, token_offset)); | 830 cmp(scratch, FieldOperand(holder_reg, token_offset)); |
| 729 pop(holder_reg); | 831 pop(holder_reg); |
| 730 j(not_equal, miss, not_taken); | 832 j(not_equal, miss); |
| 731 | 833 |
| 732 bind(&same_contexts); | 834 bind(&same_contexts); |
| 733 } | 835 } |
| 734 | 836 |
| 735 | 837 |
| 736 void MacroAssembler::LoadAllocationTopHelper(Register result, | 838 void MacroAssembler::LoadAllocationTopHelper(Register result, |
| 737 Register scratch, | 839 Register scratch, |
| 738 AllocationFlags flags) { | 840 AllocationFlags flags) { |
| 739 ExternalReference new_space_allocation_top = | 841 ExternalReference new_space_allocation_top = |
| 740 ExternalReference::new_space_allocation_top_address(isolate()); | 842 ExternalReference::new_space_allocation_top_address(isolate()); |
| (...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 808 Register top_reg = result_end.is_valid() ? result_end : result; | 910 Register top_reg = result_end.is_valid() ? result_end : result; |
| 809 | 911 |
| 810 // Calculate new top and bail out if new space is exhausted. | 912 // Calculate new top and bail out if new space is exhausted. |
| 811 ExternalReference new_space_allocation_limit = | 913 ExternalReference new_space_allocation_limit = |
| 812 ExternalReference::new_space_allocation_limit_address(isolate()); | 914 ExternalReference::new_space_allocation_limit_address(isolate()); |
| 813 | 915 |
| 814 if (!top_reg.is(result)) { | 916 if (!top_reg.is(result)) { |
| 815 mov(top_reg, result); | 917 mov(top_reg, result); |
| 816 } | 918 } |
| 817 add(Operand(top_reg), Immediate(object_size)); | 919 add(Operand(top_reg), Immediate(object_size)); |
| 818 j(carry, gc_required, not_taken); | 920 j(carry, gc_required); |
| 819 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit)); | 921 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit)); |
| 820 j(above, gc_required, not_taken); | 922 j(above, gc_required); |
| 821 | 923 |
| 822 // Update allocation top. | 924 // Update allocation top. |
| 823 UpdateAllocationTopHelper(top_reg, scratch); | 925 UpdateAllocationTopHelper(top_reg, scratch); |
| 824 | 926 |
| 825 // Tag result if requested. | 927 // Tag result if requested. |
| 826 if (top_reg.is(result)) { | 928 if (top_reg.is(result)) { |
| 827 if ((flags & TAG_OBJECT) != 0) { | 929 if ((flags & TAG_OBJECT) != 0) { |
| 828 sub(Operand(result), Immediate(object_size - kHeapObjectTag)); | 930 sub(Operand(result), Immediate(object_size - kHeapObjectTag)); |
| 829 } else { | 931 } else { |
| 830 sub(Operand(result), Immediate(object_size)); | 932 sub(Operand(result), Immediate(object_size)); |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 907 // Load address of new object into result. | 1009 // Load address of new object into result. |
| 908 LoadAllocationTopHelper(result, scratch, flags); | 1010 LoadAllocationTopHelper(result, scratch, flags); |
| 909 | 1011 |
| 910 // Calculate new top and bail out if new space is exhausted. | 1012 // Calculate new top and bail out if new space is exhausted. |
| 911 ExternalReference new_space_allocation_limit = | 1013 ExternalReference new_space_allocation_limit = |
| 912 ExternalReference::new_space_allocation_limit_address(isolate()); | 1014 ExternalReference::new_space_allocation_limit_address(isolate()); |
| 913 if (!object_size.is(result_end)) { | 1015 if (!object_size.is(result_end)) { |
| 914 mov(result_end, object_size); | 1016 mov(result_end, object_size); |
| 915 } | 1017 } |
| 916 add(result_end, Operand(result)); | 1018 add(result_end, Operand(result)); |
| 917 j(carry, gc_required, not_taken); | 1019 j(carry, gc_required); |
| 918 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); | 1020 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); |
| 919 j(above, gc_required, not_taken); | 1021 j(above, gc_required); |
| 920 | 1022 |
| 921 // Tag result if requested. | 1023 // Tag result if requested. |
| 922 if ((flags & TAG_OBJECT) != 0) { | 1024 if ((flags & TAG_OBJECT) != 0) { |
| 923 lea(result, Operand(result, kHeapObjectTag)); | 1025 lea(result, Operand(result, kHeapObjectTag)); |
| 924 } | 1026 } |
| 925 | 1027 |
| 926 // Update allocation top. | 1028 // Update allocation top. |
| 927 UpdateAllocationTopHelper(result_end, scratch); | 1029 UpdateAllocationTopHelper(result_end, scratch); |
| 928 } | 1030 } |
| 929 | 1031 |
| (...skipping 208 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1138 | 1240 |
| 1139 bind(&done); | 1241 bind(&done); |
| 1140 } | 1242 } |
| 1141 | 1243 |
| 1142 | 1244 |
| 1143 void MacroAssembler::NegativeZeroTest(Register result, | 1245 void MacroAssembler::NegativeZeroTest(Register result, |
| 1144 Register op, | 1246 Register op, |
| 1145 Label* then_label) { | 1247 Label* then_label) { |
| 1146 Label ok; | 1248 Label ok; |
| 1147 test(result, Operand(result)); | 1249 test(result, Operand(result)); |
| 1148 j(not_zero, &ok, taken); | 1250 j(not_zero, &ok); |
| 1149 test(op, Operand(op)); | 1251 test(op, Operand(op)); |
| 1150 j(sign, then_label, not_taken); | 1252 j(sign, then_label); |
| 1151 bind(&ok); | 1253 bind(&ok); |
| 1152 } | 1254 } |
| 1153 | 1255 |
| 1154 | 1256 |
| 1155 void MacroAssembler::NegativeZeroTest(Register result, | 1257 void MacroAssembler::NegativeZeroTest(Register result, |
| 1156 Register op1, | 1258 Register op1, |
| 1157 Register op2, | 1259 Register op2, |
| 1158 Register scratch, | 1260 Register scratch, |
| 1159 Label* then_label) { | 1261 Label* then_label) { |
| 1160 Label ok; | 1262 Label ok; |
| 1161 test(result, Operand(result)); | 1263 test(result, Operand(result)); |
| 1162 j(not_zero, &ok, taken); | 1264 j(not_zero, &ok); |
| 1163 mov(scratch, Operand(op1)); | 1265 mov(scratch, Operand(op1)); |
| 1164 or_(scratch, Operand(op2)); | 1266 or_(scratch, Operand(op2)); |
| 1165 j(sign, then_label, not_taken); | 1267 j(sign, then_label); |
| 1166 bind(&ok); | 1268 bind(&ok); |
| 1167 } | 1269 } |
| 1168 | 1270 |
| 1169 | 1271 |
| 1170 void MacroAssembler::TryGetFunctionPrototype(Register function, | 1272 void MacroAssembler::TryGetFunctionPrototype(Register function, |
| 1171 Register result, | 1273 Register result, |
| 1172 Register scratch, | 1274 Register scratch, |
| 1173 Label* miss) { | 1275 Label* miss) { |
| 1174 // Check that the receiver isn't a smi. | 1276 // Check that the receiver isn't a smi. |
| 1175 test(function, Immediate(kSmiTagMask)); | 1277 test(function, Immediate(kSmiTagMask)); |
| 1176 j(zero, miss, not_taken); | 1278 j(zero, miss); |
| 1177 | 1279 |
| 1178 // Check that the function really is a function. | 1280 // Check that the function really is a function. |
| 1179 CmpObjectType(function, JS_FUNCTION_TYPE, result); | 1281 CmpObjectType(function, JS_FUNCTION_TYPE, result); |
| 1180 j(not_equal, miss, not_taken); | 1282 j(not_equal, miss); |
| 1181 | 1283 |
| 1182 // Make sure that the function has an instance prototype. | 1284 // Make sure that the function has an instance prototype. |
| 1183 Label non_instance; | 1285 Label non_instance; |
| 1184 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset)); | 1286 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset)); |
| 1185 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype)); | 1287 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype)); |
| 1186 j(not_zero, &non_instance, not_taken); | 1288 j(not_zero, &non_instance); |
| 1187 | 1289 |
| 1188 // Get the prototype or initial map from the function. | 1290 // Get the prototype or initial map from the function. |
| 1189 mov(result, | 1291 mov(result, |
| 1190 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 1292 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1191 | 1293 |
| 1192 // If the prototype or initial map is the hole, don't return it and | 1294 // If the prototype or initial map is the hole, don't return it and |
| 1193 // simply miss the cache instead. This will allow us to allocate a | 1295 // simply miss the cache instead. This will allow us to allocate a |
| 1194 // prototype object on-demand in the runtime system. | 1296 // prototype object on-demand in the runtime system. |
| 1195 cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value())); | 1297 cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value())); |
| 1196 j(equal, miss, not_taken); | 1298 j(equal, miss); |
| 1197 | 1299 |
| 1198 // If the function does not have an initial map, we're done. | 1300 // If the function does not have an initial map, we're done. |
| 1199 Label done; | 1301 Label done; |
| 1200 CmpObjectType(result, MAP_TYPE, scratch); | 1302 CmpObjectType(result, MAP_TYPE, scratch); |
| 1201 j(not_equal, &done); | 1303 j(not_equal, &done); |
| 1202 | 1304 |
| 1203 // Get the prototype from the initial map. | 1305 // Get the prototype from the initial map. |
| 1204 mov(result, FieldOperand(result, Map::kPrototypeOffset)); | 1306 mov(result, FieldOperand(result, Map::kPrototypeOffset)); |
| 1205 jmp(&done); | 1307 jmp(&done); |
| 1206 | 1308 |
| 1207 // Non-instance prototype: Fetch prototype from constructor field | 1309 // Non-instance prototype: Fetch prototype from constructor field |
| 1208 // in initial map. | 1310 // in initial map. |
| 1209 bind(&non_instance); | 1311 bind(&non_instance); |
| 1210 mov(result, FieldOperand(result, Map::kConstructorOffset)); | 1312 mov(result, FieldOperand(result, Map::kConstructorOffset)); |
| 1211 | 1313 |
| 1212 // All done. | 1314 // All done. |
| 1213 bind(&done); | 1315 bind(&done); |
| 1214 } | 1316 } |
| 1215 | 1317 |
| 1216 | 1318 |
| 1217 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { | 1319 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { |
| 1218 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1320 // ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. |
| 1321 // TODO(gc): Fix this! |
| 1219 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); | 1322 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); |
| 1220 } | 1323 } |
| 1221 | 1324 |
| 1222 | 1325 |
| 1223 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) { | 1326 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) { |
| 1224 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1327 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. |
| 1225 Object* result; | 1328 Object* result; |
| 1226 { MaybeObject* maybe_result = stub->TryGetCode(); | 1329 { MaybeObject* maybe_result = stub->TryGetCode(); |
| 1227 if (!maybe_result->ToObject(&result)) return maybe_result; | 1330 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 1228 } | 1331 } |
| (...skipping 237 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1466 } | 1569 } |
| 1467 | 1570 |
| 1468 Label empty_handle; | 1571 Label empty_handle; |
| 1469 Label prologue; | 1572 Label prologue; |
| 1470 Label promote_scheduled_exception; | 1573 Label promote_scheduled_exception; |
| 1471 Label delete_allocated_handles; | 1574 Label delete_allocated_handles; |
| 1472 Label leave_exit_frame; | 1575 Label leave_exit_frame; |
| 1473 | 1576 |
| 1474 // Check if the result handle holds 0. | 1577 // Check if the result handle holds 0. |
| 1475 test(eax, Operand(eax)); | 1578 test(eax, Operand(eax)); |
| 1476 j(zero, &empty_handle, not_taken); | 1579 j(zero, &empty_handle); |
| 1477 // It was non-zero. Dereference to get the result value. | 1580 // It was non-zero. Dereference to get the result value. |
| 1478 mov(eax, Operand(eax, 0)); | 1581 mov(eax, Operand(eax, 0)); |
| 1479 bind(&prologue); | 1582 bind(&prologue); |
| 1480 // No more valid handles (the result handle was the last one). Restore | 1583 // No more valid handles (the result handle was the last one). Restore |
| 1481 // previous handle scope. | 1584 // previous handle scope. |
| 1482 mov(Operand::StaticVariable(next_address), ebx); | 1585 mov(Operand::StaticVariable(next_address), ebx); |
| 1483 sub(Operand::StaticVariable(level_address), Immediate(1)); | 1586 sub(Operand::StaticVariable(level_address), Immediate(1)); |
| 1484 Assert(above_equal, "Invalid HandleScope level"); | 1587 Assert(above_equal, "Invalid HandleScope level"); |
| 1485 cmp(edi, Operand::StaticVariable(limit_address)); | 1588 cmp(edi, Operand::StaticVariable(limit_address)); |
| 1486 j(not_equal, &delete_allocated_handles, not_taken); | 1589 j(not_equal, &delete_allocated_handles); |
| 1487 bind(&leave_exit_frame); | 1590 bind(&leave_exit_frame); |
| 1488 | 1591 |
| 1489 // Check if the function scheduled an exception. | 1592 // Check if the function scheduled an exception. |
| 1490 ExternalReference scheduled_exception_address = | 1593 ExternalReference scheduled_exception_address = |
| 1491 ExternalReference::scheduled_exception_address(isolate()); | 1594 ExternalReference::scheduled_exception_address(isolate()); |
| 1492 cmp(Operand::StaticVariable(scheduled_exception_address), | 1595 cmp(Operand::StaticVariable(scheduled_exception_address), |
| 1493 Immediate(isolate()->factory()->the_hole_value())); | 1596 Immediate(isolate()->factory()->the_hole_value())); |
| 1494 j(not_equal, &promote_scheduled_exception, not_taken); | 1597 j(not_equal, &promote_scheduled_exception); |
| 1495 LeaveApiExitFrame(); | 1598 LeaveApiExitFrame(); |
| 1496 ret(stack_space * kPointerSize); | 1599 ret(stack_space * kPointerSize); |
| 1497 bind(&promote_scheduled_exception); | 1600 bind(&promote_scheduled_exception); |
| 1498 MaybeObject* result = | 1601 MaybeObject* result = |
| 1499 TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 1602 TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); |
| 1500 if (result->IsFailure()) { | 1603 if (result->IsFailure()) { |
| 1501 return result; | 1604 return result; |
| 1502 } | 1605 } |
| 1503 bind(&empty_handle); | 1606 bind(&empty_handle); |
| 1504 // It was zero; the result is undefined. | 1607 // It was zero; the result is undefined. |
| (...skipping 30 matching lines...) Expand all Loading... |
| 1535 mov(ebx, Immediate(ext)); | 1638 mov(ebx, Immediate(ext)); |
| 1536 CEntryStub ces(1); | 1639 CEntryStub ces(1); |
| 1537 return TryTailCallStub(&ces); | 1640 return TryTailCallStub(&ces); |
| 1538 } | 1641 } |
| 1539 | 1642 |
| 1540 | 1643 |
| 1541 void MacroAssembler::InvokePrologue(const ParameterCount& expected, | 1644 void MacroAssembler::InvokePrologue(const ParameterCount& expected, |
| 1542 const ParameterCount& actual, | 1645 const ParameterCount& actual, |
| 1543 Handle<Code> code_constant, | 1646 Handle<Code> code_constant, |
| 1544 const Operand& code_operand, | 1647 const Operand& code_operand, |
| 1545 NearLabel* done, | 1648 Label* done, |
| 1546 InvokeFlag flag, | 1649 InvokeFlag flag, |
| 1650 Label::Distance done_near, |
| 1547 const CallWrapper& call_wrapper) { | 1651 const CallWrapper& call_wrapper) { |
| 1548 bool definitely_matches = false; | 1652 bool definitely_matches = false; |
| 1549 Label invoke; | 1653 Label invoke; |
| 1550 if (expected.is_immediate()) { | 1654 if (expected.is_immediate()) { |
| 1551 ASSERT(actual.is_immediate()); | 1655 ASSERT(actual.is_immediate()); |
| 1552 if (expected.immediate() == actual.immediate()) { | 1656 if (expected.immediate() == actual.immediate()) { |
| 1553 definitely_matches = true; | 1657 definitely_matches = true; |
| 1554 } else { | 1658 } else { |
| 1555 mov(eax, actual.immediate()); | 1659 mov(eax, actual.immediate()); |
| 1556 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 1660 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1590 mov(edx, Immediate(code_constant)); | 1694 mov(edx, Immediate(code_constant)); |
| 1591 add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); | 1695 add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 1592 } else if (!code_operand.is_reg(edx)) { | 1696 } else if (!code_operand.is_reg(edx)) { |
| 1593 mov(edx, code_operand); | 1697 mov(edx, code_operand); |
| 1594 } | 1698 } |
| 1595 | 1699 |
| 1596 if (flag == CALL_FUNCTION) { | 1700 if (flag == CALL_FUNCTION) { |
| 1597 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET)); | 1701 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET)); |
| 1598 call(adaptor, RelocInfo::CODE_TARGET); | 1702 call(adaptor, RelocInfo::CODE_TARGET); |
| 1599 call_wrapper.AfterCall(); | 1703 call_wrapper.AfterCall(); |
| 1600 jmp(done); | 1704 jmp(done, done_near); |
| 1601 } else { | 1705 } else { |
| 1602 jmp(adaptor, RelocInfo::CODE_TARGET); | 1706 jmp(adaptor, RelocInfo::CODE_TARGET); |
| 1603 } | 1707 } |
| 1604 bind(&invoke); | 1708 bind(&invoke); |
| 1605 } | 1709 } |
| 1606 } | 1710 } |
| 1607 | 1711 |
| 1608 | 1712 |
| 1609 void MacroAssembler::InvokeCode(const Operand& code, | 1713 void MacroAssembler::InvokeCode(const Operand& code, |
| 1610 const ParameterCount& expected, | 1714 const ParameterCount& expected, |
| 1611 const ParameterCount& actual, | 1715 const ParameterCount& actual, |
| 1612 InvokeFlag flag, | 1716 InvokeFlag flag, |
| 1613 const CallWrapper& call_wrapper) { | 1717 const CallWrapper& call_wrapper) { |
| 1614 NearLabel done; | 1718 Label done; |
| 1615 InvokePrologue(expected, actual, Handle<Code>::null(), code, | 1719 InvokePrologue(expected, actual, Handle<Code>::null(), code, |
| 1616 &done, flag, call_wrapper); | 1720 &done, flag, Label::kNear, call_wrapper); |
| 1617 if (flag == CALL_FUNCTION) { | 1721 if (flag == CALL_FUNCTION) { |
| 1618 call_wrapper.BeforeCall(CallSize(code)); | 1722 call_wrapper.BeforeCall(CallSize(code)); |
| 1619 call(code); | 1723 call(code); |
| 1620 call_wrapper.AfterCall(); | 1724 call_wrapper.AfterCall(); |
| 1621 } else { | 1725 } else { |
| 1622 ASSERT(flag == JUMP_FUNCTION); | 1726 ASSERT(flag == JUMP_FUNCTION); |
| 1623 jmp(code); | 1727 jmp(code); |
| 1624 } | 1728 } |
| 1625 bind(&done); | 1729 bind(&done); |
| 1626 } | 1730 } |
| 1627 | 1731 |
| 1628 | 1732 |
| 1629 void MacroAssembler::InvokeCode(Handle<Code> code, | 1733 void MacroAssembler::InvokeCode(Handle<Code> code, |
| 1630 const ParameterCount& expected, | 1734 const ParameterCount& expected, |
| 1631 const ParameterCount& actual, | 1735 const ParameterCount& actual, |
| 1632 RelocInfo::Mode rmode, | 1736 RelocInfo::Mode rmode, |
| 1633 InvokeFlag flag, | 1737 InvokeFlag flag, |
| 1634 const CallWrapper& call_wrapper) { | 1738 const CallWrapper& call_wrapper) { |
| 1635 NearLabel done; | 1739 Label done; |
| 1636 Operand dummy(eax); | 1740 Operand dummy(eax); |
| 1637 InvokePrologue(expected, actual, code, dummy, &done, flag, call_wrapper); | 1741 InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear, |
| 1742 call_wrapper); |
| 1638 if (flag == CALL_FUNCTION) { | 1743 if (flag == CALL_FUNCTION) { |
| 1639 call_wrapper.BeforeCall(CallSize(code, rmode)); | 1744 call_wrapper.BeforeCall(CallSize(code, rmode)); |
| 1640 call(code, rmode); | 1745 call(code, rmode); |
| 1641 call_wrapper.AfterCall(); | 1746 call_wrapper.AfterCall(); |
| 1642 } else { | 1747 } else { |
| 1643 ASSERT(flag == JUMP_FUNCTION); | 1748 ASSERT(flag == JUMP_FUNCTION); |
| 1644 jmp(code, rmode); | 1749 jmp(code, rmode); |
| 1645 } | 1750 } |
| 1646 bind(&done); | 1751 bind(&done); |
| 1647 } | 1752 } |
| (...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1758 mov(function, Operand(function, Context::SlotOffset(index))); | 1863 mov(function, Operand(function, Context::SlotOffset(index))); |
| 1759 } | 1864 } |
| 1760 | 1865 |
| 1761 | 1866 |
| 1762 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 1867 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
| 1763 Register map) { | 1868 Register map) { |
| 1764 // Load the initial map. The global functions all have initial maps. | 1869 // Load the initial map. The global functions all have initial maps. |
| 1765 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 1870 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1766 if (emit_debug_code()) { | 1871 if (emit_debug_code()) { |
| 1767 Label ok, fail; | 1872 Label ok, fail; |
| 1768 CheckMap(map, isolate()->factory()->meta_map(), &fail, false); | 1873 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK); |
| 1769 jmp(&ok); | 1874 jmp(&ok); |
| 1770 bind(&fail); | 1875 bind(&fail); |
| 1771 Abort("Global functions must have initial map"); | 1876 Abort("Global functions must have initial map"); |
| 1772 bind(&ok); | 1877 bind(&ok); |
| 1773 } | 1878 } |
| 1774 } | 1879 } |
| 1775 | 1880 |
| 1776 | 1881 |
| 1777 // Store the value in register src in the safepoint register stack | 1882 // Store the value in register src in the safepoint register stack |
| 1778 // slot for register dst. | 1883 // slot for register dst. |
| (...skipping 143 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1922 Immediate(factory->fixed_cow_array_map())); | 2027 Immediate(factory->fixed_cow_array_map())); |
| 1923 j(equal, &ok); | 2028 j(equal, &ok); |
| 1924 Abort("JSObject with fast elements map has slow elements"); | 2029 Abort("JSObject with fast elements map has slow elements"); |
| 1925 bind(&ok); | 2030 bind(&ok); |
| 1926 } | 2031 } |
| 1927 } | 2032 } |
| 1928 | 2033 |
| 1929 | 2034 |
| 1930 void MacroAssembler::Check(Condition cc, const char* msg) { | 2035 void MacroAssembler::Check(Condition cc, const char* msg) { |
| 1931 Label L; | 2036 Label L; |
| 1932 j(cc, &L, taken); | 2037 j(cc, &L); |
| 1933 Abort(msg); | 2038 Abort(msg); |
| 1934 // will not return here | 2039 // will not return here |
| 1935 bind(&L); | 2040 bind(&L); |
| 1936 } | 2041 } |
| 1937 | 2042 |
| 1938 | 2043 |
| 1939 void MacroAssembler::CheckStackAlignment() { | 2044 void MacroAssembler::CheckStackAlignment() { |
| 1940 int frame_alignment = OS::ActivationFrameAlignment(); | 2045 int frame_alignment = OS::ActivationFrameAlignment(); |
| 1941 int frame_alignment_mask = frame_alignment - 1; | 2046 int frame_alignment_mask = frame_alignment - 1; |
| 1942 if (frame_alignment > kPointerSize) { | 2047 if (frame_alignment > kPointerSize) { |
| (...skipping 28 matching lines...) Expand all Loading... |
| 1971 | 2076 |
| 1972 push(eax); | 2077 push(eax); |
| 1973 push(Immediate(p0)); | 2078 push(Immediate(p0)); |
| 1974 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); | 2079 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); |
| 1975 CallRuntime(Runtime::kAbort, 2); | 2080 CallRuntime(Runtime::kAbort, 2); |
| 1976 // will not return here | 2081 // will not return here |
| 1977 int3(); | 2082 int3(); |
| 1978 } | 2083 } |
| 1979 | 2084 |
| 1980 | 2085 |
| 1981 void MacroAssembler::JumpIfNotNumber(Register reg, | |
| 1982 TypeInfo info, | |
| 1983 Label* on_not_number) { | |
| 1984 if (emit_debug_code()) AbortIfSmi(reg); | |
| 1985 if (!info.IsNumber()) { | |
| 1986 cmp(FieldOperand(reg, HeapObject::kMapOffset), | |
| 1987 isolate()->factory()->heap_number_map()); | |
| 1988 j(not_equal, on_not_number); | |
| 1989 } | |
| 1990 } | |
| 1991 | |
| 1992 | |
| 1993 void MacroAssembler::ConvertToInt32(Register dst, | |
| 1994 Register source, | |
| 1995 Register scratch, | |
| 1996 TypeInfo info, | |
| 1997 Label* on_not_int32) { | |
| 1998 if (emit_debug_code()) { | |
| 1999 AbortIfSmi(source); | |
| 2000 AbortIfNotNumber(source); | |
| 2001 } | |
| 2002 if (info.IsInteger32()) { | |
| 2003 cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset)); | |
| 2004 } else { | |
| 2005 Label done; | |
| 2006 bool push_pop = (scratch.is(no_reg) && dst.is(source)); | |
| 2007 ASSERT(!scratch.is(source)); | |
| 2008 if (push_pop) { | |
| 2009 push(dst); | |
| 2010 scratch = dst; | |
| 2011 } | |
| 2012 if (scratch.is(no_reg)) scratch = dst; | |
| 2013 cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset)); | |
| 2014 cmp(scratch, 0x80000000u); | |
| 2015 if (push_pop) { | |
| 2016 j(not_equal, &done); | |
| 2017 pop(dst); | |
| 2018 jmp(on_not_int32); | |
| 2019 } else { | |
| 2020 j(equal, on_not_int32); | |
| 2021 } | |
| 2022 | |
| 2023 bind(&done); | |
| 2024 if (push_pop) { | |
| 2025 add(Operand(esp), Immediate(kPointerSize)); // Pop. | |
| 2026 } | |
| 2027 if (!scratch.is(dst)) { | |
| 2028 mov(dst, scratch); | |
| 2029 } | |
| 2030 } | |
| 2031 } | |
| 2032 | |
| 2033 | |
| 2034 void MacroAssembler::LoadPowerOf2(XMMRegister dst, | 2086 void MacroAssembler::LoadPowerOf2(XMMRegister dst, |
| 2035 Register scratch, | 2087 Register scratch, |
| 2036 int power) { | 2088 int power) { |
| 2037 ASSERT(is_uintn(power + HeapNumber::kExponentBias, | 2089 ASSERT(is_uintn(power + HeapNumber::kExponentBias, |
| 2038 HeapNumber::kExponentBits)); | 2090 HeapNumber::kExponentBits)); |
| 2039 mov(scratch, Immediate(power + HeapNumber::kExponentBias)); | 2091 mov(scratch, Immediate(power + HeapNumber::kExponentBias)); |
| 2040 movd(dst, Operand(scratch)); | 2092 movd(dst, Operand(scratch)); |
| 2041 psllq(dst, HeapNumber::kMantissaBits); | 2093 psllq(dst, HeapNumber::kMantissaBits); |
| 2042 } | 2094 } |
| 2043 | 2095 |
| (...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2156 | 2208 |
| 2157 // Check that the code was patched as expected. | 2209 // Check that the code was patched as expected. |
| 2158 ASSERT(masm_.pc_ == address_ + size_); | 2210 ASSERT(masm_.pc_ == address_ + size_); |
| 2159 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2211 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2160 } | 2212 } |
| 2161 | 2213 |
| 2162 | 2214 |
| 2163 } } // namespace v8::internal | 2215 } } // namespace v8::internal |
| 2164 | 2216 |
| 2165 #endif // V8_TARGET_ARCH_IA32 | 2217 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |