OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 27 matching lines...) Expand all Loading... |
38 namespace v8 { | 38 namespace v8 { |
39 namespace internal { | 39 namespace internal { |
40 | 40 |
41 // ------------------------------------------------------------------------- | 41 // ------------------------------------------------------------------------- |
42 // MacroAssembler implementation. | 42 // MacroAssembler implementation. |
43 | 43 |
44 MacroAssembler::MacroAssembler(void* buffer, int size) | 44 MacroAssembler::MacroAssembler(void* buffer, int size) |
45 : Assembler(buffer, size), | 45 : Assembler(buffer, size), |
46 generating_stub_(false), | 46 generating_stub_(false), |
47 allow_stub_calls_(true), | 47 allow_stub_calls_(true), |
48 code_object_(Heap::undefined_value()) { | 48 code_object_(HEAP->undefined_value()) { |
49 } | 49 } |
50 | 50 |
51 | 51 |
52 void MacroAssembler::RecordWriteHelper(Register object, | 52 void MacroAssembler::RecordWriteHelper(Register object, |
53 Register addr, | 53 Register addr, |
54 Register scratch) { | 54 Register scratch) { |
55 if (emit_debug_code()) { | 55 if (emit_debug_code()) { |
56 // Check that the object is not in new space. | 56 // Check that the object is not in new space. |
57 Label not_in_new_space; | 57 Label not_in_new_space; |
58 InNewSpace(object, scratch, not_equal, &not_in_new_space); | 58 InNewSpace(object, scratch, not_equal, &not_in_new_space); |
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
224 Register scratch, | 224 Register scratch, |
225 Label* fail) { | 225 Label* fail) { |
226 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset)); | 226 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset)); |
227 sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE)); | 227 sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE)); |
228 cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE); | 228 cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE); |
229 j(above, fail); | 229 j(above, fail); |
230 } | 230 } |
231 | 231 |
232 | 232 |
233 void MacroAssembler::FCmp() { | 233 void MacroAssembler::FCmp() { |
234 if (CpuFeatures::IsSupported(CMOV)) { | 234 if (Isolate::Current()->cpu_features()->IsSupported(CMOV)) { |
235 fucomip(); | 235 fucomip(); |
236 ffree(0); | 236 ffree(0); |
237 fincstp(); | 237 fincstp(); |
238 } else { | 238 } else { |
239 fucompp(); | 239 fucompp(); |
240 push(eax); | 240 push(eax); |
241 fnstsw_ax(); | 241 fnstsw_ax(); |
242 sahf(); | 242 sahf(); |
243 pop(eax); | 243 pop(eax); |
244 } | 244 } |
245 } | 245 } |
246 | 246 |
247 | 247 |
248 void MacroAssembler::AbortIfNotNumber(Register object) { | 248 void MacroAssembler::AbortIfNotNumber(Register object) { |
249 Label ok; | 249 Label ok; |
250 test(object, Immediate(kSmiTagMask)); | 250 test(object, Immediate(kSmiTagMask)); |
251 j(zero, &ok); | 251 j(zero, &ok); |
252 cmp(FieldOperand(object, HeapObject::kMapOffset), | 252 cmp(FieldOperand(object, HeapObject::kMapOffset), |
253 Factory::heap_number_map()); | 253 FACTORY->heap_number_map()); |
254 Assert(equal, "Operand not a number"); | 254 Assert(equal, "Operand not a number"); |
255 bind(&ok); | 255 bind(&ok); |
256 } | 256 } |
257 | 257 |
258 | 258 |
259 void MacroAssembler::AbortIfNotSmi(Register object) { | 259 void MacroAssembler::AbortIfNotSmi(Register object) { |
260 test(object, Immediate(kSmiTagMask)); | 260 test(object, Immediate(kSmiTagMask)); |
261 Assert(equal, "Operand is not a smi"); | 261 Assert(equal, "Operand is not a smi"); |
262 } | 262 } |
263 | 263 |
(...skipping 15 matching lines...) Expand all Loading... |
279 } | 279 } |
280 | 280 |
281 | 281 |
282 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 282 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
283 push(ebp); | 283 push(ebp); |
284 mov(ebp, Operand(esp)); | 284 mov(ebp, Operand(esp)); |
285 push(esi); | 285 push(esi); |
286 push(Immediate(Smi::FromInt(type))); | 286 push(Immediate(Smi::FromInt(type))); |
287 push(Immediate(CodeObject())); | 287 push(Immediate(CodeObject())); |
288 if (emit_debug_code()) { | 288 if (emit_debug_code()) { |
289 cmp(Operand(esp, 0), Immediate(Factory::undefined_value())); | 289 cmp(Operand(esp, 0), Immediate(FACTORY->undefined_value())); |
290 Check(not_equal, "code object not properly patched"); | 290 Check(not_equal, "code object not properly patched"); |
291 } | 291 } |
292 } | 292 } |
293 | 293 |
294 | 294 |
295 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 295 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
296 if (emit_debug_code()) { | 296 if (emit_debug_code()) { |
297 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset), | 297 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset), |
298 Immediate(Smi::FromInt(type))); | 298 Immediate(Smi::FromInt(type))); |
299 Check(equal, "stack frame types must match"); | 299 Check(equal, "stack frame types must match"); |
300 } | 300 } |
301 leave(); | 301 leave(); |
302 } | 302 } |
303 | 303 |
304 | 304 |
305 void MacroAssembler::EnterExitFramePrologue() { | 305 void MacroAssembler::EnterExitFramePrologue() { |
306 // Setup the frame structure on the stack. | 306 // Setup the frame structure on the stack. |
307 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); | 307 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); |
308 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); | 308 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); |
309 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); | 309 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); |
310 push(ebp); | 310 push(ebp); |
311 mov(ebp, Operand(esp)); | 311 mov(ebp, Operand(esp)); |
312 | 312 |
313 // Reserve room for entry stack pointer and push the code object. | 313 // Reserve room for entry stack pointer and push the code object. |
314 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); | 314 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); |
315 push(Immediate(0)); // Saved entry sp, patched before call. | 315 push(Immediate(0)); // Saved entry sp, patched before call. |
316 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot. | 316 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot. |
317 | 317 |
318 // Save the frame pointer and the context in top. | 318 // Save the frame pointer and the context in top. |
319 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); | 319 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address); |
320 ExternalReference context_address(Top::k_context_address); | 320 ExternalReference context_address(Isolate::k_context_address); |
321 mov(Operand::StaticVariable(c_entry_fp_address), ebp); | 321 mov(Operand::StaticVariable(c_entry_fp_address), ebp); |
322 mov(Operand::StaticVariable(context_address), esi); | 322 mov(Operand::StaticVariable(context_address), esi); |
323 } | 323 } |
324 | 324 |
325 | 325 |
326 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) { | 326 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) { |
327 // Optionally save all XMM registers. | 327 // Optionally save all XMM registers. |
328 if (save_doubles) { | 328 if (save_doubles) { |
329 CpuFeatures::Scope scope(SSE2); | 329 CpuFeatures::Scope scope(SSE2); |
330 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize; | 330 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize; |
331 sub(Operand(esp), Immediate(space)); | 331 sub(Operand(esp), Immediate(space)); |
332 const int offset = -2 * kPointerSize; | 332 const int offset = -2 * kPointerSize; |
333 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { | 333 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
334 XMMRegister reg = XMMRegister::from_code(i); | 334 XMMRegister reg = XMMRegister::from_code(i); |
335 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg); | 335 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg); |
336 } | 336 } |
337 } else { | 337 } else { |
338 sub(Operand(esp), Immediate(argc * kPointerSize)); | 338 sub(Operand(esp), Immediate(argc * kPointerSize)); |
339 } | 339 } |
340 | 340 |
341 // Get the required frame alignment for the OS. | 341 // Get the required frame alignment for the OS. |
342 static const int kFrameAlignment = OS::ActivationFrameAlignment(); | 342 const int kFrameAlignment = OS::ActivationFrameAlignment(); |
343 if (kFrameAlignment > 0) { | 343 if (kFrameAlignment > 0) { |
344 ASSERT(IsPowerOf2(kFrameAlignment)); | 344 ASSERT(IsPowerOf2(kFrameAlignment)); |
345 and_(esp, -kFrameAlignment); | 345 and_(esp, -kFrameAlignment); |
346 } | 346 } |
347 | 347 |
348 // Patch the saved entry sp. | 348 // Patch the saved entry sp. |
349 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp); | 349 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp); |
350 } | 350 } |
351 | 351 |
352 | 352 |
353 void MacroAssembler::EnterExitFrame(bool save_doubles) { | 353 void MacroAssembler::EnterExitFrame(bool save_doubles) { |
354 EnterExitFramePrologue(); | 354 EnterExitFramePrologue(); |
355 | 355 |
356 // Setup argc and argv in callee-saved registers. | 356 // Setup argc and argv in callee-saved registers. |
357 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; | 357 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; |
358 mov(edi, Operand(eax)); | 358 mov(edi, Operand(eax)); |
359 lea(esi, Operand(ebp, eax, times_4, offset)); | 359 lea(esi, Operand(ebp, eax, times_4, offset)); |
360 | 360 |
361 EnterExitFrameEpilogue(2, save_doubles); | 361 // Reserve space for argc, argv and isolate. |
| 362 EnterExitFrameEpilogue(3, save_doubles); |
362 } | 363 } |
363 | 364 |
364 | 365 |
365 void MacroAssembler::EnterApiExitFrame(int argc) { | 366 void MacroAssembler::EnterApiExitFrame(int argc) { |
366 EnterExitFramePrologue(); | 367 EnterExitFramePrologue(); |
367 EnterExitFrameEpilogue(argc, false); | 368 EnterExitFrameEpilogue(argc, false); |
368 } | 369 } |
369 | 370 |
370 | 371 |
371 void MacroAssembler::LeaveExitFrame(bool save_doubles) { | 372 void MacroAssembler::LeaveExitFrame(bool save_doubles) { |
(...skipping 15 matching lines...) Expand all Loading... |
387 lea(esp, Operand(esi, 1 * kPointerSize)); | 388 lea(esp, Operand(esi, 1 * kPointerSize)); |
388 | 389 |
389 // Push the return address to get ready to return. | 390 // Push the return address to get ready to return. |
390 push(ecx); | 391 push(ecx); |
391 | 392 |
392 LeaveExitFrameEpilogue(); | 393 LeaveExitFrameEpilogue(); |
393 } | 394 } |
394 | 395 |
395 void MacroAssembler::LeaveExitFrameEpilogue() { | 396 void MacroAssembler::LeaveExitFrameEpilogue() { |
396 // Restore current context from top and clear it in debug mode. | 397 // Restore current context from top and clear it in debug mode. |
397 ExternalReference context_address(Top::k_context_address); | 398 ExternalReference context_address(Isolate::k_context_address); |
398 mov(esi, Operand::StaticVariable(context_address)); | 399 mov(esi, Operand::StaticVariable(context_address)); |
399 #ifdef DEBUG | 400 #ifdef DEBUG |
400 mov(Operand::StaticVariable(context_address), Immediate(0)); | 401 mov(Operand::StaticVariable(context_address), Immediate(0)); |
401 #endif | 402 #endif |
402 | 403 |
403 // Clear the top frame. | 404 // Clear the top frame. |
404 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); | 405 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address); |
405 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0)); | 406 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0)); |
406 } | 407 } |
407 | 408 |
408 | 409 |
409 void MacroAssembler::LeaveApiExitFrame() { | 410 void MacroAssembler::LeaveApiExitFrame() { |
410 mov(esp, Operand(ebp)); | 411 mov(esp, Operand(ebp)); |
411 pop(ebp); | 412 pop(ebp); |
412 | 413 |
413 LeaveExitFrameEpilogue(); | 414 LeaveExitFrameEpilogue(); |
414 } | 415 } |
(...skipping 13 matching lines...) Expand all Loading... |
428 push(ebp); | 429 push(ebp); |
429 } else { | 430 } else { |
430 ASSERT(try_location == IN_JS_ENTRY); | 431 ASSERT(try_location == IN_JS_ENTRY); |
431 // The frame pointer does not point to a JS frame so we save NULL | 432 // The frame pointer does not point to a JS frame so we save NULL |
432 // for ebp. We expect the code throwing an exception to check ebp | 433 // for ebp. We expect the code throwing an exception to check ebp |
433 // before dereferencing it to restore the context. | 434 // before dereferencing it to restore the context. |
434 push(Immediate(StackHandler::ENTRY)); | 435 push(Immediate(StackHandler::ENTRY)); |
435 push(Immediate(0)); // NULL frame pointer. | 436 push(Immediate(0)); // NULL frame pointer. |
436 } | 437 } |
437 // Save the current handler as the next handler. | 438 // Save the current handler as the next handler. |
438 push(Operand::StaticVariable(ExternalReference(Top::k_handler_address))); | 439 push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address))); |
439 // Link this handler as the new current one. | 440 // Link this handler as the new current one. |
440 mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp); | 441 mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)), |
| 442 esp); |
441 } | 443 } |
442 | 444 |
443 | 445 |
444 void MacroAssembler::PopTryHandler() { | 446 void MacroAssembler::PopTryHandler() { |
445 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); | 447 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); |
446 pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address))); | 448 pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address))); |
447 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); | 449 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); |
448 } | 450 } |
449 | 451 |
450 | 452 |
451 void MacroAssembler::Throw(Register value) { | 453 void MacroAssembler::Throw(Register value) { |
452 // Adjust this code if not the case. | 454 // Adjust this code if not the case. |
453 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); | 455 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); |
454 | 456 |
455 // eax must hold the exception. | 457 // eax must hold the exception. |
456 if (!value.is(eax)) { | 458 if (!value.is(eax)) { |
457 mov(eax, value); | 459 mov(eax, value); |
458 } | 460 } |
459 | 461 |
460 // Drop the sp to the top of the handler. | 462 // Drop the sp to the top of the handler. |
461 ExternalReference handler_address(Top::k_handler_address); | 463 ExternalReference handler_address(Isolate::k_handler_address); |
462 mov(esp, Operand::StaticVariable(handler_address)); | 464 mov(esp, Operand::StaticVariable(handler_address)); |
463 | 465 |
464 // Restore next handler and frame pointer, discard handler state. | 466 // Restore next handler and frame pointer, discard handler state. |
465 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 467 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
466 pop(Operand::StaticVariable(handler_address)); | 468 pop(Operand::StaticVariable(handler_address)); |
467 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); | 469 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); |
468 pop(ebp); | 470 pop(ebp); |
469 pop(edx); // Remove state. | 471 pop(edx); // Remove state. |
470 | 472 |
471 // Before returning we restore the context from the frame pointer if | 473 // Before returning we restore the context from the frame pointer if |
(...skipping 15 matching lines...) Expand all Loading... |
487 Register value) { | 489 Register value) { |
488 // Adjust this code if not the case. | 490 // Adjust this code if not the case. |
489 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); | 491 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); |
490 | 492 |
491 // eax must hold the exception. | 493 // eax must hold the exception. |
492 if (!value.is(eax)) { | 494 if (!value.is(eax)) { |
493 mov(eax, value); | 495 mov(eax, value); |
494 } | 496 } |
495 | 497 |
496 // Drop sp to the top stack handler. | 498 // Drop sp to the top stack handler. |
497 ExternalReference handler_address(Top::k_handler_address); | 499 ExternalReference handler_address(Isolate::k_handler_address); |
498 mov(esp, Operand::StaticVariable(handler_address)); | 500 mov(esp, Operand::StaticVariable(handler_address)); |
499 | 501 |
500 // Unwind the handlers until the ENTRY handler is found. | 502 // Unwind the handlers until the ENTRY handler is found. |
501 NearLabel loop, done; | 503 NearLabel loop, done; |
502 bind(&loop); | 504 bind(&loop); |
503 // Load the type of the current stack handler. | 505 // Load the type of the current stack handler. |
504 const int kStateOffset = StackHandlerConstants::kStateOffset; | 506 const int kStateOffset = StackHandlerConstants::kStateOffset; |
505 cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY)); | 507 cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY)); |
506 j(equal, &done); | 508 j(equal, &done); |
507 // Fetch the next handler in the list. | 509 // Fetch the next handler in the list. |
508 const int kNextOffset = StackHandlerConstants::kNextOffset; | 510 const int kNextOffset = StackHandlerConstants::kNextOffset; |
509 mov(esp, Operand(esp, kNextOffset)); | 511 mov(esp, Operand(esp, kNextOffset)); |
510 jmp(&loop); | 512 jmp(&loop); |
511 bind(&done); | 513 bind(&done); |
512 | 514 |
513 // Set the top handler address to next handler past the current ENTRY handler. | 515 // Set the top handler address to next handler past the current ENTRY handler. |
514 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 516 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
515 pop(Operand::StaticVariable(handler_address)); | 517 pop(Operand::StaticVariable(handler_address)); |
516 | 518 |
517 if (type == OUT_OF_MEMORY) { | 519 if (type == OUT_OF_MEMORY) { |
518 // Set external caught exception to false. | 520 // Set external caught exception to false. |
519 ExternalReference external_caught(Top::k_external_caught_exception_address); | 521 ExternalReference external_caught( |
| 522 Isolate::k_external_caught_exception_address); |
520 mov(eax, false); | 523 mov(eax, false); |
521 mov(Operand::StaticVariable(external_caught), eax); | 524 mov(Operand::StaticVariable(external_caught), eax); |
522 | 525 |
523 // Set pending exception and eax to out of memory exception. | 526 // Set pending exception and eax to out of memory exception. |
524 ExternalReference pending_exception(Top::k_pending_exception_address); | 527 ExternalReference pending_exception(Isolate::k_pending_exception_address); |
525 mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); | 528 mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); |
526 mov(Operand::StaticVariable(pending_exception), eax); | 529 mov(Operand::StaticVariable(pending_exception), eax); |
527 } | 530 } |
528 | 531 |
529 // Clear the context pointer. | 532 // Clear the context pointer. |
530 Set(esi, Immediate(0)); | 533 Set(esi, Immediate(0)); |
531 | 534 |
532 // Restore fp from handler and discard handler state. | 535 // Restore fp from handler and discard handler state. |
533 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); | 536 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); |
534 pop(ebp); | 537 pop(ebp); |
(...skipping 22 matching lines...) Expand all Loading... |
557 // Load the global context of the current context. | 560 // Load the global context of the current context. |
558 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 561 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
559 mov(scratch, FieldOperand(scratch, offset)); | 562 mov(scratch, FieldOperand(scratch, offset)); |
560 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); | 563 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); |
561 | 564 |
562 // Check the context is a global context. | 565 // Check the context is a global context. |
563 if (emit_debug_code()) { | 566 if (emit_debug_code()) { |
564 push(scratch); | 567 push(scratch); |
565 // Read the first word and compare to global_context_map. | 568 // Read the first word and compare to global_context_map. |
566 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 569 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
567 cmp(scratch, Factory::global_context_map()); | 570 cmp(scratch, FACTORY->global_context_map()); |
568 Check(equal, "JSGlobalObject::global_context should be a global context."); | 571 Check(equal, "JSGlobalObject::global_context should be a global context."); |
569 pop(scratch); | 572 pop(scratch); |
570 } | 573 } |
571 | 574 |
572 // Check if both contexts are the same. | 575 // Check if both contexts are the same. |
573 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 576 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
574 j(equal, &same_contexts, taken); | 577 j(equal, &same_contexts, taken); |
575 | 578 |
576 // Compare security tokens, save holder_reg on the stack so we can use it | 579 // Compare security tokens, save holder_reg on the stack so we can use it |
577 // as a temporary register. | 580 // as a temporary register. |
578 // | 581 // |
579 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | 582 // TODO(119): avoid push(holder_reg)/pop(holder_reg) |
580 push(holder_reg); | 583 push(holder_reg); |
581 // Check that the security token in the calling global object is | 584 // Check that the security token in the calling global object is |
582 // compatible with the security token in the receiving global | 585 // compatible with the security token in the receiving global |
583 // object. | 586 // object. |
584 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 587 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
585 | 588 |
586 // Check the context is a global context. | 589 // Check the context is a global context. |
587 if (emit_debug_code()) { | 590 if (emit_debug_code()) { |
588 cmp(holder_reg, Factory::null_value()); | 591 cmp(holder_reg, FACTORY->null_value()); |
589 Check(not_equal, "JSGlobalProxy::context() should not be null."); | 592 Check(not_equal, "JSGlobalProxy::context() should not be null."); |
590 | 593 |
591 push(holder_reg); | 594 push(holder_reg); |
592 // Read the first word and compare to global_context_map(), | 595 // Read the first word and compare to global_context_map(), |
593 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); | 596 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); |
594 cmp(holder_reg, Factory::global_context_map()); | 597 cmp(holder_reg, FACTORY->global_context_map()); |
595 Check(equal, "JSGlobalObject::global_context should be a global context."); | 598 Check(equal, "JSGlobalObject::global_context should be a global context."); |
596 pop(holder_reg); | 599 pop(holder_reg); |
597 } | 600 } |
598 | 601 |
599 int token_offset = Context::kHeaderSize + | 602 int token_offset = Context::kHeaderSize + |
600 Context::SECURITY_TOKEN_INDEX * kPointerSize; | 603 Context::SECURITY_TOKEN_INDEX * kPointerSize; |
601 mov(scratch, FieldOperand(scratch, token_offset)); | 604 mov(scratch, FieldOperand(scratch, token_offset)); |
602 cmp(scratch, FieldOperand(holder_reg, token_offset)); | 605 cmp(scratch, FieldOperand(holder_reg, token_offset)); |
603 pop(holder_reg); | 606 pop(holder_reg); |
604 j(not_equal, miss, not_taken); | 607 j(not_equal, miss, not_taken); |
(...skipping 218 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
823 // Allocate heap number in new space. | 826 // Allocate heap number in new space. |
824 AllocateInNewSpace(HeapNumber::kSize, | 827 AllocateInNewSpace(HeapNumber::kSize, |
825 result, | 828 result, |
826 scratch1, | 829 scratch1, |
827 scratch2, | 830 scratch2, |
828 gc_required, | 831 gc_required, |
829 TAG_OBJECT); | 832 TAG_OBJECT); |
830 | 833 |
831 // Set the map. | 834 // Set the map. |
832 mov(FieldOperand(result, HeapObject::kMapOffset), | 835 mov(FieldOperand(result, HeapObject::kMapOffset), |
833 Immediate(Factory::heap_number_map())); | 836 Immediate(FACTORY->heap_number_map())); |
834 } | 837 } |
835 | 838 |
836 | 839 |
837 void MacroAssembler::AllocateTwoByteString(Register result, | 840 void MacroAssembler::AllocateTwoByteString(Register result, |
838 Register length, | 841 Register length, |
839 Register scratch1, | 842 Register scratch1, |
840 Register scratch2, | 843 Register scratch2, |
841 Register scratch3, | 844 Register scratch3, |
842 Label* gc_required) { | 845 Label* gc_required) { |
843 // Calculate the number of bytes needed for the characters in the string while | 846 // Calculate the number of bytes needed for the characters in the string while |
844 // observing object alignment. | 847 // observing object alignment. |
845 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); | 848 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); |
846 ASSERT(kShortSize == 2); | 849 ASSERT(kShortSize == 2); |
847 // scratch1 = length * 2 + kObjectAlignmentMask. | 850 // scratch1 = length * 2 + kObjectAlignmentMask. |
848 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask)); | 851 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask)); |
849 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask)); | 852 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask)); |
850 | 853 |
851 // Allocate two byte string in new space. | 854 // Allocate two byte string in new space. |
852 AllocateInNewSpace(SeqTwoByteString::kHeaderSize, | 855 AllocateInNewSpace(SeqTwoByteString::kHeaderSize, |
853 times_1, | 856 times_1, |
854 scratch1, | 857 scratch1, |
855 result, | 858 result, |
856 scratch2, | 859 scratch2, |
857 scratch3, | 860 scratch3, |
858 gc_required, | 861 gc_required, |
859 TAG_OBJECT); | 862 TAG_OBJECT); |
860 | 863 |
861 // Set the map, length and hash field. | 864 // Set the map, length and hash field. |
862 mov(FieldOperand(result, HeapObject::kMapOffset), | 865 mov(FieldOperand(result, HeapObject::kMapOffset), |
863 Immediate(Factory::string_map())); | 866 Immediate(FACTORY->string_map())); |
864 mov(scratch1, length); | 867 mov(scratch1, length); |
865 SmiTag(scratch1); | 868 SmiTag(scratch1); |
866 mov(FieldOperand(result, String::kLengthOffset), scratch1); | 869 mov(FieldOperand(result, String::kLengthOffset), scratch1); |
867 mov(FieldOperand(result, String::kHashFieldOffset), | 870 mov(FieldOperand(result, String::kHashFieldOffset), |
868 Immediate(String::kEmptyHashField)); | 871 Immediate(String::kEmptyHashField)); |
869 } | 872 } |
870 | 873 |
871 | 874 |
872 void MacroAssembler::AllocateAsciiString(Register result, | 875 void MacroAssembler::AllocateAsciiString(Register result, |
873 Register length, | 876 Register length, |
(...skipping 14 matching lines...) Expand all Loading... |
888 times_1, | 891 times_1, |
889 scratch1, | 892 scratch1, |
890 result, | 893 result, |
891 scratch2, | 894 scratch2, |
892 scratch3, | 895 scratch3, |
893 gc_required, | 896 gc_required, |
894 TAG_OBJECT); | 897 TAG_OBJECT); |
895 | 898 |
896 // Set the map, length and hash field. | 899 // Set the map, length and hash field. |
897 mov(FieldOperand(result, HeapObject::kMapOffset), | 900 mov(FieldOperand(result, HeapObject::kMapOffset), |
898 Immediate(Factory::ascii_string_map())); | 901 Immediate(FACTORY->ascii_string_map())); |
899 mov(scratch1, length); | 902 mov(scratch1, length); |
900 SmiTag(scratch1); | 903 SmiTag(scratch1); |
901 mov(FieldOperand(result, String::kLengthOffset), scratch1); | 904 mov(FieldOperand(result, String::kLengthOffset), scratch1); |
902 mov(FieldOperand(result, String::kHashFieldOffset), | 905 mov(FieldOperand(result, String::kHashFieldOffset), |
903 Immediate(String::kEmptyHashField)); | 906 Immediate(String::kEmptyHashField)); |
904 } | 907 } |
905 | 908 |
906 | 909 |
907 void MacroAssembler::AllocateAsciiString(Register result, | 910 void MacroAssembler::AllocateAsciiString(Register result, |
908 int length, | 911 int length, |
909 Register scratch1, | 912 Register scratch1, |
910 Register scratch2, | 913 Register scratch2, |
911 Label* gc_required) { | 914 Label* gc_required) { |
912 ASSERT(length > 0); | 915 ASSERT(length > 0); |
913 | 916 |
914 // Allocate ascii string in new space. | 917 // Allocate ascii string in new space. |
915 AllocateInNewSpace(SeqAsciiString::SizeFor(length), | 918 AllocateInNewSpace(SeqAsciiString::SizeFor(length), |
916 result, | 919 result, |
917 scratch1, | 920 scratch1, |
918 scratch2, | 921 scratch2, |
919 gc_required, | 922 gc_required, |
920 TAG_OBJECT); | 923 TAG_OBJECT); |
921 | 924 |
922 // Set the map, length and hash field. | 925 // Set the map, length and hash field. |
923 mov(FieldOperand(result, HeapObject::kMapOffset), | 926 mov(FieldOperand(result, HeapObject::kMapOffset), |
924 Immediate(Factory::ascii_string_map())); | 927 Immediate(FACTORY->ascii_string_map())); |
925 mov(FieldOperand(result, String::kLengthOffset), | 928 mov(FieldOperand(result, String::kLengthOffset), |
926 Immediate(Smi::FromInt(length))); | 929 Immediate(Smi::FromInt(length))); |
927 mov(FieldOperand(result, String::kHashFieldOffset), | 930 mov(FieldOperand(result, String::kHashFieldOffset), |
928 Immediate(String::kEmptyHashField)); | 931 Immediate(String::kEmptyHashField)); |
929 } | 932 } |
930 | 933 |
931 | 934 |
932 void MacroAssembler::AllocateConsString(Register result, | 935 void MacroAssembler::AllocateConsString(Register result, |
933 Register scratch1, | 936 Register scratch1, |
934 Register scratch2, | 937 Register scratch2, |
935 Label* gc_required) { | 938 Label* gc_required) { |
936 // Allocate heap number in new space. | 939 // Allocate heap number in new space. |
937 AllocateInNewSpace(ConsString::kSize, | 940 AllocateInNewSpace(ConsString::kSize, |
938 result, | 941 result, |
939 scratch1, | 942 scratch1, |
940 scratch2, | 943 scratch2, |
941 gc_required, | 944 gc_required, |
942 TAG_OBJECT); | 945 TAG_OBJECT); |
943 | 946 |
944 // Set the map. The other fields are left uninitialized. | 947 // Set the map. The other fields are left uninitialized. |
945 mov(FieldOperand(result, HeapObject::kMapOffset), | 948 mov(FieldOperand(result, HeapObject::kMapOffset), |
946 Immediate(Factory::cons_string_map())); | 949 Immediate(FACTORY->cons_string_map())); |
947 } | 950 } |
948 | 951 |
949 | 952 |
950 void MacroAssembler::AllocateAsciiConsString(Register result, | 953 void MacroAssembler::AllocateAsciiConsString(Register result, |
951 Register scratch1, | 954 Register scratch1, |
952 Register scratch2, | 955 Register scratch2, |
953 Label* gc_required) { | 956 Label* gc_required) { |
954 // Allocate heap number in new space. | 957 // Allocate heap number in new space. |
955 AllocateInNewSpace(ConsString::kSize, | 958 AllocateInNewSpace(ConsString::kSize, |
956 result, | 959 result, |
957 scratch1, | 960 scratch1, |
958 scratch2, | 961 scratch2, |
959 gc_required, | 962 gc_required, |
960 TAG_OBJECT); | 963 TAG_OBJECT); |
961 | 964 |
962 // Set the map. The other fields are left uninitialized. | 965 // Set the map. The other fields are left uninitialized. |
963 mov(FieldOperand(result, HeapObject::kMapOffset), | 966 mov(FieldOperand(result, HeapObject::kMapOffset), |
964 Immediate(Factory::cons_ascii_string_map())); | 967 Immediate(FACTORY->cons_ascii_string_map())); |
965 } | 968 } |
966 | 969 |
967 | 970 |
968 // Copy memory, byte-by-byte, from source to destination. Not optimized for | 971 // Copy memory, byte-by-byte, from source to destination. Not optimized for |
969 // long or aligned copies. The contents of scratch and length are destroyed. | 972 // long or aligned copies. The contents of scratch and length are destroyed. |
970 // Source and destination are incremented by length. | 973 // Source and destination are incremented by length. |
971 // Many variants of movsb, loop unrolling, word moves, and indexed operands | 974 // Many variants of movsb, loop unrolling, word moves, and indexed operands |
972 // have been tried here already, and this is fastest. | 975 // have been tried here already, and this is fastest. |
973 // A simpler loop is faster on small copies, but 30% slower on large ones. | 976 // A simpler loop is faster on small copies, but 30% slower on large ones. |
974 // The cld() instruction must have been emitted, to set the direction flag(), | 977 // The cld() instruction must have been emitted, to set the direction flag(), |
(...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1072 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype)); | 1075 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype)); |
1073 j(not_zero, &non_instance, not_taken); | 1076 j(not_zero, &non_instance, not_taken); |
1074 | 1077 |
1075 // Get the prototype or initial map from the function. | 1078 // Get the prototype or initial map from the function. |
1076 mov(result, | 1079 mov(result, |
1077 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 1080 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
1078 | 1081 |
1079 // If the prototype or initial map is the hole, don't return it and | 1082 // If the prototype or initial map is the hole, don't return it and |
1080 // simply miss the cache instead. This will allow us to allocate a | 1083 // simply miss the cache instead. This will allow us to allocate a |
1081 // prototype object on-demand in the runtime system. | 1084 // prototype object on-demand in the runtime system. |
1082 cmp(Operand(result), Immediate(Factory::the_hole_value())); | 1085 cmp(Operand(result), Immediate(FACTORY->the_hole_value())); |
1083 j(equal, miss, not_taken); | 1086 j(equal, miss, not_taken); |
1084 | 1087 |
1085 // If the function does not have an initial map, we're done. | 1088 // If the function does not have an initial map, we're done. |
1086 Label done; | 1089 Label done; |
1087 CmpObjectType(result, MAP_TYPE, scratch); | 1090 CmpObjectType(result, MAP_TYPE, scratch); |
1088 j(not_equal, &done); | 1091 j(not_equal, &done); |
1089 | 1092 |
1090 // Get the prototype from the initial map. | 1093 // Get the prototype from the initial map. |
1091 mov(result, FieldOperand(result, Map::kPrototypeOffset)); | 1094 mov(result, FieldOperand(result, Map::kPrototypeOffset)); |
1092 jmp(&done); | 1095 jmp(&done); |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1138 void MacroAssembler::StubReturn(int argc) { | 1141 void MacroAssembler::StubReturn(int argc) { |
1139 ASSERT(argc >= 1 && generating_stub()); | 1142 ASSERT(argc >= 1 && generating_stub()); |
1140 ret((argc - 1) * kPointerSize); | 1143 ret((argc - 1) * kPointerSize); |
1141 } | 1144 } |
1142 | 1145 |
1143 | 1146 |
1144 void MacroAssembler::IllegalOperation(int num_arguments) { | 1147 void MacroAssembler::IllegalOperation(int num_arguments) { |
1145 if (num_arguments > 0) { | 1148 if (num_arguments > 0) { |
1146 add(Operand(esp), Immediate(num_arguments * kPointerSize)); | 1149 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
1147 } | 1150 } |
1148 mov(eax, Immediate(Factory::undefined_value())); | 1151 mov(eax, Immediate(FACTORY->undefined_value())); |
1149 } | 1152 } |
1150 | 1153 |
1151 | 1154 |
1152 void MacroAssembler::IndexFromHash(Register hash, Register index) { | 1155 void MacroAssembler::IndexFromHash(Register hash, Register index) { |
1153 // The assert checks that the constants for the maximum number of digits | 1156 // The assert checks that the constants for the maximum number of digits |
1154 // for an array index cached in the hash field and the number of bits | 1157 // for an array index cached in the hash field and the number of bits |
1155 // reserved for it does not conflict. | 1158 // reserved for it does not conflict. |
1156 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < | 1159 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < |
1157 (1 << String::kArrayIndexValueBits)); | 1160 (1 << String::kArrayIndexValueBits)); |
1158 // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in | 1161 // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in |
1159 // the low kHashShift bits. | 1162 // the low kHashShift bits. |
1160 and_(hash, String::kArrayIndexValueMask); | 1163 and_(hash, String::kArrayIndexValueMask); |
1161 STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0); | 1164 STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0); |
1162 if (String::kHashShift > kSmiTagSize) { | 1165 if (String::kHashShift > kSmiTagSize) { |
1163 shr(hash, String::kHashShift - kSmiTagSize); | 1166 shr(hash, String::kHashShift - kSmiTagSize); |
1164 } | 1167 } |
1165 if (!index.is(hash)) { | 1168 if (!index.is(hash)) { |
1166 mov(index, hash); | 1169 mov(index, hash); |
1167 } | 1170 } |
1168 } | 1171 } |
1169 | 1172 |
1170 | 1173 |
1171 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { | 1174 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { |
1172 CallRuntime(Runtime::FunctionForId(id), num_arguments); | 1175 CallRuntime(Runtime::FunctionForId(id), num_arguments); |
1173 } | 1176 } |
1174 | 1177 |
1175 | 1178 |
1176 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { | 1179 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { |
1177 Runtime::Function* function = Runtime::FunctionForId(id); | 1180 const Runtime::Function* function = Runtime::FunctionForId(id); |
1178 Set(eax, Immediate(function->nargs)); | 1181 Set(eax, Immediate(function->nargs)); |
1179 mov(ebx, Immediate(ExternalReference(function))); | 1182 mov(ebx, Immediate(ExternalReference(function))); |
1180 CEntryStub ces(1); | 1183 CEntryStub ces(1); |
1181 ces.SaveDoubles(); | 1184 ces.SaveDoubles(); |
1182 CallStub(&ces); | 1185 CallStub(&ces); |
1183 } | 1186 } |
1184 | 1187 |
1185 | 1188 |
1186 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, | 1189 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, |
1187 int num_arguments) { | 1190 int num_arguments) { |
1188 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); | 1191 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); |
1189 } | 1192 } |
1190 | 1193 |
1191 | 1194 |
1192 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | 1195 void MacroAssembler::CallRuntime(const Runtime::Function* f, |
| 1196 int num_arguments) { |
1193 // If the expected number of arguments of the runtime function is | 1197 // If the expected number of arguments of the runtime function is |
1194 // constant, we check that the actual number of arguments match the | 1198 // constant, we check that the actual number of arguments match the |
1195 // expectation. | 1199 // expectation. |
1196 if (f->nargs >= 0 && f->nargs != num_arguments) { | 1200 if (f->nargs >= 0 && f->nargs != num_arguments) { |
1197 IllegalOperation(num_arguments); | 1201 IllegalOperation(num_arguments); |
1198 return; | 1202 return; |
1199 } | 1203 } |
1200 | 1204 |
1201 // TODO(1236192): Most runtime routines don't need the number of | 1205 // TODO(1236192): Most runtime routines don't need the number of |
1202 // arguments passed in because it is constant. At some point we | 1206 // arguments passed in because it is constant. At some point we |
1203 // should remove this need and make the runtime routine entry code | 1207 // should remove this need and make the runtime routine entry code |
1204 // smarter. | 1208 // smarter. |
1205 Set(eax, Immediate(num_arguments)); | 1209 Set(eax, Immediate(num_arguments)); |
1206 mov(ebx, Immediate(ExternalReference(f))); | 1210 mov(ebx, Immediate(ExternalReference(f))); |
1207 CEntryStub ces(1); | 1211 CEntryStub ces(1); |
1208 CallStub(&ces); | 1212 CallStub(&ces); |
1209 } | 1213 } |
1210 | 1214 |
1211 | 1215 |
1212 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f, | 1216 MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f, |
1213 int num_arguments) { | 1217 int num_arguments) { |
1214 if (f->nargs >= 0 && f->nargs != num_arguments) { | 1218 if (f->nargs >= 0 && f->nargs != num_arguments) { |
1215 IllegalOperation(num_arguments); | 1219 IllegalOperation(num_arguments); |
1216 // Since we did not call the stub, there was no allocation failure. | 1220 // Since we did not call the stub, there was no allocation failure. |
1217 // Return some non-failure object. | 1221 // Return some non-failure object. |
1218 return Heap::undefined_value(); | 1222 return HEAP->undefined_value(); |
1219 } | 1223 } |
1220 | 1224 |
1221 // TODO(1236192): Most runtime routines don't need the number of | 1225 // TODO(1236192): Most runtime routines don't need the number of |
1222 // arguments passed in because it is constant. At some point we | 1226 // arguments passed in because it is constant. At some point we |
1223 // should remove this need and make the runtime routine entry code | 1227 // should remove this need and make the runtime routine entry code |
1224 // smarter. | 1228 // smarter. |
1225 Set(eax, Immediate(num_arguments)); | 1229 Set(eax, Immediate(num_arguments)); |
1226 mov(ebx, Immediate(ExternalReference(f))); | 1230 mov(ebx, Immediate(ExternalReference(f))); |
1227 CEntryStub ces(1); | 1231 CEntryStub ces(1); |
1228 return TryCallStub(&ces); | 1232 return TryCallStub(&ces); |
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1368 sub(Operand::StaticVariable(level_address), Immediate(1)); | 1372 sub(Operand::StaticVariable(level_address), Immediate(1)); |
1369 Assert(above_equal, "Invalid HandleScope level"); | 1373 Assert(above_equal, "Invalid HandleScope level"); |
1370 cmp(edi, Operand::StaticVariable(limit_address)); | 1374 cmp(edi, Operand::StaticVariable(limit_address)); |
1371 j(not_equal, &delete_allocated_handles, not_taken); | 1375 j(not_equal, &delete_allocated_handles, not_taken); |
1372 bind(&leave_exit_frame); | 1376 bind(&leave_exit_frame); |
1373 | 1377 |
1374 // Check if the function scheduled an exception. | 1378 // Check if the function scheduled an exception. |
1375 ExternalReference scheduled_exception_address = | 1379 ExternalReference scheduled_exception_address = |
1376 ExternalReference::scheduled_exception_address(); | 1380 ExternalReference::scheduled_exception_address(); |
1377 cmp(Operand::StaticVariable(scheduled_exception_address), | 1381 cmp(Operand::StaticVariable(scheduled_exception_address), |
1378 Immediate(Factory::the_hole_value())); | 1382 Immediate(FACTORY->the_hole_value())); |
1379 j(not_equal, &promote_scheduled_exception, not_taken); | 1383 j(not_equal, &promote_scheduled_exception, not_taken); |
1380 LeaveApiExitFrame(); | 1384 LeaveApiExitFrame(); |
1381 ret(stack_space * kPointerSize); | 1385 ret(stack_space * kPointerSize); |
1382 bind(&promote_scheduled_exception); | 1386 bind(&promote_scheduled_exception); |
1383 MaybeObject* result = | 1387 MaybeObject* result = |
1384 TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); | 1388 TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); |
1385 if (result->IsFailure()) { | 1389 if (result->IsFailure()) { |
1386 return result; | 1390 return result; |
1387 } | 1391 } |
1388 bind(&empty_handle); | 1392 bind(&empty_handle); |
1389 // It was zero; the result is undefined. | 1393 // It was zero; the result is undefined. |
1390 mov(eax, Factory::undefined_value()); | 1394 mov(eax, FACTORY->undefined_value()); |
1391 jmp(&prologue); | 1395 jmp(&prologue); |
1392 | 1396 |
1393 // HandleScope limit has changed. Delete allocated extensions. | 1397 // HandleScope limit has changed. Delete allocated extensions. |
1394 bind(&delete_allocated_handles); | 1398 bind(&delete_allocated_handles); |
1395 mov(Operand::StaticVariable(limit_address), edi); | 1399 mov(Operand::StaticVariable(limit_address), edi); |
1396 mov(edi, eax); | 1400 mov(edi, eax); |
| 1401 mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address())); |
1397 mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions())); | 1402 mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions())); |
1398 call(Operand(eax)); | 1403 call(Operand(eax)); |
1399 mov(eax, edi); | 1404 mov(eax, edi); |
1400 jmp(&leave_exit_frame); | 1405 jmp(&leave_exit_frame); |
1401 | 1406 |
1402 return result; | 1407 return result; |
1403 } | 1408 } |
1404 | 1409 |
1405 | 1410 |
1406 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) { | 1411 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) { |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1460 // is the case when we invoke functions using call and apply. | 1465 // is the case when we invoke functions using call and apply. |
1461 cmp(expected.reg(), Operand(actual.reg())); | 1466 cmp(expected.reg(), Operand(actual.reg())); |
1462 j(equal, &invoke); | 1467 j(equal, &invoke); |
1463 ASSERT(actual.reg().is(eax)); | 1468 ASSERT(actual.reg().is(eax)); |
1464 ASSERT(expected.reg().is(ebx)); | 1469 ASSERT(expected.reg().is(ebx)); |
1465 } | 1470 } |
1466 } | 1471 } |
1467 | 1472 |
1468 if (!definitely_matches) { | 1473 if (!definitely_matches) { |
1469 Handle<Code> adaptor = | 1474 Handle<Code> adaptor = |
1470 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); | 1475 Handle<Code>(Isolate::Current()->builtins()->builtin( |
| 1476 Builtins::ArgumentsAdaptorTrampoline)); |
1471 if (!code_constant.is_null()) { | 1477 if (!code_constant.is_null()) { |
1472 mov(edx, Immediate(code_constant)); | 1478 mov(edx, Immediate(code_constant)); |
1473 add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); | 1479 add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); |
1474 } else if (!code_operand.is_reg(edx)) { | 1480 } else if (!code_operand.is_reg(edx)) { |
1475 mov(edx, code_operand); | 1481 mov(edx, code_operand); |
1476 } | 1482 } |
1477 | 1483 |
1478 if (flag == CALL_FUNCTION) { | 1484 if (flag == CALL_FUNCTION) { |
1479 call(adaptor, RelocInfo::CODE_TARGET); | 1485 call(adaptor, RelocInfo::CODE_TARGET); |
1480 if (post_call_generator != NULL) post_call_generator->Generate(); | 1486 if (post_call_generator != NULL) post_call_generator->Generate(); |
(...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1638 mov(function, Operand(function, Context::SlotOffset(index))); | 1644 mov(function, Operand(function, Context::SlotOffset(index))); |
1639 } | 1645 } |
1640 | 1646 |
1641 | 1647 |
1642 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 1648 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
1643 Register map) { | 1649 Register map) { |
1644 // Load the initial map. The global functions all have initial maps. | 1650 // Load the initial map. The global functions all have initial maps. |
1645 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 1651 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
1646 if (emit_debug_code()) { | 1652 if (emit_debug_code()) { |
1647 Label ok, fail; | 1653 Label ok, fail; |
1648 CheckMap(map, Factory::meta_map(), &fail, false); | 1654 CheckMap(map, FACTORY->meta_map(), &fail, false); |
1649 jmp(&ok); | 1655 jmp(&ok); |
1650 bind(&fail); | 1656 bind(&fail); |
1651 Abort("Global functions must have initial map"); | 1657 Abort("Global functions must have initial map"); |
1652 bind(&ok); | 1658 bind(&ok); |
1653 } | 1659 } |
1654 } | 1660 } |
1655 | 1661 |
1656 | 1662 |
1657 // Store the value in register src in the safepoint register stack | 1663 // Store the value in register src in the safepoint register stack |
1658 // slot for register dst. | 1664 // slot for register dst. |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1788 | 1794 |
1789 void MacroAssembler::Assert(Condition cc, const char* msg) { | 1795 void MacroAssembler::Assert(Condition cc, const char* msg) { |
1790 if (emit_debug_code()) Check(cc, msg); | 1796 if (emit_debug_code()) Check(cc, msg); |
1791 } | 1797 } |
1792 | 1798 |
1793 | 1799 |
1794 void MacroAssembler::AssertFastElements(Register elements) { | 1800 void MacroAssembler::AssertFastElements(Register elements) { |
1795 if (emit_debug_code()) { | 1801 if (emit_debug_code()) { |
1796 Label ok; | 1802 Label ok; |
1797 cmp(FieldOperand(elements, HeapObject::kMapOffset), | 1803 cmp(FieldOperand(elements, HeapObject::kMapOffset), |
1798 Immediate(Factory::fixed_array_map())); | 1804 Immediate(FACTORY->fixed_array_map())); |
1799 j(equal, &ok); | 1805 j(equal, &ok); |
1800 cmp(FieldOperand(elements, HeapObject::kMapOffset), | 1806 cmp(FieldOperand(elements, HeapObject::kMapOffset), |
1801 Immediate(Factory::fixed_cow_array_map())); | 1807 Immediate(FACTORY->fixed_cow_array_map())); |
1802 j(equal, &ok); | 1808 j(equal, &ok); |
1803 Abort("JSObject with fast elements map has slow elements"); | 1809 Abort("JSObject with fast elements map has slow elements"); |
1804 bind(&ok); | 1810 bind(&ok); |
1805 } | 1811 } |
1806 } | 1812 } |
1807 | 1813 |
1808 | 1814 |
1809 void MacroAssembler::Check(Condition cc, const char* msg) { | 1815 void MacroAssembler::Check(Condition cc, const char* msg) { |
1810 Label L; | 1816 Label L; |
1811 j(cc, &L, taken); | 1817 j(cc, &L, taken); |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1856 int3(); | 1862 int3(); |
1857 } | 1863 } |
1858 | 1864 |
1859 | 1865 |
1860 void MacroAssembler::JumpIfNotNumber(Register reg, | 1866 void MacroAssembler::JumpIfNotNumber(Register reg, |
1861 TypeInfo info, | 1867 TypeInfo info, |
1862 Label* on_not_number) { | 1868 Label* on_not_number) { |
1863 if (emit_debug_code()) AbortIfSmi(reg); | 1869 if (emit_debug_code()) AbortIfSmi(reg); |
1864 if (!info.IsNumber()) { | 1870 if (!info.IsNumber()) { |
1865 cmp(FieldOperand(reg, HeapObject::kMapOffset), | 1871 cmp(FieldOperand(reg, HeapObject::kMapOffset), |
1866 Factory::heap_number_map()); | 1872 FACTORY->heap_number_map()); |
1867 j(not_equal, on_not_number); | 1873 j(not_equal, on_not_number); |
1868 } | 1874 } |
1869 } | 1875 } |
1870 | 1876 |
1871 | 1877 |
1872 void MacroAssembler::ConvertToInt32(Register dst, | 1878 void MacroAssembler::ConvertToInt32(Register dst, |
1873 Register source, | 1879 Register source, |
1874 Register scratch, | 1880 Register scratch, |
1875 TypeInfo info, | 1881 TypeInfo info, |
1876 Label* on_not_int32) { | 1882 Label* on_not_int32) { |
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1961 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); | 1967 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); |
1962 and_(scratch1, kFlatAsciiStringMask); | 1968 and_(scratch1, kFlatAsciiStringMask); |
1963 and_(scratch2, kFlatAsciiStringMask); | 1969 and_(scratch2, kFlatAsciiStringMask); |
1964 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); | 1970 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); |
1965 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); | 1971 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); |
1966 j(not_equal, failure); | 1972 j(not_equal, failure); |
1967 } | 1973 } |
1968 | 1974 |
1969 | 1975 |
1970 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) { | 1976 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) { |
| 1977 // Reserve space for Isolate address which is always passed as last parameter |
| 1978 num_arguments += 1; |
| 1979 |
1971 int frameAlignment = OS::ActivationFrameAlignment(); | 1980 int frameAlignment = OS::ActivationFrameAlignment(); |
1972 if (frameAlignment != 0) { | 1981 if (frameAlignment != 0) { |
1973 // Make stack end at alignment and make room for num_arguments words | 1982 // Make stack end at alignment and make room for num_arguments words |
1974 // and the original value of esp. | 1983 // and the original value of esp. |
1975 mov(scratch, esp); | 1984 mov(scratch, esp); |
1976 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize)); | 1985 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize)); |
1977 ASSERT(IsPowerOf2(frameAlignment)); | 1986 ASSERT(IsPowerOf2(frameAlignment)); |
1978 and_(esp, -frameAlignment); | 1987 and_(esp, -frameAlignment); |
1979 mov(Operand(esp, num_arguments * kPointerSize), scratch); | 1988 mov(Operand(esp, num_arguments * kPointerSize), scratch); |
1980 } else { | 1989 } else { |
1981 sub(Operand(esp), Immediate(num_arguments * kPointerSize)); | 1990 sub(Operand(esp), Immediate(num_arguments * kPointerSize)); |
1982 } | 1991 } |
1983 } | 1992 } |
1984 | 1993 |
1985 | 1994 |
1986 void MacroAssembler::CallCFunction(ExternalReference function, | 1995 void MacroAssembler::CallCFunction(ExternalReference function, |
1987 int num_arguments) { | 1996 int num_arguments) { |
1988 // Trashing eax is ok as it will be the return value. | 1997 // Trashing eax is ok as it will be the return value. |
1989 mov(Operand(eax), Immediate(function)); | 1998 mov(Operand(eax), Immediate(function)); |
1990 CallCFunction(eax, num_arguments); | 1999 CallCFunction(eax, num_arguments); |
1991 } | 2000 } |
1992 | 2001 |
1993 | 2002 |
1994 void MacroAssembler::CallCFunction(Register function, | 2003 void MacroAssembler::CallCFunction(Register function, |
1995 int num_arguments) { | 2004 int num_arguments) { |
| 2005 // Pass current isolate address as additional parameter. |
| 2006 mov(Operand(esp, num_arguments * kPointerSize), |
| 2007 Immediate(ExternalReference::isolate_address())); |
| 2008 num_arguments += 1; |
| 2009 |
1996 // Check stack alignment. | 2010 // Check stack alignment. |
1997 if (emit_debug_code()) { | 2011 if (emit_debug_code()) { |
1998 CheckStackAlignment(); | 2012 CheckStackAlignment(); |
1999 } | 2013 } |
2000 | 2014 |
2001 call(Operand(function)); | 2015 call(Operand(function)); |
2002 if (OS::ActivationFrameAlignment() != 0) { | 2016 if (OS::ActivationFrameAlignment() != 0) { |
2003 mov(esp, Operand(esp, num_arguments * kPointerSize)); | 2017 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
2004 } else { | 2018 } else { |
2005 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t))); | 2019 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t))); |
(...skipping 16 matching lines...) Expand all Loading... |
2022 | 2036 |
2023 // Check that the code was patched as expected. | 2037 // Check that the code was patched as expected. |
2024 ASSERT(masm_.pc_ == address_ + size_); | 2038 ASSERT(masm_.pc_ == address_ + size_); |
2025 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2039 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2026 } | 2040 } |
2027 | 2041 |
2028 | 2042 |
2029 } } // namespace v8::internal | 2043 } } // namespace v8::internal |
2030 | 2044 |
2031 #endif // V8_TARGET_ARCH_IA32 | 2045 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |