Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(553)

Unified Diff: src/ia32/macro-assembler-ia32.cc

Issue 6529032: Merge 6168:6800 from bleeding_edge to experimental/gc branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « src/ia32/macro-assembler-ia32.h ('k') | src/ia32/stub-cache-ia32.cc » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: src/ia32/macro-assembler-ia32.cc
===================================================================
--- src/ia32/macro-assembler-ia32.cc (revision 6800)
+++ src/ia32/macro-assembler-ia32.cc (working copy)
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -86,11 +86,6 @@
Register value,
Register scratch,
SaveFPRegsMode save_fp) {
- // The compiled code assumes that record write doesn't change the
- // context register, so we check that none of the clobbered
- // registers are esi.
- ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
-
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
NearLabel done;
@@ -138,11 +133,6 @@
Register address,
Register value,
SaveFPRegsMode save_fp) {
- // The compiled code assumes that record write doesn't change the
- // context register, so we check that none of the clobbered
- // registers are esi.
- ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));
-
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
Label done;
@@ -348,7 +338,7 @@
CpuFeatures::Scope scope(SSE2);
int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
sub(Operand(esp), Immediate(space));
- int offset = -2 * kPointerSize;
+ const int offset = -2 * kPointerSize;
for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
XMMRegister reg = XMMRegister::from_code(i);
movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
@@ -391,22 +381,10 @@
// Optionally restore all XMM registers.
if (save_doubles) {
CpuFeatures::Scope scope(SSE2);
- if (save_doubles) {
- int offset = -2 * kPointerSize;
- for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
- XMMRegister reg = XMMRegister::from_code(i);
- movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
- }
- } else if (FLAG_debug_code) {
- // Zap all fp registers on a runtime call if we were not asked to preserve
- // them.
- push(eax);
- mov(eax, Factory::nan_value());
- for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
- XMMRegister reg = XMMRegister::from_code(i);
- movdbl(reg, FieldOperand(eax, HeapNumber::kValueOffset));
- }
- pop(eax);
+ const int offset = -2 * kPointerSize;
+ for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
+ XMMRegister reg = XMMRegister::from_code(i);
+ movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
}
}
@@ -479,6 +457,97 @@
}
+void MacroAssembler::Throw(Register value) {
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+ // eax must hold the exception.
+ if (!value.is(eax)) {
+ mov(eax, value);
+ }
+
+ // Drop the sp to the top of the handler.
+ ExternalReference handler_address(Top::k_handler_address);
+ mov(esp, Operand::StaticVariable(handler_address));
+
+ // Restore next handler and frame pointer, discard handler state.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ pop(Operand::StaticVariable(handler_address));
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
+ pop(ebp);
+ pop(edx); // Remove state.
+
+ // Before returning we restore the context from the frame pointer if
+ // not NULL. The frame pointer is NULL in the exception handler of
+ // a JS entry frame.
+ Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
+ NearLabel skip;
+ cmp(ebp, 0);
+ j(equal, &skip, not_taken);
+ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ bind(&skip);
+
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ ret(0);
+}
+
+
+void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
+ Register value) {
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+ // eax must hold the exception.
+ if (!value.is(eax)) {
+ mov(eax, value);
+ }
+
+ // Drop sp to the top stack handler.
+ ExternalReference handler_address(Top::k_handler_address);
+ mov(esp, Operand::StaticVariable(handler_address));
+
+ // Unwind the handlers until the ENTRY handler is found.
+ NearLabel loop, done;
+ bind(&loop);
+ // Load the type of the current stack handler.
+ const int kStateOffset = StackHandlerConstants::kStateOffset;
+ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
+ j(equal, &done);
+ // Fetch the next handler in the list.
+ const int kNextOffset = StackHandlerConstants::kNextOffset;
+ mov(esp, Operand(esp, kNextOffset));
+ jmp(&loop);
+ bind(&done);
+
+ // Set the top handler address to next handler past the current ENTRY handler.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ pop(Operand::StaticVariable(handler_address));
+
+ if (type == OUT_OF_MEMORY) {
+ // Set external caught exception to false.
+ ExternalReference external_caught(Top::k_external_caught_exception_address);
+ mov(eax, false);
+ mov(Operand::StaticVariable(external_caught), eax);
+
+ // Set pending exception and eax to out of memory exception.
+ ExternalReference pending_exception(Top::k_pending_exception_address);
+ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
+ mov(Operand::StaticVariable(pending_exception), eax);
+ }
+
+ // Clear the context pointer.
+ Set(esi, Immediate(0));
+
+ // Restore fp from handler and discard handler state.
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
+ pop(ebp);
+ pop(edx); // State.
+
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ ret(0);
+}
+
+
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
Register scratch,
Label* miss) {
@@ -625,11 +694,11 @@
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
- if (top_reg.is(result)) {
- add(Operand(top_reg), Immediate(object_size));
- } else {
- lea(top_reg, Operand(result, object_size));
+ if (!top_reg.is(result)) {
+ mov(top_reg, result);
}
+ add(Operand(top_reg), Immediate(object_size));
+ j(carry, gc_required, not_taken);
cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
@@ -678,7 +747,12 @@
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
- lea(result_end, Operand(result, element_count, element_size, header_size));
+
+ // We assume that element_count*element_size + header_size does not
+ // overflow.
+ lea(result_end, Operand(element_count, element_size, header_size));
+ add(result_end, Operand(result));
+ j(carry, gc_required);
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required);
@@ -723,6 +797,7 @@
mov(result_end, object_size);
}
add(result_end, Operand(result));
+ j(carry, gc_required, not_taken);
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
@@ -898,55 +973,53 @@
Immediate(Factory::cons_ascii_string_map()));
}
-// All registers must be distinct. Only current_string needs valid contents
-// on entry. All registers may be invalid on exit. result_operand is
-// unchanged, padding_chars is updated correctly.
-void MacroAssembler::AppendStringToTopOfNewSpace(
- Register current_string, // Tagged pointer to string to copy.
- Register current_string_length,
- Register result_pos,
- Register scratch,
- Register new_padding_chars,
- Operand operand_result,
- Operand operand_padding_chars,
- Label* bailout) {
- mov(current_string_length,
- FieldOperand(current_string, String::kLengthOffset));
- shr(current_string_length, 1);
- sub(current_string_length, operand_padding_chars);
- mov(new_padding_chars, current_string_length);
- add(Operand(current_string_length), Immediate(kObjectAlignmentMask));
- and_(Operand(current_string_length), Immediate(~kObjectAlignmentMask));
- sub(new_padding_chars, Operand(current_string_length));
- neg(new_padding_chars);
- // We need an allocation even if current_string_length is 0, to fetch
- // result_pos. Consider using a faster fetch of result_pos in that case.
- AllocateInNewSpace(current_string_length, result_pos, scratch, no_reg,
- bailout, NO_ALLOCATION_FLAGS);
- sub(result_pos, operand_padding_chars);
- mov(operand_padding_chars, new_padding_chars);
- Register scratch_2 = new_padding_chars; // Used to compute total length.
- // Copy string to the end of result.
- mov(current_string_length,
- FieldOperand(current_string, String::kLengthOffset));
- mov(scratch, operand_result);
- mov(scratch_2, current_string_length);
- add(scratch_2, FieldOperand(scratch, String::kLengthOffset));
- mov(FieldOperand(scratch, String::kLengthOffset), scratch_2);
- shr(current_string_length, 1);
- lea(current_string,
- FieldOperand(current_string, SeqAsciiString::kHeaderSize));
- // Loop condition: while (--current_string_length >= 0).
- Label copy_loop;
- Label copy_loop_entry;
- jmp(&copy_loop_entry);
- bind(&copy_loop);
- mov_b(scratch, Operand(current_string, current_string_length, times_1, 0));
- mov_b(Operand(result_pos, current_string_length, times_1, 0), scratch);
- bind(&copy_loop_entry);
- sub(Operand(current_string_length), Immediate(1));
- j(greater_equal, &copy_loop);
+// Copy memory, byte-by-byte, from source to destination. Not optimized for
+// long or aligned copies. The contents of scratch and length are destroyed.
+// Source and destination are incremented by length.
+// Many variants of movsb, loop unrolling, word moves, and indexed operands
+// have been tried here already, and this is fastest.
+// A simpler loop is faster on small copies, but 30% slower on large ones.
+// The cld() instruction must have been emitted, to clear the direction flag,
+// before calling this function.
+void MacroAssembler::CopyBytes(Register source,
+ Register destination,
+ Register length,
+ Register scratch) {
+ Label loop, done, short_string, short_loop;
+ // Experimentation shows that the short string loop is faster if length < 10.
+ cmp(Operand(length), Immediate(10));
+ j(less_equal, &short_string);
+
+ ASSERT(source.is(esi));
+ ASSERT(destination.is(edi));
+ ASSERT(length.is(ecx));
+
+ // Because source is 4-byte aligned in our uses of this function,
+ // we keep source aligned for the rep_movs call by copying the odd bytes
+ // at the end of the ranges.
+ mov(scratch, Operand(source, length, times_1, -4));
+ mov(Operand(destination, length, times_1, -4), scratch);
+ mov(scratch, ecx);
+ shr(ecx, 2);
+ rep_movs();
+ and_(Operand(scratch), Immediate(0x3));
+ add(destination, Operand(scratch));
+ jmp(&done);
+
+ bind(&short_string);
+ test(length, Operand(length));
+ j(zero, &done);
+
+ bind(&short_loop);
+ mov_b(scratch, Operand(source, 0));
+ mov_b(Operand(destination, 0), scratch);
+ inc(source);
+ inc(destination);
+ dec(length);
+ j(not_zero, &short_loop);
+
+ bind(&done);
}
@@ -1310,7 +1383,7 @@
ExternalReference scheduled_exception_address =
ExternalReference::scheduled_exception_address();
cmp(Operand::StaticVariable(scheduled_exception_address),
- Immediate(Factory::the_hole_value()));
+ Immediate(Factory::the_hole_value()));
j(not_equal, &promote_scheduled_exception, not_taken);
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
@@ -1545,12 +1618,22 @@
mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
}
- // The context may be an intermediate context, not a function context.
- mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
- } else { // Slot is in the current function context.
- // The context may be an intermediate context, not a function context.
- mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+ } else {
+ // Slot is in the current function context. Move it into the
+ // destination register in case we store into it (the write barrier
+ // cannot be allowed to destroy the context in esi).
+ mov(dst, esi);
}
+
+ // We should not have found a 'with' context by walking the context chain
+ // (i.e., the static scope chain and runtime context chain do not agree).
+ // A variable occurring in such a scope should have slot type LOOKUP and
+ // not CONTEXT.
+ if (FLAG_debug_code) {
+ cmp(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+ Check(equal, "Yo dawg, I heard you liked function contexts "
+ "so I put function contexts in all your contexts");
+ }
}
@@ -1593,6 +1676,20 @@
}
+void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
+ if (is_uint16(bytes_dropped)) {
+ ret(bytes_dropped);
+ } else {
+ pop(scratch);
+ add(Operand(esp), Immediate(bytes_dropped));
+ push(scratch);
+ ret(0);
+ }
+}
+
+
+
+
void MacroAssembler::Drop(int stack_elements) {
if (stack_elements > 0) {
add(Operand(esp), Immediate(stack_elements * kPointerSize));
@@ -1735,7 +1832,7 @@
}
#endif
// Disable stub call restrictions to always allow calls to abort.
- set_allow_stub_calls(true);
+ AllowStubCallsScope allow_scope(this, true);
push(eax);
push(Immediate(p0));
« no previous file with comments | « src/ia32/macro-assembler-ia32.h ('k') | src/ia32/stub-cache-ia32.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698