Chromium Code Reviews

Unified Diff: src/ia32/full-codegen-ia32.cc

Issue 6529032: Merge 6168:6800 from bleeding_edge to experimental/gc branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 10 months ago
Index: src/ia32/full-codegen-ia32.cc
===================================================================
--- src/ia32/full-codegen-ia32.cc (revision 6800)
+++ src/ia32/full-codegen-ia32.cc (working copy)
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -47,8 +47,7 @@
class JumpPatchSite BASE_EMBEDDED {
public:
- explicit JumpPatchSite(MacroAssembler* masm)
- : masm_(masm) {
+ explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
info_emitted_ = false;
#endif
@@ -60,7 +59,7 @@
void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
__ test(reg, Immediate(kSmiTagMask));
- EmitJump(not_carry, target); // Always taken before patched.
+ EmitJump(not_carry, target); // Always taken before patched.
}
void EmitJumpIfSmi(Register reg, NearLabel* target) {
@@ -206,45 +205,48 @@
Move(dot_arguments_slot, ecx, ebx, edx);
}
- { Comment cmnt(masm_, "[ Declarations");
- // For named function expressions, declare the function name as a
- // constant.
- if (scope()->is_function_scope() && scope()->function() != NULL) {
- EmitDeclaration(scope()->function(), Variable::CONST, NULL);
- }
- // Visit all the explicit declarations unless there is an illegal
- // redeclaration.
- if (scope()->HasIllegalRedeclaration()) {
- scope()->VisitIllegalRedeclaration(this);
- } else {
- VisitDeclarations(scope()->declarations());
- }
- }
-
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
- { Comment cmnt(masm_, "[ Stack check");
- PrepareForBailout(info->function(), NO_REGISTERS);
- NearLabel ok;
- ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
- __ cmp(esp, Operand::StaticVariable(stack_limit));
- __ j(above_equal, &ok, taken);
- StackCheckStub stub;
- __ CallStub(&stub);
- __ bind(&ok);
- }
+ // Visit the declarations and body unless there is an illegal
+ // redeclaration.
+ if (scope()->HasIllegalRedeclaration()) {
+ Comment cmnt(masm_, "[ Declarations");
+ scope()->VisitIllegalRedeclaration(this);
- { Comment cmnt(masm_, "[ Body");
- ASSERT(loop_depth() == 0);
- VisitStatements(function()->body());
- ASSERT(loop_depth() == 0);
+ } else {
+ { Comment cmnt(masm_, "[ Declarations");
+ // For named function expressions, declare the function name as a
+ // constant.
+ if (scope()->is_function_scope() && scope()->function() != NULL) {
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ }
+ VisitDeclarations(scope()->declarations());
+ }
+
+ { Comment cmnt(masm_, "[ Stack check");
+ PrepareForBailout(info->function(), NO_REGISTERS);
+ NearLabel ok;
+ ExternalReference stack_limit =
+ ExternalReference::address_of_stack_limit();
+ __ cmp(esp, Operand::StaticVariable(stack_limit));
+ __ j(above_equal, &ok, taken);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ __ bind(&ok);
+ }
+
+ { Comment cmnt(masm_, "[ Body");
+ ASSERT(loop_depth() == 0);
+ VisitStatements(function()->body());
+ ASSERT(loop_depth() == 0);
+ }
}
+ // Always emit a 'return undefined' in case control fell off the end of
+ // the body.
{ Comment cmnt(masm_, "[ return <undefined>;");
- // Emit a 'return undefined' in case control fell off the end of the body.
__ mov(eax, Factory::undefined_value());
EmitReturnSequence();
}
@@ -264,16 +266,24 @@
__ j(above_equal, &ok, taken);
StackCheckStub stub;
__ CallStub(&stub);
+ // Record a mapping of this PC offset to the OSR id. This is used to find
+ // the AST id from the unoptimized code in order to use it as a key into
+ // the deoptimization input data found in the optimized code.
+ RecordStackCheck(stmt->OsrEntryId());
+
+ // Loop stack checks can be patched to perform on-stack replacement. In
+ // order to decide whether or not to perform OSR we embed the loop depth
+ // in a test instruction after the call so we can extract it from the OSR
+ // builtin.
+ ASSERT(loop_depth() > 0);
+ __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+ // Record a mapping of the OSR id to this PC. This is used if the OSR
+ // entry becomes the target of a bailout. We don't expect it to be, but
+ // we want it to work if it is.
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
- RecordStackCheck(stmt->OsrEntryId());
- // Loop stack checks can be patched to perform on-stack
- // replacement. In order to decide whether or not to perform OSR we
- // embed the loop depth in a test instruction after the call so we
- // can extract it from the OSR builtin.
- ASSERT(loop_depth() > 0);
- __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
}
@@ -299,12 +309,14 @@
// patch with the code required by the debugger.
__ mov(esp, ebp);
__ pop(ebp);
- __ ret((scope()->num_parameters() + 1) * kPointerSize);
+
+ int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
+ __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
- // Check that the size of the code used for returning matches what is
- // expected by the debugger.
- ASSERT_EQ(Assembler::kJSReturnSequenceLength,
- masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
+ // Check that the size of the code used for returning is large enough
+ // for the debugger's requirements.
+ ASSERT(Assembler::kJSReturnSequenceLength <=
+ masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}
}
@@ -603,7 +615,8 @@
// Emit the write barrier code if the location is in the heap.
if (dst->type() == Slot::CONTEXT) {
- int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
+ int offset = Context::SlotOffset(dst->index());
+ ASSERT(!scratch1.is(esi) && !src.is(esi) && !scratch2.is(esi));
__ RecordWrite(scratch1, offset, src, scratch2, kDontSaveFPRegs);
}
}
@@ -659,10 +672,11 @@
// We bypass the general EmitSlotSearch because we know more about
// this specific context.
- // The variable in the decl always resides in the current context.
+ // The variable in the decl always resides in the current function
+ // context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) {
- // Check if we have the correct context pointer.
+ // Check that we're not inside a 'with'.
__ mov(ebx, ContextOperand(esi, Context::FCONTEXT_INDEX));
__ cmp(ebx, Operand(esi));
__ Check(equal, "Unexpected declaration in current context.");
@@ -706,18 +720,25 @@
} else if (prop != NULL) {
if (function != NULL || mode == Variable::CONST) {
// We are declaring a function or constant that rewrites to a
- // property. Use (keyed) IC to set the initial value.
- VisitForStackValue(prop->obj());
+ // property. Use (keyed) IC to set the initial value. We cannot
+ // visit the rewrite because it's shared and we risk recording
+ // duplicate AST IDs for bailouts from optimized code.
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
+
if (function != NULL) {
- VisitForStackValue(prop->key());
+ __ push(eax);
VisitForAccumulatorValue(function);
- __ pop(ecx);
+ __ pop(edx);
} else {
- VisitForAccumulatorValue(prop->key());
- __ mov(ecx, result_register());
- __ mov(result_register(), Factory::the_hole_value());
+ __ mov(edx, eax);
+ __ mov(eax, Factory::the_hole_value());
}
- __ pop(edx);
+ ASSERT(prop->key()->AsLiteral() != NULL &&
+ prop->key()->AsLiteral()->handle()->IsSmi());
+ __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -757,6 +778,8 @@
// Compile all the tests with branches to their bodies.
for (int i = 0; i < clauses->length(); i++) {
CaseClause* clause = clauses->at(i);
+ clause->body_target()->entry_label()->Unuse();
+
// The default is not a test, but remember it as final fall through.
if (clause->is_default()) {
default_clause = clause;
@@ -1115,8 +1138,11 @@
// Check that last extension is NULL.
__ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
__ j(not_equal, slow);
- __ mov(temp, ContextOperand(context, Context::FCONTEXT_INDEX));
- return ContextOperand(temp, slot->index());
+
+ // This function is used only for loads, not stores, so it's safe to
+ // return an esi-based operand (the write barrier cannot be allowed to
+ // destroy the esi register).
+ return ContextOperand(context, slot->index());
}
@@ -1498,7 +1524,9 @@
if (expr->is_compound()) {
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+ MemOperand slot_operand =
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
+ __ push(slot_operand);
__ mov(eax, Immediate(property->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(property->obj());
@@ -1509,7 +1537,9 @@
} else {
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+ MemOperand slot_operand =
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
+ __ push(slot_operand);
__ push(Immediate(property->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(property->obj());
@@ -1955,10 +1985,20 @@
}
case KEYED_PROPERTY: {
__ push(eax); // Preserve value.
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- __ mov(ecx, eax);
- __ pop(edx);
+ if (prop->is_synthetic()) {
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ ASSERT(prop->key()->AsLiteral() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
+ __ mov(edx, eax);
+ __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ __ mov(ecx, eax);
+ __ pop(edx);
+ }
__ pop(eax); // Restore value.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1984,61 +2024,81 @@
// ecx, and the global object on the stack.
__ mov(ecx, var->name());
__ mov(edx, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
- } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
- // Perform the assignment for non-const variables and for initialization
- // of const variables. Const assignments are simply skipped.
- Label done;
+ } else if (op == Token::INIT_CONST) {
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are able
+ // to drill a hole to that function context, even from inside a 'with'
+ // context. We thus bypass the normal static scope lookup.
Slot* slot = var->AsSlot();
+ Label skip;
switch (slot->type()) {
case Slot::PARAMETER:
+ // No const parameters.
+ UNREACHABLE();
+ break;
case Slot::LOCAL:
- if (op == Token::INIT_CONST) {
- // Detect const reinitialization by checking for the hole value.
- __ mov(edx, Operand(ebp, SlotOffset(slot)));
- __ cmp(edx, Factory::the_hole_value());
- __ j(not_equal, &done);
- }
+ __ mov(edx, Operand(ebp, SlotOffset(slot)));
+ __ cmp(edx, Factory::the_hole_value());
+ __ j(not_equal, &skip);
+ __ mov(Operand(ebp, SlotOffset(slot)), eax);
+ break;
+ case Slot::CONTEXT: {
+ __ mov(ecx, ContextOperand(esi, Context::FCONTEXT_INDEX));
+ __ mov(edx, ContextOperand(ecx, slot->index()));
+ __ cmp(edx, Factory::the_hole_value());
+ __ j(not_equal, &skip);
+ __ mov(ContextOperand(ecx, slot->index()), eax);
+ int offset = Context::SlotOffset(slot->index());
+ __ mov(edx, eax); // Preserve the stored value in eax.
+ __ RecordWrite(ecx, offset, edx, ebx, kDontSaveFPRegs);
+ break;
+ }
+ case Slot::LOOKUP:
+ __ push(eax);
+ __ push(esi);
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
+ break;
+ }
+ __ bind(&skip);
+
+ } else if (var->mode() != Variable::CONST) {
+ // Perform the assignment for non-const variables. Const assignments
+ // are simply skipped.
+ Slot* slot = var->AsSlot();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
// Perform the assignment.
__ mov(Operand(ebp, SlotOffset(slot)), eax);
break;
case Slot::CONTEXT: {
MemOperand target = EmitSlotSearch(slot, ecx);
- if (op == Token::INIT_CONST) {
- // Detect const reinitialization by checking for the hole value.
- __ mov(edx, target);
- __ cmp(edx, Factory::the_hole_value());
- __ j(not_equal, &done);
- }
// Perform the assignment and issue the write barrier.
__ mov(target, eax);
// The value of the assignment is in eax. RecordWrite clobbers its
// register arguments.
__ mov(edx, eax);
- int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
+ int offset = Context::SlotOffset(slot->index());
__ RecordWrite(ecx, offset, edx, ebx, kDontSaveFPRegs);
break;
}
case Slot::LOOKUP:
- // Call the runtime for the assignment. The runtime will ignore
- // const reinitialization.
+ // Call the runtime for the assignment.
__ push(eax); // Value.
__ push(esi); // Context.
__ push(Immediate(var->name()));
- if (op == Token::INIT_CONST) {
- // The runtime will ignore const redeclaration.
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- } else {
- __ CallRuntime(Runtime::kStoreContextSlot, 3);
- }
+ __ CallRuntime(Runtime::kStoreContextSlot, 3);
break;
}
- __ bind(&done);
}
}
@@ -2258,7 +2318,9 @@
// Push the receiver of the enclosing function and do runtime call.
__ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
+ // Push the strict mode flag.
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
// The runtime call returns a pair of values in eax (function) and
// edx (receiver). Touch up the stack with the right values.
@@ -2329,16 +2391,21 @@
// Call to a keyed property.
// For a synthetic property use keyed load IC followed by function call,
// for a regular property use keyed EmitCallIC.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
if (prop->is_synthetic()) {
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForAccumulatorValue(prop->key());
- }
+ // Do not visit the object and key subexpressions (they are shared
+ // by all occurrences of the same rewritten parameter).
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
+ Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
+ MemOperand operand = EmitSlotSearch(slot, edx);
+ __ mov(edx, operand);
+
+ ASSERT(prop->key()->AsLiteral() != NULL);
+ ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
+ __ mov(eax, prop->key()->AsLiteral()->handle());
+
// Record source code position for IC call.
SetSourcePosition(prop->position());
- __ pop(edx); // We do not need to keep the receiver.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -2349,6 +2416,9 @@
__ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
EmitCallWithStub(expr);
} else {
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(prop->obj());
+ }
EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
}
}
@@ -3343,39 +3413,37 @@
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
- Label bailout;
- Label done;
+ Label bailout, done, one_char_separator, long_separator,
+ non_trivial_array, not_size_one_array, loop, loop_condition,
+ loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
ASSERT(args->length() == 2);
// We will leave the separator on the stack until the end of the function.
VisitForStackValue(args->at(1));
// Load this to eax (= array)
VisitForAccumulatorValue(args->at(0));
-
// All aliases of the same register have disjoint lifetimes.
Register array = eax;
- Register result_pos = no_reg;
+ Register elements = no_reg; // Will be eax.
- Register index = edi;
+ Register index = edx;
- Register current_string_length = ecx; // Will be ecx when live.
+ Register string_length = ecx;
- Register current_string = edx;
+ Register string = esi;
Register scratch = ebx;
- Register scratch_2 = esi;
- Register new_padding_chars = scratch_2;
+ Register array_length = edi;
+ Register result_pos = no_reg; // Will be edi.
- Operand separator = Operand(esp, 4 * kPointerSize); // Already pushed.
- Operand elements = Operand(esp, 3 * kPointerSize);
- Operand result = Operand(esp, 2 * kPointerSize);
- Operand padding_chars = Operand(esp, 1 * kPointerSize);
- Operand array_length = Operand(esp, 0);
- __ sub(Operand(esp), Immediate(4 * kPointerSize));
-
-
- // Check that eax is a JSArray
+ // Separator operand is already pushed.
+ Operand separator_operand = Operand(esp, 2 * kPointerSize);
+ Operand result_operand = Operand(esp, 1 * kPointerSize);
+ Operand array_length_operand = Operand(esp, 0);
+ __ sub(Operand(esp), Immediate(2 * kPointerSize));
+ __ cld();
+ // Check that the array is a JSArray
__ test(array, Immediate(kSmiTagMask));
__ j(zero, &bailout);
__ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
@@ -3386,140 +3454,226 @@
1 << Map::kHasFastElements);
__ j(zero, &bailout);
- // If the array is empty, return the empty string.
- __ mov(scratch, FieldOperand(array, JSArray::kLengthOffset));
- __ sar(scratch, 1);
- Label non_trivial;
- __ j(not_zero, &non_trivial);
- __ mov(result, Factory::empty_string());
+ // If the array has length zero, return the empty string.
+ __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
+ __ sar(array_length, 1);
+ __ j(not_zero, &non_trivial_array);
+ __ mov(result_operand, Factory::empty_string());
__ jmp(&done);
- __ bind(&non_trivial);
- __ mov(array_length, scratch);
+ // Save the array length.
+ __ bind(&non_trivial_array);
+ __ mov(array_length_operand, array_length);
- __ mov(scratch, FieldOperand(array, JSArray::kElementsOffset));
- __ mov(elements, scratch);
-
+ // Save the FixedArray containing array's elements.
// End of array's live range.
- result_pos = array;
+ elements = array;
+ __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
array = no_reg;
- // Check that the separator is a flat ascii string.
- __ mov(current_string, separator);
- __ test(current_string, Immediate(kSmiTagMask));
+ // Check that all array elements are sequential ASCII strings, and
+ // accumulate the sum of their lengths, as a smi-encoded value.
+ __ Set(index, Immediate(0));
+ __ Set(string_length, Immediate(0));
+ // Loop condition: while (index < length).
+ // Live loop registers: index, array_length, string,
+ // scratch, string_length, elements.
+ __ jmp(&loop_condition);
+ __ bind(&loop);
+ __ cmp(index, Operand(array_length));
+ __ j(greater_equal, &done);
+
+ __ mov(string, FieldOperand(elements, index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ test(string, Immediate(kSmiTagMask));
__ j(zero, &bailout);
- __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset));
- __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
+ __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ and_(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
__ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
__ j(not_equal, &bailout);
- // If the separator is the empty string, replace it with NULL.
- // The test for NULL is quicker than the empty string test, in a loop.
- __ cmp(FieldOperand(current_string, SeqAsciiString::kLengthOffset),
- Immediate(0));
- Label separator_checked;
- __ j(not_zero, &separator_checked);
- __ mov(separator, Immediate(0));
- __ bind(&separator_checked);
+ __ add(string_length,
+ FieldOperand(string, SeqAsciiString::kLengthOffset));
+ __ j(overflow, &bailout);
+ __ add(Operand(index), Immediate(1));
+ __ bind(&loop_condition);
+ __ cmp(index, Operand(array_length));
+ __ j(less, &loop);
- // Check that elements[0] is a flat ascii string, and copy it in new space.
- __ mov(scratch, elements);
- __ mov(current_string, FieldOperand(scratch, FixedArray::kHeaderSize));
- __ test(current_string, Immediate(kSmiTagMask));
+ // If array_length is 1, return elements[0], a string.
+ __ cmp(array_length, 1);
+ __ j(not_equal, &not_size_one_array);
+ __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
+ __ mov(result_operand, scratch);
+ __ jmp(&done);
+
+ __ bind(&not_size_one_array);
+
+ // End of array_length live range.
+ result_pos = array_length;
+ array_length = no_reg;
+
+ // Live registers:
+ // string_length: Sum of string lengths, as a smi.
+ // elements: FixedArray of strings.
+
+ // Check that the separator is a flat ASCII string.
+ __ mov(string, separator_operand);
+ __ test(string, Immediate(kSmiTagMask));
__ j(zero, &bailout);
- __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset));
- __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
+ __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ and_(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
__ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
__ j(not_equal, &bailout);
- // Allocate space to copy it. Round up the size to the alignment granularity.
- __ mov(current_string_length,
- FieldOperand(current_string, String::kLengthOffset));
- __ shr(current_string_length, 1);
+ // Add (separator length times array_length) - separator length
+ // to string_length.
+ __ mov(scratch, separator_operand);
+ __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
+ __ sub(string_length, Operand(scratch)); // May be negative, temporarily.
+ __ imul(scratch, array_length_operand);
+ __ j(overflow, &bailout);
+ __ add(string_length, Operand(scratch));
+ __ j(overflow, &bailout);
+ __ shr(string_length, 1);
// Live registers and stack values:
- // current_string_length: length of elements[0].
+ // string_length
+ // elements
+ __ AllocateAsciiString(result_pos, string_length, scratch,
+ index, string, &bailout);
+ __ mov(result_operand, result_pos);
+ __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
- // New string result in new space = elements[0]
- __ AllocateAsciiString(result_pos, current_string_length, scratch_2,
- index, no_reg, &bailout);
- __ mov(result, result_pos);
- // Adjust current_string_length to include padding bytes at end of string.
- // Keep track of the number of padding bytes.
- __ mov(new_padding_chars, current_string_length);
- __ add(Operand(current_string_length), Immediate(kObjectAlignmentMask));
- __ and_(Operand(current_string_length), Immediate(~kObjectAlignmentMask));
- __ sub(new_padding_chars, Operand(current_string_length));
- __ neg(new_padding_chars);
- __ mov(padding_chars, new_padding_chars);
+ __ mov(string, separator_operand);
+ __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ __ j(equal, &one_char_separator);
+ __ j(greater, &long_separator);
- Label copy_loop_1_done;
- Label copy_loop_1;
- __ test(current_string_length, Operand(current_string_length));
- __ j(zero, &copy_loop_1_done);
- __ bind(&copy_loop_1);
- __ sub(Operand(current_string_length), Immediate(kPointerSize));
- __ mov(scratch, FieldOperand(current_string, current_string_length,
- times_1, SeqAsciiString::kHeaderSize));
- __ mov(FieldOperand(result_pos, current_string_length,
- times_1, SeqAsciiString::kHeaderSize),
- scratch);
- __ j(not_zero, &copy_loop_1);
- __ bind(&copy_loop_1_done);
- __ mov(index, Immediate(1));
+ // Empty separator case
+ __ mov(index, Immediate(0));
+ __ jmp(&loop_1_condition);
// Loop condition: while (index < length).
- Label loop;
- __ bind(&loop);
- __ cmp(index, array_length);
- __ j(greater_equal, &done);
+ __ bind(&loop_1);
+ // Each iteration of the loop concatenates one string to the result.
+ // Live values in registers:
+ // index: which element of the elements array we are adding to the result.
+ // result_pos: the position to which we are currently copying characters.
+ // elements: the FixedArray of strings we are joining.
- // If the separator is the empty string, signalled by NULL, skip it.
- Label separator_done;
- __ mov(current_string, separator);
- __ test(current_string, Operand(current_string));
- __ j(zero, &separator_done);
+ // Get string = array[index].
+ __ mov(string, FieldOperand(elements, index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ mov(string_length,
+ FieldOperand(string, String::kLengthOffset));
+ __ shr(string_length, 1);
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ CopyBytes(string, result_pos, string_length, scratch);
+ __ add(Operand(index), Immediate(1));
+ __ bind(&loop_1_condition);
+ __ cmp(index, array_length_operand);
+ __ j(less, &loop_1); // End while (index < length).
+ __ jmp(&done);
- // Append separator to result. It is known to be a flat ascii string.
- __ AppendStringToTopOfNewSpace(current_string, current_string_length,
- result_pos, scratch, scratch_2, result,
- padding_chars, &bailout);
- __ bind(&separator_done);
- // Add next element of array to the end of the result.
- // Get current_string = array[index].
- __ mov(scratch, elements);
- __ mov(current_string, FieldOperand(scratch, index,
- times_pointer_size,
- FixedArray::kHeaderSize));
- // If current != flat ascii string drop result, return undefined.
- __ test(current_string, Immediate(kSmiTagMask));
- __ j(zero, &bailout);
- __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset));
- __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
- __ and_(scratch, Immediate(
- kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
- __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
- __ j(not_equal, &bailout);
- // Append current to the result.
- __ AppendStringToTopOfNewSpace(current_string, current_string_length,
- result_pos, scratch, scratch_2, result,
- padding_chars, &bailout);
+ // One-character separator case
+ __ bind(&one_char_separator);
+ // Replace separator with its ascii character value.
+ __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ mov_b(separator_operand, scratch);
+
+ __ Set(index, Immediate(0));
+ // Jump into the loop after the code that copies the separator, so the first
+ // element is not preceded by a separator
+ __ jmp(&loop_2_entry);
+ // Loop condition: while (index < length).
+ __ bind(&loop_2);
+ // Each iteration of the loop concatenates one string to the result.
+ // Live values in registers:
+ // index: which element of the elements array we are adding to the result.
+ // result_pos: the position to which we are currently copying characters.
+
+ // Copy the separator character to the result.
+ __ mov_b(scratch, separator_operand);
+ __ mov_b(Operand(result_pos, 0), scratch);
+ __ inc(result_pos);
+
+ __ bind(&loop_2_entry);
+ // Get string = array[index].
+ __ mov(string, FieldOperand(elements, index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ mov(string_length,
+ FieldOperand(string, String::kLengthOffset));
+ __ shr(string_length, 1);
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ CopyBytes(string, result_pos, string_length, scratch);
__ add(Operand(index), Immediate(1));
- __ jmp(&loop); // End while (index < length).
+ __ cmp(index, array_length_operand);
+ __ j(less, &loop_2); // End while (index < length).
+ __ jmp(&done);
+
+
+ // Long separator case (separator is more than one character).
+ __ bind(&long_separator);
+
+ __ Set(index, Immediate(0));
+ // Jump into the loop after the code that copies the separator, so the first
+ // element is not preceded by a separator
+ __ jmp(&loop_3_entry);
+ // Loop condition: while (index < length).
+ __ bind(&loop_3);
+ // Each iteration of the loop concatenates one string to the result.
+ // Live values in registers:
+ // index: which element of the elements array we are adding to the result.
+ // result_pos: the position to which we are currently copying characters.
+
+ // Copy the separator to the result.
+ __ mov(string, separator_operand);
+ __ mov(string_length,
+ FieldOperand(string, String::kLengthOffset));
+ __ shr(string_length, 1);
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ CopyBytes(string, result_pos, string_length, scratch);
+
+ __ bind(&loop_3_entry);
+ // Get string = array[index].
+ __ mov(string, FieldOperand(elements, index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ mov(string_length,
+ FieldOperand(string, String::kLengthOffset));
+ __ shr(string_length, 1);
+ __ lea(string,
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
+ __ CopyBytes(string, result_pos, string_length, scratch);
+ __ add(Operand(index), Immediate(1));
+
+ __ cmp(index, array_length_operand);
+ __ j(less, &loop_3); // End while (index < length).
+ __ jmp(&done);
+
+
__ bind(&bailout);
- __ mov(result, Factory::undefined_value());
+ __ mov(result_operand, Factory::undefined_value());
__ bind(&done);
- __ mov(eax, result);
+ __ mov(eax, result_operand);
// Drop temp values from the stack, and restore context register.
- __ add(Operand(esp), Immediate(5 * kPointerSize));
+ __ add(Operand(esp), Immediate(3 * kPointerSize));
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
context()->Plug(eax);
@@ -3571,38 +3725,47 @@
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
Property* prop = expr->expression()->AsProperty();
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
- if (prop == NULL && var == NULL) {
- // Result of deleting non-property, non-variable reference is true.
- // The subexpression may have side effects.
- VisitForEffect(expr->expression());
- context()->Plug(true);
- } else if (var != NULL &&
- !var->is_global() &&
- var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
- // Result of deleting non-global, non-dynamic variables is false.
- // The subexpression does not have side effects.
- context()->Plug(false);
- } else {
- // Property or variable reference. Call the delete builtin with
- // object and property name as arguments.
- if (prop != NULL) {
+
+ if (prop != NULL) {
+ if (prop->is_synthetic()) {
+ // Result of deleting parameters is false, even when they rewrite
+ // to accesses on the arguments object.
+ context()->Plug(false);
+ } else {
VisitForStackValue(prop->obj());
VisitForStackValue(prop->key());
- } else if (var->is_global()) {
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ context()->Plug(eax);
+ }
+ } else if (var != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode
+ // so this code can only be reached in non-strict mode.
+ ASSERT(strict_mode_flag() == kNonStrictMode);
+ if (var->is_global()) {
__ push(GlobalObjectOperand());
__ push(Immediate(var->name()));
+ __ push(Immediate(Smi::FromInt(kNonStrictMode)));
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ context()->Plug(eax);
+ } else if (var->AsSlot() != NULL &&
+ var->AsSlot()->type() != Slot::LOOKUP) {
+ // Result of deleting non-global, non-dynamic variables is false.
+ // The subexpression does not have side effects.
+ context()->Plug(false);
} else {
- // Non-global variable. Call the runtime to look up the context
- // where the variable was introduced.
+ // Non-global variable. Call the runtime to try to delete from the
+ // context where the variable was introduced.
__ push(context_register());
__ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kLookupContext, 2);
- __ push(eax);
- __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kDeleteContextSlot, 2);
+ context()->Plug(eax);
}
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
- context()->Plug(eax);
+ } else {
+ // Result of deleting non-property, non-variable reference is true.
+ // The subexpression may have side effects.
+ VisitForEffect(expr->expression());
+ context()->Plug(true);
}
break;
}
@@ -3646,8 +3809,8 @@
Label no_conversion;
__ test(result_register(), Immediate(kSmiTagMask));
__ j(zero, &no_conversion);
- __ push(result_register());
- __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
+ ToNumberStub convert_stub;
+ __ CallStub(&convert_stub);
__ bind(&no_conversion);
context()->Plug(result_register());
break;
@@ -3743,7 +3906,9 @@
} else {
if (prop->is_arguments_access()) {
VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
- __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+ MemOperand slot_operand =
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
+ __ push(slot_operand);
__ mov(eax, Immediate(prop->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(prop->obj());
@@ -3765,8 +3930,8 @@
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &no_conversion);
}
- __ push(eax);
- __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
+ ToNumberStub convert_stub;
+ __ CallStub(&convert_stub);
__ bind(&no_conversion);
// Save result for postfix expressions.
@@ -3819,8 +3984,7 @@
// Call stub for +1/-1.
__ mov(edx, eax);
__ mov(eax, Immediate(Smi::FromInt(1)));
- TypeRecordingBinaryOpStub stub(expr->binary_op(),
- NO_OVERWRITE);
+ TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
@@ -4046,7 +4210,6 @@
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
- __ IncrementCounter(&Counters::instance_of_full, 1);
InstanceofStub stub(InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);