Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(694)

Unified Diff: src/x64/fast-codegen-x64.cc

Issue 660095: Merge revision 3813 to 3930 from bleeding_edge to partial snapshots branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/partial_snapshots/
Patch Set: '' Created 10 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « src/x64/disasm-x64.cc ('k') | src/x64/full-codegen-x64.cc » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: src/x64/fast-codegen-x64.cc
===================================================================
--- src/x64/fast-codegen-x64.cc (revision 3935)
+++ src/x64/fast-codegen-x64.cc (working copy)
@@ -35,54 +35,91 @@
#define __ ACCESS_MASM(masm())
-void FastCodeGenerator::EmitLoadReceiver(Register reg) {
+Register FastCodeGenerator::accumulator0() { return rax; }
+Register FastCodeGenerator::accumulator1() { return rdx; }
+Register FastCodeGenerator::scratch0() { return rcx; }
+Register FastCodeGenerator::scratch1() { return rdi; }
+Register FastCodeGenerator::receiver_reg() { return rbx; }
+Register FastCodeGenerator::context_reg() { return rsi; }
+
+
+void FastCodeGenerator::EmitLoadReceiver() {
// Offset 2 is due to return address and saved frame pointer.
int index = 2 + scope()->num_parameters();
- __ movq(reg, Operand(rbp, index * kPointerSize));
+ __ movq(receiver_reg(), Operand(rbp, index * kPointerSize));
}
-void FastCodeGenerator::EmitReceiverMapCheck() {
- Comment cmnt(masm(), ";; MapCheck(this)");
- if (FLAG_print_ir) {
- PrintF("MapCheck(this)\n");
+void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
+ ASSERT(!destination().is(no_reg));
+ ASSERT(cell->IsJSGlobalPropertyCell());
+
+ __ Move(destination(), cell);
+ __ movq(destination(),
+ FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
+ if (FLAG_debug_code) {
+ __ Cmp(destination(), Factory::the_hole_value());
+ __ Check(not_equal, "DontDelete cells can't contain the hole");
}
- ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
- Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
- Handle<Map> map(object->map());
-
- EmitLoadReceiver(rdx);
- __ CheckMap(rdx, map, bailout(), false);
+ // The loaded value is not known to be a smi.
+ clear_as_smi(destination());
}
-void FastCodeGenerator::EmitGlobalMapCheck() {
- Comment cmnt(masm(), ";; GlobalMapCheck");
- if (FLAG_print_ir) {
- PrintF(";; GlobalMapCheck()");
- }
+void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
+ LookupResult lookup;
+ info()->receiver()->Lookup(*name, &lookup);
- ASSERT(info()->has_global_object());
- Handle<Map> map(info()->global_object()->map());
+ ASSERT(lookup.holder() == *info()->receiver());
+ ASSERT(lookup.type() == FIELD);
+ Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
+ int index = lookup.GetFieldIndex() - map->inobject_properties();
+ int offset = index * kPointerSize;
- __ movq(rbx, CodeGenerator::GlobalObject());
- __ CheckMap(rbx, map, bailout(), true);
-}
+ // We will emit the write barrier unless the stored value is statically
+ // known to be a smi.
+ bool needs_write_barrier = !is_smi(accumulator0());
+ // Perform the store. Negative offsets are inobject properties.
+ if (offset < 0) {
+ offset += map->instance_size();
+ __ movq(FieldOperand(receiver_reg(), offset), accumulator0());
+ if (needs_write_barrier) {
+ // Preserve receiver from write barrier.
+ __ movq(scratch0(), receiver_reg());
+ }
+ } else {
+ offset += FixedArray::kHeaderSize;
+ __ movq(scratch0(),
+ FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
+ __ movq(FieldOperand(scratch0(), offset), accumulator0());
+ }
-void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
- ASSERT(cell->IsJSGlobalPropertyCell());
- __ Move(rax, cell);
- __ movq(rax, FieldOperand(rax, JSGlobalPropertyCell::kValueOffset));
- if (FLAG_debug_code) {
- __ Cmp(rax, Factory::the_hole_value());
- __ Check(not_equal, "DontDelete cells can't contain the hole");
+ if (needs_write_barrier) {
+ if (destination().is(no_reg)) {
+ // After RecordWrite accumulator0 is only accidentally a smi, but it is
+ // already marked as not known to be one.
+ __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
+ } else {
+ // Copy the value to the other accumulator to preserve a copy from the
+ // write barrier. One of the accumulators is available as a scratch
+ // register. Neither is a smi.
+ __ movq(accumulator1(), accumulator0());
+ clear_as_smi(accumulator1());
+ Register value_scratch = other_accumulator(destination());
+ __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
+ }
+ } else if (destination().is(accumulator1())) {
+ __ movq(accumulator1(), accumulator0());
+ // Is a smi because we do not need the write barrier.
+ set_as_smi(accumulator1());
}
}
-void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
+void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
+ ASSERT(!destination().is(no_reg));
LookupResult lookup;
info()->receiver()->Lookup(*name, &lookup);
@@ -92,22 +129,62 @@
int index = lookup.GetFieldIndex() - map->inobject_properties();
int offset = index * kPointerSize;
- // Negative offsets are inobject properties.
+ // Perform the load. Negative offsets are inobject properties.
if (offset < 0) {
offset += map->instance_size();
- __ movq(rcx, rdx); // Copy receiver for write barrier.
+ __ movq(destination(), FieldOperand(receiver_reg(), offset));
} else {
offset += FixedArray::kHeaderSize;
- __ movq(rcx, FieldOperand(rdx, JSObject::kPropertiesOffset));
+ __ movq(scratch0(),
+ FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
+ __ movq(destination(), FieldOperand(scratch0(), offset));
}
- // Perform the store.
- __ movq(FieldOperand(rcx, offset), rax);
- // Preserve value from write barrier in case it's needed.
- __ movq(rbx, rax);
- __ RecordWrite(rcx, offset, rbx, rdi);
+
+ // The loaded value is not known to be a smi.
+ clear_as_smi(destination());
}
+void FastCodeGenerator::EmitBitOr() {
+ if (is_smi(accumulator0()) && is_smi(accumulator1())) {
+ // If both operands are known to be a smi then there is no need to check
+ // the operands or result.
+ if (destination().is(no_reg)) {
+ __ or_(accumulator1(), accumulator0());
+ } else {
+ // Leave the result in the destination register. Bitwise or is
+ // commutative.
+ __ or_(destination(), other_accumulator(destination()));
+ }
+ } else {
+ // Left is in accumulator1, right in accumulator0.
+ if (destination().is(accumulator0())) {
+ __ movq(scratch0(), accumulator0());
+ __ or_(destination(), accumulator1()); // Or is commutative.
+ Label* bailout =
+ info()->AddBailout(accumulator1(), scratch0()); // Left, right.
+ __ JumpIfNotSmi(destination(), bailout);
+ } else if (destination().is(accumulator1())) {
+ __ movq(scratch0(), accumulator1());
+ __ or_(destination(), accumulator0());
+ Label* bailout = info()->AddBailout(scratch0(), accumulator0());
+ __ JumpIfNotSmi(destination(), bailout);
+ } else {
+ ASSERT(destination().is(no_reg));
+ __ movq(scratch0(), accumulator1());
+ __ or_(scratch0(), accumulator0());
+ Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
+ __ JumpIfNotSmi(scratch0(), bailout);
+ }
+ }
+
+ // If we didn't bailout, the result (in fact, both inputs too) is known to
+ // be a smi.
+ set_as_smi(accumulator0());
+ set_as_smi(accumulator1());
+}
+
+
void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
ASSERT(info_ == NULL);
info_ = compilation_info;
@@ -121,24 +198,43 @@
// Note that we keep a live register reference to esi (context) at this
// point.
- // Receiver (this) is allocated to rdx if there are this properties.
- if (info()->has_this_properties()) EmitReceiverMapCheck();
+ Label* bailout_to_beginning = info()->AddBailout();
+ // Receiver (this) is allocated to a fixed register.
+ if (info()->has_this_properties()) {
+ Comment cmnt(masm(), ";; MapCheck(this)");
+ if (FLAG_print_ir) {
+ PrintF("MapCheck(this)\n");
+ }
+ ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
+ Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
+ Handle<Map> map(object->map());
+ EmitLoadReceiver();
+ __ CheckMap(receiver_reg(), map, bailout_to_beginning, false);
+ }
- // If there is a global variable access check if the global object
- // is the same as at lazy-compilation time.
- if (info()->has_globals()) EmitGlobalMapCheck();
+ // If there is a global variable access check if the global object is the
+ // same as at lazy-compilation time.
+ if (info()->has_globals()) {
+ Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
+ if (FLAG_print_ir) {
+ PrintF("MapCheck(GLOBAL)\n");
+ }
+ ASSERT(info()->has_global_object());
+ Handle<Map> map(info()->global_object()->map());
+ __ movq(scratch0(), CodeGenerator::GlobalObject());
+ __ CheckMap(scratch0(), map, bailout_to_beginning, true);
+ }
VisitStatements(info()->function()->body());
Comment return_cmnt(masm(), ";; Return(<undefined>)");
+ if (FLAG_print_ir) {
+ PrintF("Return(<undefined>)\n");
+ }
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
-
- Comment epilogue_cmnt(masm(), ";; Epilogue");
__ movq(rsp, rbp);
__ pop(rbp);
__ ret((scope()->num_parameters() + 1) * kPointerSize);
-
- __ bind(&bailout_);
}
« no previous file with comments | « src/x64/disasm-x64.cc ('k') | src/x64/full-codegen-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698