Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(59)

Side by Side Diff: src/x64/fast-codegen-x64.cc

Issue 660095: Merge revision 3813 to 3930 from bleeding_edge to partial snapshots branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/partial_snapshots/
Patch Set: '' Created 10 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/disasm-x64.cc ('k') | src/x64/full-codegen-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 17 matching lines...) Expand all
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #include "codegen-inl.h" 30 #include "codegen-inl.h"
31 #include "fast-codegen.h" 31 #include "fast-codegen.h"
32 32
33 namespace v8 { 33 namespace v8 {
34 namespace internal { 34 namespace internal {
35 35
36 #define __ ACCESS_MASM(masm()) 36 #define __ ACCESS_MASM(masm())
37 37
38 void FastCodeGenerator::EmitLoadReceiver(Register reg) { 38 Register FastCodeGenerator::accumulator0() { return rax; }
39 Register FastCodeGenerator::accumulator1() { return rdx; }
40 Register FastCodeGenerator::scratch0() { return rcx; }
41 Register FastCodeGenerator::scratch1() { return rdi; }
42 Register FastCodeGenerator::receiver_reg() { return rbx; }
43 Register FastCodeGenerator::context_reg() { return rsi; }
44
45
46 void FastCodeGenerator::EmitLoadReceiver() {
39 // Offset 2 is due to return address and saved frame pointer. 47 // Offset 2 is due to return address and saved frame pointer.
40 int index = 2 + scope()->num_parameters(); 48 int index = 2 + scope()->num_parameters();
41 __ movq(reg, Operand(rbp, index * kPointerSize)); 49 __ movq(receiver_reg(), Operand(rbp, index * kPointerSize));
42 }
43
44
45 void FastCodeGenerator::EmitReceiverMapCheck() {
46 Comment cmnt(masm(), ";; MapCheck(this)");
47 if (FLAG_print_ir) {
48 PrintF("MapCheck(this)\n");
49 }
50
51 ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
52 Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
53 Handle<Map> map(object->map());
54
55 EmitLoadReceiver(rdx);
56 __ CheckMap(rdx, map, bailout(), false);
57 }
58
59
60 void FastCodeGenerator::EmitGlobalMapCheck() {
61 Comment cmnt(masm(), ";; GlobalMapCheck");
62 if (FLAG_print_ir) {
63 PrintF(";; GlobalMapCheck()");
64 }
65
66 ASSERT(info()->has_global_object());
67 Handle<Map> map(info()->global_object()->map());
68
69 __ movq(rbx, CodeGenerator::GlobalObject());
70 __ CheckMap(rbx, map, bailout(), true);
71 } 50 }
72 51
73 52
74 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) { 53 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
54 ASSERT(!destination().is(no_reg));
75 ASSERT(cell->IsJSGlobalPropertyCell()); 55 ASSERT(cell->IsJSGlobalPropertyCell());
76 __ Move(rax, cell); 56
77 __ movq(rax, FieldOperand(rax, JSGlobalPropertyCell::kValueOffset)); 57 __ Move(destination(), cell);
58 __ movq(destination(),
59 FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
78 if (FLAG_debug_code) { 60 if (FLAG_debug_code) {
79 __ Cmp(rax, Factory::the_hole_value()); 61 __ Cmp(destination(), Factory::the_hole_value());
80 __ Check(not_equal, "DontDelete cells can't contain the hole"); 62 __ Check(not_equal, "DontDelete cells can't contain the hole");
81 } 63 }
64
65 // The loaded value is not known to be a smi.
66 clear_as_smi(destination());
82 } 67 }
83 68
84 69
85 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) { 70 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
86 LookupResult lookup; 71 LookupResult lookup;
87 info()->receiver()->Lookup(*name, &lookup); 72 info()->receiver()->Lookup(*name, &lookup);
88 73
74 ASSERT(lookup.holder() == *info()->receiver());
75 ASSERT(lookup.type() == FIELD);
76 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
77 int index = lookup.GetFieldIndex() - map->inobject_properties();
78 int offset = index * kPointerSize;
79
80 // We will emit the write barrier unless the stored value is statically
81 // known to be a smi.
82 bool needs_write_barrier = !is_smi(accumulator0());
83
84 // Perform the store. Negative offsets are inobject properties.
85 if (offset < 0) {
86 offset += map->instance_size();
87 __ movq(FieldOperand(receiver_reg(), offset), accumulator0());
88 if (needs_write_barrier) {
89 // Preserve receiver from write barrier.
90 __ movq(scratch0(), receiver_reg());
91 }
92 } else {
93 offset += FixedArray::kHeaderSize;
94 __ movq(scratch0(),
95 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
96 __ movq(FieldOperand(scratch0(), offset), accumulator0());
97 }
98
99 if (needs_write_barrier) {
100 if (destination().is(no_reg)) {
 101 // After RecordWrite accumulator0 is only accidentally a smi, but it is
102 // already marked as not known to be one.
103 __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
104 } else {
105 // Copy the value to the other accumulator to preserve a copy from the
106 // write barrier. One of the accumulators is available as a scratch
107 // register. Neither is a smi.
108 __ movq(accumulator1(), accumulator0());
109 clear_as_smi(accumulator1());
110 Register value_scratch = other_accumulator(destination());
111 __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
112 }
113 } else if (destination().is(accumulator1())) {
114 __ movq(accumulator1(), accumulator0());
115 // Is a smi because we do not need the write barrier.
116 set_as_smi(accumulator1());
117 }
118 }
119
120
121 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
122 ASSERT(!destination().is(no_reg));
123 LookupResult lookup;
124 info()->receiver()->Lookup(*name, &lookup);
125
89 ASSERT(lookup.holder() == *info()->receiver()); 126 ASSERT(lookup.holder() == *info()->receiver());
90 ASSERT(lookup.type() == FIELD); 127 ASSERT(lookup.type() == FIELD);
91 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); 128 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
92 int index = lookup.GetFieldIndex() - map->inobject_properties(); 129 int index = lookup.GetFieldIndex() - map->inobject_properties();
93 int offset = index * kPointerSize; 130 int offset = index * kPointerSize;
94 131
95 // Negative offsets are inobject properties. 132 // Perform the load. Negative offsets are inobject properties.
96 if (offset < 0) { 133 if (offset < 0) {
97 offset += map->instance_size(); 134 offset += map->instance_size();
98 __ movq(rcx, rdx); // Copy receiver for write barrier. 135 __ movq(destination(), FieldOperand(receiver_reg(), offset));
99 } else { 136 } else {
100 offset += FixedArray::kHeaderSize; 137 offset += FixedArray::kHeaderSize;
101 __ movq(rcx, FieldOperand(rdx, JSObject::kPropertiesOffset)); 138 __ movq(scratch0(),
139 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
140 __ movq(destination(), FieldOperand(scratch0(), offset));
102 } 141 }
103 // Perform the store. 142
104 __ movq(FieldOperand(rcx, offset), rax); 143 // The loaded value is not known to be a smi.
105 // Preserve value from write barrier in case it's needed. 144 clear_as_smi(destination());
106 __ movq(rbx, rax); 145 }
107 __ RecordWrite(rcx, offset, rbx, rdi); 146
147
148 void FastCodeGenerator::EmitBitOr() {
149 if (is_smi(accumulator0()) && is_smi(accumulator1())) {
150 // If both operands are known to be a smi then there is no need to check
151 // the operands or result.
152 if (destination().is(no_reg)) {
153 __ or_(accumulator1(), accumulator0());
154 } else {
155 // Leave the result in the destination register. Bitwise or is
156 // commutative.
157 __ or_(destination(), other_accumulator(destination()));
158 }
159 } else {
160 // Left is in accumulator1, right in accumulator0.
161 if (destination().is(accumulator0())) {
162 __ movq(scratch0(), accumulator0());
163 __ or_(destination(), accumulator1()); // Or is commutative.
164 Label* bailout =
165 info()->AddBailout(accumulator1(), scratch0()); // Left, right.
166 __ JumpIfNotSmi(destination(), bailout);
167 } else if (destination().is(accumulator1())) {
168 __ movq(scratch0(), accumulator1());
169 __ or_(destination(), accumulator0());
170 Label* bailout = info()->AddBailout(scratch0(), accumulator0());
171 __ JumpIfNotSmi(destination(), bailout);
172 } else {
173 ASSERT(destination().is(no_reg));
174 __ movq(scratch0(), accumulator1());
175 __ or_(scratch0(), accumulator0());
176 Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
177 __ JumpIfNotSmi(scratch0(), bailout);
178 }
179 }
180
181 // If we didn't bailout, the result (in fact, both inputs too) is known to
182 // be a smi.
183 set_as_smi(accumulator0());
184 set_as_smi(accumulator1());
108 } 185 }
109 186
110 187
111 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) { 188 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
112 ASSERT(info_ == NULL); 189 ASSERT(info_ == NULL);
113 info_ = compilation_info; 190 info_ = compilation_info;
114 191
115 // Save the caller's frame pointer and set up our own. 192 // Save the caller's frame pointer and set up our own.
116 Comment prologue_cmnt(masm(), ";; Prologue"); 193 Comment prologue_cmnt(masm(), ";; Prologue");
117 __ push(rbp); 194 __ push(rbp);
118 __ movq(rbp, rsp); 195 __ movq(rbp, rsp);
119 __ push(rsi); // Context. 196 __ push(rsi); // Context.
120 __ push(rdi); // Closure. 197 __ push(rdi); // Closure.
 121 // Note that we keep a live register reference to rsi (context) at this 198 // Note that we keep a live register reference to rsi (context) at this
122 // point. 199 // point.
123 200
124 // Receiver (this) is allocated to rdx if there are this properties. 201 Label* bailout_to_beginning = info()->AddBailout();
125 if (info()->has_this_properties()) EmitReceiverMapCheck(); 202 // Receiver (this) is allocated to a fixed register.
203 if (info()->has_this_properties()) {
204 Comment cmnt(masm(), ";; MapCheck(this)");
205 if (FLAG_print_ir) {
206 PrintF("MapCheck(this)\n");
207 }
208 ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
209 Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
210 Handle<Map> map(object->map());
211 EmitLoadReceiver();
212 __ CheckMap(receiver_reg(), map, bailout_to_beginning, false);
213 }
126 214
127 // If there is a global variable access check if the global object 215 // If there is a global variable access check if the global object is the
128 // is the same as at lazy-compilation time. 216 // same as at lazy-compilation time.
129 if (info()->has_globals()) EmitGlobalMapCheck(); 217 if (info()->has_globals()) {
218 Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
219 if (FLAG_print_ir) {
220 PrintF("MapCheck(GLOBAL)\n");
221 }
222 ASSERT(info()->has_global_object());
223 Handle<Map> map(info()->global_object()->map());
224 __ movq(scratch0(), CodeGenerator::GlobalObject());
225 __ CheckMap(scratch0(), map, bailout_to_beginning, true);
226 }
130 227
131 VisitStatements(info()->function()->body()); 228 VisitStatements(info()->function()->body());
132 229
133 Comment return_cmnt(masm(), ";; Return(<undefined>)"); 230 Comment return_cmnt(masm(), ";; Return(<undefined>)");
231 if (FLAG_print_ir) {
232 PrintF("Return(<undefined>)\n");
233 }
134 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 234 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
135
136 Comment epilogue_cmnt(masm(), ";; Epilogue");
137 __ movq(rsp, rbp); 235 __ movq(rsp, rbp);
138 __ pop(rbp); 236 __ pop(rbp);
139 __ ret((scope()->num_parameters() + 1) * kPointerSize); 237 __ ret((scope()->num_parameters() + 1) * kPointerSize);
140
141 __ bind(&bailout_);
142 } 238 }
143 239
144 240
145 #undef __ 241 #undef __
146 242
147 243
148 } } // namespace v8::internal 244 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/x64/disasm-x64.cc ('k') | src/x64/full-codegen-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698