
Side by Side Diff: src/code-stub-assembler.cc

Issue 1875583003: Separate CodeAssembler and CodeStubAssembler (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix gn build (created 4 years, 8 months ago)
(The old side of the diff is empty: src/code-stub-assembler.cc is added at this path by this patch.)
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-stub-assembler.h"
#include "src/code-factory.h"

namespace v8 {
namespace internal {

using compiler::Node;

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     const CallInterfaceDescriptor& descriptor,
                                     Code::Flags flags, const char* name,
                                     size_t result_size)
    : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
                              result_size) {}

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     int parameter_count, Code::Flags flags,
                                     const char* name)
    : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}

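// Editor's note on the Smi helpers below: a Smi stores its integer payload in
// the upper bits of a tagged, pointer-sized word (shifted left by
// kSmiShiftSize + kSmiTagSize, i.e. 1 bit on 32-bit and 32 bits on 64-bit
// targets) and has a zero tag bit. Untagging is therefore an arithmetic shift
// right, and tagged addition, subtraction and comparison can be performed
// directly with the pointer-width machine operations used here.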
Node* CodeStubAssembler::SmiToWord32(Node* value) {
  Node* result = WordSar(value, SmiShiftBitsConstant());
  if (Is64()) {
    result = TruncateInt64ToInt32(result);
  }
  return result;
}

Node* CodeStubAssembler::SmiToFloat64(Node* value) {
  return ChangeInt32ToFloat64(SmiUntag(value));
}

Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }

Node* CodeStubAssembler::SmiAddWithOverflow(Node* a, Node* b) {
  return IntPtrAddWithOverflow(a, b);
}

Node* CodeStubAssembler::SmiSub(Node* a, Node* b) { return IntPtrSub(a, b); }

Node* CodeStubAssembler::SmiSubWithOverflow(Node* a, Node* b) {
  return IntPtrSubWithOverflow(a, b);
}

Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
  // TODO(bmeurer): Consider using Select once available.
  Variable min(this, MachineRepresentation::kTagged);
  Label if_a(this), if_b(this), join(this);
  BranchIfSmiLessThan(a, b, &if_a, &if_b);
  Bind(&if_a);
  min.Bind(a);
  Goto(&join);
  Bind(&if_b);
  min.Bind(b);
  Goto(&join);
  Bind(&join);
  return min.value();
}

Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }

Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
  return IntPtrLessThan(a, b);
}

Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
  return IntPtrLessThanOrEqual(a, b);
}

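// WordIsSmi tests the low tag bit (kSmiTagMask) of a tagged word;
// WordIsPositiveSmi additionally requires the sign bit (kSmiSignMask) to be
// clear, so it only accepts non-negative Smis.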
Node* CodeStubAssembler::WordIsSmi(Node* a) {
  return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0));
}

Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
  return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
                   IntPtrConstant(0));
}

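// The field loaders below subtract kHeapObjectTag from each field offset
// because a tagged heap object pointer points one byte past the actual object
// start. LoadBufferObject uses its offset unchanged, for loads from untagged
// (raw) buffer pointers.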
Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                          MachineType rep) {
  return Load(rep, buffer, IntPtrConstant(offset));
}

Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
  return Load(MachineType::Float64(), object,
              IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMap(Node* object) {
  return LoadObjectField(object, HeapObject::kMapOffset);
}

Node* CodeStubAssembler::LoadInstanceType(Node* object) {
  return LoadMapInstanceType(LoadMap(object));
}

Node* CodeStubAssembler::LoadElements(Node* object) {
  return LoadObjectField(object, JSObject::kElementsOffset);
}

Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
  return LoadObjectField(array, FixedArrayBase::kLengthOffset);
}

Node* CodeStubAssembler::LoadMapBitField(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kBitField2Offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
  return Load(MachineType::Uint32(), map,
              IntPtrConstant(Map::kBitField3Offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
  return LoadObjectField(map, Map::kDescriptorsOffset);
}

Node* CodeStubAssembler::LoadNameHash(Node* name) {
  return Load(MachineType::Uint32(), name,
              IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag));
}

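// The element loaders below compute the offset into a FixedArray as
//   index * kPointerSize + FixedArray::kHeaderSize - kHeapObjectTag
// (plus any additional_offset). For a raw int32 index this is a left shift by
// kPointerSizeLog2; a Smi index already encodes value << (kSmiShiftSize +
// kSmiTagSize), so the scaling shifts by the difference between the Smi shift
// and kPointerSizeLog2 instead of untagging first.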
Node* CodeStubAssembler::LoadFixedArrayElementInt32Index(
    Node* object, Node* int32_index, int additional_offset) {
  Node* header_size = IntPtrConstant(additional_offset +
                                     FixedArray::kHeaderSize - kHeapObjectTag);
  Node* scaled_index = WordShl(int32_index, IntPtrConstant(kPointerSizeLog2));
  Node* offset = IntPtrAdd(scaled_index, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
                                                       Node* smi_index,
                                                       int additional_offset) {
  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  Node* header_size = IntPtrConstant(additional_offset +
                                     FixedArray::kHeaderSize - kHeapObjectTag);
  Node* scaled_index =
      (kSmiShiftBits > kPointerSizeLog2)
          ? WordSar(smi_index, IntPtrConstant(kSmiShiftBits - kPointerSizeLog2))
          : WordShl(smi_index,
                    IntPtrConstant(kPointerSizeLog2 - kSmiShiftBits));
  Node* offset = IntPtrAdd(scaled_index, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadFixedArrayElementConstantIndex(Node* object,
                                                            int index) {
  Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag +
                                index * kPointerSize);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, object,
      IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
}

Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object,
                                                              Node* index,
                                                              Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
  return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
                             value);
}

Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kFloat64, object,
      IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::AllocateHeapNumber() {
  Node* result = Allocate(HeapNumber::kSize, kNone);
  StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
  return result;
}

Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
  Node* result = AllocateHeapNumber();
  StoreHeapNumberValue(result, value);
  return result;
}

Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                        uint32_t mask) {
  return Word32Shr(Word32And(word32, Int32Constant(mask)),
                   Int32Constant(shift));
}
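// Illustrative use (not part of this patch): decoding a BitField such as
// Map::ElementsKindBits from a loaded bit field word, e.g.
//   BitFieldDecode(LoadMapBitField2(map), Map::ElementsKindBits::kShift,
//                  Map::ElementsKindBits::kMask);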

Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
  Node* value32 = TruncateFloat64ToInt32RoundToZero(value);
  Node* value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  // The double can only be represented as a Smi if it round-trips through
  // int32 unchanged.
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  Bind(&if_valueisequal);
  {
    Label if_valueiszero(this), if_valueisnotzero(this);
    Branch(Float64Equal(value, Float64Constant(0.0)), &if_valueiszero,
           &if_valueisnotzero);

    Bind(&if_valueiszero);
    // A zero with the sign bit set is -0.0, which has no Smi representation
    // and must be boxed in a HeapNumber.
    BranchIfInt32LessThan(Float64ExtractHighWord32(value), Int32Constant(0),
                          &if_valueisheapnumber, &if_valueisint32);

    Bind(&if_valueisnotzero);
    Goto(&if_valueisint32);
  }
  Bind(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  Variable var_result(this, MachineRepresentation::kTagged);
  Bind(&if_valueisint32);
  {
    if (Is64()) {
      Node* result = SmiTag(ChangeInt32ToInt64(value32));
      var_result.Bind(result);
      Goto(&if_join);
    } else {
      // On 32-bit targets, value + value performs the Smi tagging shift by
      // one; the overflow check catches int32 values outside the Smi range.
      Node* pair = Int32AddWithOverflow(value32, value32);
      Node* overflow = Projection(1, pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      Bind(&if_overflow);
      Goto(&if_valueisheapnumber);
      Bind(&if_notoverflow);
      {
        Node* result = Projection(0, pair);
        var_result.Bind(result);
        Goto(&if_join);
      }
    }
  }
  Bind(&if_valueisheapnumber);
  {
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&if_join);
  }
  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
  if (Is64()) {
    return SmiTag(ChangeInt32ToInt64(value));
  }
  // On 32-bit targets, value + value performs the Smi tagging shift by one,
  // and the overflow bit detects values outside the Smi range, which are
  // boxed in a HeapNumber instead.
  Variable var_result(this, MachineRepresentation::kTagged);
  Node* pair = Int32AddWithOverflow(value, value);
  Node* overflow = Projection(1, pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  Bind(&if_overflow);
  {
    Node* value64 = ChangeInt32ToFloat64(value);
    Node* result = AllocateHeapNumberWithValue(value64);
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_notoverflow);
  {
    Node* result = Projection(0, pair);
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToFloat64(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Load the floating point value.
        var_result.Bind(LoadHeapNumberValue(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kWord32);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
  Node* value = LoadHeapNumberValue(object);
  return TruncateFloat64ToInt32JavaScript(value);
}

}  // namespace internal
}  // namespace v8
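
For orientation, here is a minimal sketch (not part of the reviewed patch) of how calling code might drive these helpers. GenerateSmiAddReturn is a made-up name, and Parameter()/Return() are assumed to be inherited from the compiler::CodeAssembler base class that this CL introduces.

// Sketch only: emits "return a + b" for two inputs assumed to be Smis,
// using the primitives defined in the file above.
void GenerateSmiAddReturn(CodeStubAssembler* assembler) {
  typedef compiler::Node Node;
  Node* a = assembler->Parameter(0);  // assumed to be a Smi
  Node* b = assembler->Parameter(1);  // assumed to be a Smi
  // Smis can be added directly in their tagged form (see SmiAdd above).
  assembler->Return(assembler->SmiAdd(a, b));
}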
