Chromium Code Reviews

Unified Diff: src/code-stub-assembler.cc

Issue 1893383002: Complete separation of CodeAssembler and CodeStubAssembler (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Review feedback Created 4 years, 8 months ago
 // Copyright 2016 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/code-stub-assembler.h"
 #include "src/code-factory.h"

 namespace v8 {
 namespace internal {

 using compiler::Node;

 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                      const CallInterfaceDescriptor& descriptor,
                                      Code::Flags flags, const char* name,
                                      size_t result_size)
     : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
                               result_size) {}

 CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                      int parameter_count, Code::Flags flags,
                                      const char* name)
     : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}

+Node* CodeStubAssembler::BooleanMapConstant() {
+  return HeapConstant(isolate()->factory()->boolean_map());
+}
+
+Node* CodeStubAssembler::EmptyStringConstant() {
+  return LoadRoot(Heap::kempty_stringRootIndex);
+}
+
+Node* CodeStubAssembler::HeapNumberMapConstant() {
+  return HeapConstant(isolate()->factory()->heap_number_map());
+}
+
+Node* CodeStubAssembler::NoContextConstant() {
+  return SmiConstant(Smi::FromInt(0));
+}
+
+Node* CodeStubAssembler::NullConstant() {
+  return LoadRoot(Heap::kNullValueRootIndex);
+}
+
+Node* CodeStubAssembler::UndefinedConstant() {
+  return LoadRoot(Heap::kUndefinedValueRootIndex);
+}
+
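Note: these constant helpers (moved in from CodeAssembler) wrap the root-list and factory lookups for values that stub code needs constantly. As a rough usage sketch, not part of this CL (MyStubAssembler and BranchIfUndefined are hypothetical names), a generator in a CodeStubAssembler subclass could branch on undefined with a plain pointer comparison, since UndefinedConstant() yields the canonical singleton:

// Hypothetical helper, for illustration only.
void MyStubAssembler::BranchIfUndefined(Node* value, Label* if_undefined,
                                        Label* if_other) {
  // The undefined oddball is a singleton, so reference equality suffices.
  Branch(WordEqual(value, UndefinedConstant()), if_undefined, if_other);
}
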
 Node* CodeStubAssembler::Float64Round(Node* x) {
   Node* one = Float64Constant(1.0);
   Node* one_half = Float64Constant(0.5);

   Variable var_x(this, MachineRepresentation::kFloat64);
   Label return_x(this);

   // Round up {x} towards Infinity.
   var_x.Bind(Float64Ceil(x));

(...skipping 174 matching lines...)
   return var_x.value();
 }

 Node* CodeStubAssembler::SmiFromWord32(Node* value) {
   if (Is64()) {
     value = ChangeInt32ToInt64(value);
   }
   return WordShl(value, SmiShiftBitsConstant());
 }

+Node* CodeStubAssembler::SmiTag(Node* value) {
+  return WordShl(value, SmiShiftBitsConstant());
+}
+
+Node* CodeStubAssembler::SmiUntag(Node* value) {
+  return WordSar(value, SmiShiftBitsConstant());
+}
+
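Note: SmiTag/SmiUntag are pure shift arithmetic on the tagged word. A standalone host-side model (illustration only, assuming the 32-bit layout where the Smi shift is 1; 64-bit V8 shifts by 32):

#include <cassert>
#include <cstdint>

// Host-side model of Smi tagging, assuming a shift of 1 (32-bit layout).
// Illustration only; not V8 code.
constexpr int kModelSmiShiftBits = 1;

intptr_t ModelSmiTag(intptr_t value) { return value << kModelSmiShiftBits; }
intptr_t ModelSmiUntag(intptr_t word) { return word >> kModelSmiShiftBits; }

int main() {
  assert(ModelSmiUntag(ModelSmiTag(42)) == 42);
  assert((ModelSmiTag(42) & 1) == 0);  // low tag bit clear marks a Smi
  return 0;
}
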
 Node* CodeStubAssembler::SmiToWord32(Node* value) {
   Node* result = WordSar(value, SmiShiftBitsConstant());
   if (Is64()) {
     result = TruncateInt64ToInt32(result);
   }
   return result;
 }

 Node* CodeStubAssembler::SmiToFloat64(Node* value) {
   return ChangeInt32ToFloat64(SmiUntag(value));
(...skipping 42 matching lines...)

 Node* CodeStubAssembler::WordIsSmi(Node* a) {
   return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0));
 }

 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
   return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
                    IntPtrConstant(0));
 }

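Note: WordIsSmi checks only the low tag bit, while WordIsPositiveSmi also requires the word's sign bit to be clear. The same predicates in scalar form (sketch; the constants mirror kSmiTagMask and kSmiSignMask):

#include <cstdint>

// Scalar model of the two predicates above; illustration only.
constexpr uintptr_t kModelSmiTagMask = 1;
constexpr uintptr_t kModelSmiSignMask = uintptr_t{1}
                                        << (sizeof(uintptr_t) * 8 - 1);

bool ModelWordIsSmi(uintptr_t a) { return (a & kModelSmiTagMask) == 0; }

bool ModelWordIsPositiveSmi(uintptr_t a) {
  return (a & (kModelSmiTagMask | kModelSmiSignMask)) == 0;
}
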
+Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
+                                              AllocationFlags flags,
+                                              Node* top_address,
+                                              Node* limit_address) {
+  Node* top = Load(MachineType::Pointer(), top_address);
+  Node* limit = Load(MachineType::Pointer(), limit_address);
+
+  // If there's not enough space, call the runtime.
+  Variable result(this, MachineRepresentation::kTagged);
+  Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
+  Label merge_runtime(this, &result);
+
+  Branch(IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes), &runtime_call,
+         &no_runtime_call);
+
+  Bind(&runtime_call);
+  // AllocateInTargetSpace does not use the context.
+  Node* context = IntPtrConstant(0);
+  Node* runtime_flags = SmiTag(Int32Constant(
+      AllocateDoubleAlignFlag::encode(false) |
+      AllocateTargetSpace::encode(flags & kPretenured
+                                      ? AllocationSpace::OLD_SPACE
+                                      : AllocationSpace::NEW_SPACE)));
+  Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
+                                     SmiTag(size_in_bytes), runtime_flags);
+  result.Bind(runtime_result);
+  Goto(&merge_runtime);
+
+  // When there is enough space, return `top' and bump it up.
+  Bind(&no_runtime_call);
+  Node* no_runtime_result = top;
+  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
+                      IntPtrAdd(top, size_in_bytes));
+  no_runtime_result =
+      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag));
+  result.Bind(no_runtime_result);
+  Goto(&merge_runtime);
+
+  Bind(&merge_runtime);
+  return result.value();
+}
+
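Note: AllocateRawUnaligned is a bump-pointer fast path with a deferred runtime fallback: if limit - top is smaller than the request, the stub calls Runtime::kAllocateInTargetSpace; otherwise it returns the old top tagged with kHeapObjectTag and bumps top. A host-side model of the fast path (illustration only; the runtime call is stubbed out):

#include <cstddef>
#include <cstdint>

// Bump-pointer model of the fast path above; illustration only.
// kHeapObjectTag is 1: tagged heap-object pointers have the low bit set.
constexpr uintptr_t kModelHeapObjectTag = 1;

uintptr_t ModelAllocateRawUnaligned(uintptr_t* top, uintptr_t limit,
                                    size_t size_in_bytes) {
  if (limit - *top < size_in_bytes) {
    return 0;  // stand-in for the Runtime::kAllocateInTargetSpace path
  }
  uintptr_t result = *top;
  *top += size_in_bytes;                // bump the allocation top
  return result + kModelHeapObjectTag;  // return the tagged address
}
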
+Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
+                                            AllocationFlags flags,
+                                            Node* top_address,
+                                            Node* limit_address) {
+  Node* top = Load(MachineType::Pointer(), top_address);
+  Node* limit = Load(MachineType::Pointer(), limit_address);
+  Variable adjusted_size(this, MachineType::PointerRepresentation());
+  adjusted_size.Bind(size_in_bytes);
+  if (flags & kDoubleAlignment) {
+    // TODO(epertoso): Simd128 alignment.
+    Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
+    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
+           &aligned);
+
+    Bind(&not_aligned);
+    Node* not_aligned_size =
+        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
+    adjusted_size.Bind(not_aligned_size);
+    Goto(&merge);
+
+    Bind(&aligned);
+    Goto(&merge);
+
+    Bind(&merge);
+  }
+
+  Variable address(this, MachineRepresentation::kTagged);
+  address.Bind(AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));
+
+  Label needs_filler(this), doesnt_need_filler(this),
+      merge_address(this, &address);
+  Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler,
+         &needs_filler);
+
+  Bind(&needs_filler);
+  // Store a filler and increase the address by kPointerSize.
+  // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
+  // it when Simd128 alignment is supported.
+  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
+                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
+  address.Bind(IntPtrAdd(address.value(), IntPtrConstant(kPointerSize)));
+  Goto(&merge_address);
+
+  Bind(&doesnt_need_filler);
+  Goto(&merge_address);
+
+  Bind(&merge_address);
+  // Update the top.
+  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
+                      IntPtrAdd(top, adjusted_size.value()));
+  return address.value();
+}
+
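Note: when top is not double-aligned, AllocateRawAligned grows the request by kPointerSize, lets AllocateRawUnaligned do the bump, plants the one-pointer filler map at the old top, and returns the address moved past the filler. The size adjustment in scalar form (sketch; assumes 8-byte double alignment and 4-byte pointers, i.e. a 32-bit host):

#include <cstddef>
#include <cstdint>

// Scalar model of the alignment adjustment above; illustration only.
constexpr uintptr_t kModelDoubleAlignmentMask = 8 - 1;  // 8-byte alignment
constexpr size_t kModelPointerSize = 4;                 // 32-bit host

size_t ModelAdjustedSize(uintptr_t top, size_t size_in_bytes) {
  // A misaligned top costs one extra pointer slot for the filler object.
  return (top & kModelDoubleAlignmentMask) != 0
             ? size_in_bytes + kModelPointerSize
             : size_in_bytes;
}
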
+Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
+  bool const new_space = !(flags & kPretenured);
+  Node* top_address = ExternalConstant(
+      new_space
+          ? ExternalReference::new_space_allocation_top_address(isolate())
+          : ExternalReference::old_space_allocation_top_address(isolate()));
+  Node* limit_address = ExternalConstant(
+      new_space
+          ? ExternalReference::new_space_allocation_limit_address(isolate())
+          : ExternalReference::old_space_allocation_limit_address(isolate()));
+
+#ifdef V8_HOST_ARCH_32_BIT
+  if (flags & kDoubleAlignment) {
+    return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address,
+                              limit_address);
+  }
+#endif
+
+  return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address,
+                              limit_address);
+}
+
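Note: Allocate selects the new- or old-space top/limit external references based on kPretenured, and only routes through the aligned path on 32-bit hosts, where doubles need manual 8-byte alignment. A hypothetical call site (not part of this CL; HeapNumber::kSize comes from V8 proper, and the flag combination assumes AllocationFlags values can be or-ed):

// Illustration only: a plain new-space allocation and a pretenured,
// double-aligned old-space one.
Node* result = Allocate(HeapNumber::kSize, kNone);
Node* tenured = Allocate(
    HeapNumber::kSize,
    static_cast<AllocationFlags>(kPretenured | kDoubleAlignment));
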
+Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
+  return IntPtrAdd(previous, IntPtrConstant(offset));
+}
+
 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                           MachineType rep) {
   return Load(rep, buffer, IntPtrConstant(offset));
 }

 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                          MachineType rep) {
   return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
 }

(...skipping 679 matching lines...)
 }

 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                         uint32_t mask) {
   return Word32Shr(Word32And(word32, Int32Constant(mask)),
                    Int32Constant(shift));
 }

 }  // namespace internal
 }  // namespace v8
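Note: BitFieldDecode is the standard mask-then-shift field extraction. A scalar model with a worked example (illustration only):

#include <cassert>
#include <cstdint>

// Scalar model of BitFieldDecode: extract (word32 & mask) >> shift.
uint32_t ModelBitFieldDecode(uint32_t word32, uint32_t shift, uint32_t mask) {
  return (word32 & mask) >> shift;
}

int main() {
  // A 3-bit field occupying bits 4..6: mask 0x70, shift 4.
  assert(ModelBitFieldDecode(0x50, 4, 0x70) == 5);
  return 0;
}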