Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(589)

Side by Side Diff: src/code-stub-assembler.cc

Issue 1893383002: Complete separation of CodeAssembler and CodeStubAssembler (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2016 the V8 project authors. All rights reserved. 1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/code-stub-assembler.h" 5 #include "src/code-stub-assembler.h"
6 #include "src/code-factory.h" 6 #include "src/code-factory.h"
7 7
8 namespace v8 { 8 namespace v8 {
9 namespace internal { 9 namespace internal {
10 10
(...skipping 198 matching lines...) Expand 10 before | Expand all | Expand 10 after
209 return var_x.value(); 209 return var_x.value();
210 } 210 }
211 211
212 Node* CodeStubAssembler::SmiFromWord32(Node* value) { 212 Node* CodeStubAssembler::SmiFromWord32(Node* value) {
213 if (Is64()) { 213 if (Is64()) {
214 value = ChangeInt32ToInt64(value); 214 value = ChangeInt32ToInt64(value);
215 } 215 }
216 return WordShl(value, SmiShiftBitsConstant()); 216 return WordShl(value, SmiShiftBitsConstant());
217 } 217 }
218 218
219 Node* CodeStubAssembler::SmiTag(Node* value) {
220 return WordShl(value, SmiShiftBitsConstant());
221 }
222
223 Node* CodeStubAssembler::SmiUntag(Node* value) {
224 return WordSar(value, SmiShiftBitsConstant());
225 }
226
219 Node* CodeStubAssembler::SmiToWord32(Node* value) { 227 Node* CodeStubAssembler::SmiToWord32(Node* value) {
220 Node* result = WordSar(value, SmiShiftBitsConstant()); 228 Node* result = WordSar(value, SmiShiftBitsConstant());
221 if (Is64()) { 229 if (Is64()) {
222 result = TruncateInt64ToInt32(result); 230 result = TruncateInt64ToInt32(result);
223 } 231 }
224 return result; 232 return result;
225 } 233 }
226 234
227 Node* CodeStubAssembler::SmiToFloat64(Node* value) { 235 Node* CodeStubAssembler::SmiToFloat64(Node* value) {
228 return ChangeInt32ToFloat64(SmiUntag(value)); 236 return ChangeInt32ToFloat64(SmiUntag(value));
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
271 279
272 Node* CodeStubAssembler::WordIsSmi(Node* a) { 280 Node* CodeStubAssembler::WordIsSmi(Node* a) {
273 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0)); 281 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0));
274 } 282 }
275 283
276 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) { 284 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
277 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)), 285 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
278 IntPtrConstant(0)); 286 IntPtrConstant(0));
279 } 287 }
280 288
// Bump-pointer allocates |size_in_bytes| bytes with no extra alignment.
// |top_address| and |limit_address| point at the allocation top/limit words
// of the target space; |flags| selects old vs. new space for the runtime
// fallback.  Returns a tagged pointer to the newly allocated object.
Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);

  // If there's not enough space, call the runtime.
  Variable result(this, MachineRepresentation::kTagged);
  Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
  Label merge_runtime(this, &result);

  Branch(IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes), &runtime_call,
         &no_runtime_call);

  Bind(&runtime_call);
  // AllocateInTargetSpace does not use the context.
  Node* context = IntPtrConstant(0);
  // Encode the runtime flags as a Smi: never double-aligned here, and the
  // target space derived from kPretenured.
  // NOTE(review): SmiTag is applied to an Int32Constant; on 64-bit targets
  // SmiTag shifts a word-sized value — confirm the 32-bit constant is
  // widened as intended here.
  Node* runtime_flags = SmiTag(Int32Constant(
      AllocateDoubleAlignFlag::encode(false) |
      AllocateTargetSpace::encode(flags & kPretenured
                                      ? AllocationSpace::OLD_SPACE
                                      : AllocationSpace::NEW_SPACE)));
  Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
                                     SmiTag(size_in_bytes), runtime_flags);
  result.Bind(runtime_result);
  Goto(&merge_runtime);

  // When there is enough space, return `top' and bump it up.
  Bind(&no_runtime_call);
  Node* no_runtime_result = top;
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, size_in_bytes));
  // Tag the raw pointer so the bump path yields a HeapObject pointer, like
  // the runtime path does.
  no_runtime_result =
      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag));
  result.Bind(no_runtime_result);
  Goto(&merge_runtime);

  Bind(&merge_runtime);
  return result.value();
}
330
// Bump-pointer allocates |size_in_bytes| bytes, double-aligning the result
// when kDoubleAlignment is set by over-allocating one pointer and placing a
// one-pointer filler before the object if the current top is misaligned.
// Returns a tagged pointer to the newly allocated object.
Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
                                            AllocationFlags flags,
                                            Node* top_address,
                                            Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);
  // adjusted_size is size_in_bytes, plus kPointerSize if a filler word is
  // needed to reach double alignment.
  Variable adjusted_size(this, MachineType::PointerRepresentation());
  adjusted_size.Bind(size_in_bytes);
  if (flags & kDoubleAlignment) {
    // TODO(epertoso): Simd128 alignment.
    Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
           &aligned);

    Bind(&not_aligned);
    Node* not_aligned_size =
        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
    adjusted_size.Bind(not_aligned_size);
    Goto(&merge);

    Bind(&aligned);
    Goto(&merge);
  }

  Variable address(this, MachineRepresentation::kTagged);
  // NOTE(review): AllocateRawUnaligned's last two parameters are named
  // top_address/limit_address and are Load()ed from, but the *loaded values*
  // `top`/`limit` are passed here — confirm this is intended and not a
  // pointer/value mix-up.
  address.Bind(AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));

  Label needs_filler(this), doesnt_need_filler(this),
      merge_address(this, &address);
  Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler,
         &needs_filler);

  Bind(&needs_filler);
  // Store a filler and increase the address by kPointerSize.
  // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
  // it when Simd128 alignment is supported.
  // NOTE(review): the filler is written at `top` and the result is bumped by
  // kPointerSize — this assumes the allocation above came from the bump path
  // starting at `top`; if the runtime fallback inside AllocateRawUnaligned
  // was taken this would not hold. Confirm.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
  address.Bind(IntPtrAdd(address.value(), IntPtrConstant(kPointerSize)));
  Goto(&merge_address);

  Bind(&doesnt_need_filler);
  Goto(&merge_address);

  Bind(&merge_address);
  // Update the top.
  // NOTE(review): AllocateRawUnaligned already bumps the top on its non-
  // runtime path; this second store appears redundant — verify.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, adjusted_size.value()));
  return address.value();
}
381
382 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
383 bool const new_space = !(flags & kPretenured);
384 Node* top_address = ExternalConstant(
385 new_space
386 ? ExternalReference::new_space_allocation_top_address(isolate())
387 : ExternalReference::old_space_allocation_top_address(isolate()));
388 Node* limit_address = ExternalConstant(
389 new_space
390 ? ExternalReference::new_space_allocation_limit_address(isolate())
391 : ExternalReference::old_space_allocation_limit_address(isolate()));
392
393 #ifdef V8_HOST_ARCH_32_BIT
394 if (flags & kDoubleAlignment) {
395 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address,
396 limit_address);
397 }
398 #endif
399
400 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address,
401 limit_address);
402 }
403
404 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
405 return IntPtrAdd(previous, IntPtrConstant(offset));
406 }
407
281 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, 408 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
282 MachineType rep) { 409 MachineType rep) {
283 return Load(rep, buffer, IntPtrConstant(offset)); 410 return Load(rep, buffer, IntPtrConstant(offset));
284 } 411 }
285 412
286 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, 413 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
287 MachineType rep) { 414 MachineType rep) {
288 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); 415 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
289 } 416 }
290 417
(...skipping 679 matching lines...) Expand 10 before | Expand all | Expand 10 after
970 } 1097 }
971 1098
972 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift, 1099 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
973 uint32_t mask) { 1100 uint32_t mask) {
974 return Word32Shr(Word32And(word32, Int32Constant(mask)), 1101 return Word32Shr(Word32And(word32, Int32Constant(mask)),
975 Int32Constant(shift)); 1102 Int32Constant(shift));
976 } 1103 }
977 1104
978 } // namespace internal 1105 } // namespace internal
979 } // namespace v8 1106 } // namespace v8
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698