Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(89)

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 12114054: Supporting AllocationSiteInfo for Nested arrays (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Now with ports to arm and x64 Created 7 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 5426 matching lines...) Expand 10 before | Expand all | Expand 10 after
5437 // Load map into r2. 5437 // Load map into r2.
5438 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); 5438 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
5439 // Load the map's "bit field 2". 5439 // Load the map's "bit field 2".
5440 __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset)); 5440 __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
5441 // Retrieve elements_kind from bit field 2. 5441 // Retrieve elements_kind from bit field 2.
5442 __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount); 5442 __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount);
5443 __ cmp(r2, Operand(boilerplate_elements_kind)); 5443 __ cmp(r2, Operand(boilerplate_elements_kind));
5444 DeoptimizeIf(ne, instr->environment()); 5444 DeoptimizeIf(ne, instr->environment());
5445 } 5445 }
5446 5446
5447 int flags = allocation_site_mode == TRACK_ALLOCATION_SITE
5448 ? ArrayLiteral::kAllocationSiteInfoAllowed
5449 : ArrayLiteral::kNoFlags;
5450
5447 // Set up the parameters to the stub/runtime call. 5451 // Set up the parameters to the stub/runtime call.
5448 __ LoadHeapObject(r3, literals); 5452 __ LoadHeapObject(r4, literals);
5449 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); 5453 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
5450 // Boilerplate already exists, constant elements are never accessed. 5454 // Boilerplate already exists, constant elements are never accessed.
5451 // Pass an empty fixed array. 5455 // Pass an empty fixed array.
5452 __ mov(r1, Operand(isolate()->factory()->empty_fixed_array())); 5456 __ mov(r2, Operand(isolate()->factory()->empty_fixed_array()));
5453 __ Push(r3, r2, r1); 5457 __ mov(r1, Operand(Smi::FromInt(flags)));
5458 __ Push(r4, r3, r2, r1);
5454 5459
5455 // Pick the right runtime function or stub to call. 5460 // Pick the right runtime function or stub to call.
5456 int length = instr->hydrogen()->length(); 5461 int length = instr->hydrogen()->length();
5457 if (instr->hydrogen()->IsCopyOnWrite()) { 5462 if (instr->hydrogen()->IsCopyOnWrite()) {
5458 ASSERT(instr->hydrogen()->depth() == 1); 5463 ASSERT(instr->hydrogen()->depth() == 1);
5459 FastCloneShallowArrayStub::Mode mode = 5464 FastCloneShallowArrayStub::Mode mode =
5460 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; 5465 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
5461 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); 5466 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
5462 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 5467 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
5463 } else if (instr->hydrogen()->depth() > 1) { 5468 } else if (instr->hydrogen()->depth() > 1) {
5464 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); 5469 CallRuntime(Runtime::kCreateArrayLiteral, 4, instr);
5465 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 5470 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
5466 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); 5471 CallRuntime(Runtime::kCreateArrayLiteralShallow, 4, instr);
5467 } else { 5472 } else {
5468 FastCloneShallowArrayStub::Mode mode = 5473 FastCloneShallowArrayStub::Mode mode =
5469 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS 5474 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
5470 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS 5475 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
5471 : FastCloneShallowArrayStub::CLONE_ELEMENTS; 5476 : FastCloneShallowArrayStub::CLONE_ELEMENTS;
5472 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); 5477 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
5473 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 5478 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
5474 } 5479 }
5475 } 5480 }
5476 5481
5477 5482
5478 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, 5483 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
5484 Handle<JSObject> original_object,
5479 Register result, 5485 Register result,
5480 Register source, 5486 Register source,
5481 int* offset, 5487 int* offset,
5482 AllocationSiteMode mode) { 5488 AllocationSiteMode mode) {
5483 ASSERT(!source.is(r2)); 5489 ASSERT(!source.is(r2));
5484 ASSERT(!result.is(r2)); 5490 ASSERT(!result.is(r2));
5485 5491
5492 // Should we track allocation info for *this* object in the tree?
5486 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && 5493 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
5487 object->map()->CanTrackAllocationSite(); 5494 object->map()->CanTrackAllocationSite();
5488 5495
5496 if (create_allocation_site_info && object->IsJSArray()) {
5497 create_allocation_site_info = AllocationSiteInfo::GetMode(
5498 object->GetElementsKind()) == TRACK_ALLOCATION_SITE;
5499 }
5500
5489 // Only elements backing stores for non-COW arrays need to be copied. 5501 // Only elements backing stores for non-COW arrays need to be copied.
5490 Handle<FixedArrayBase> elements(object->elements()); 5502 Handle<FixedArrayBase> elements(object->elements());
5503 Handle<FixedArrayBase> original_elements(original_object->elements());
5491 bool has_elements = elements->length() > 0 && 5504 bool has_elements = elements->length() > 0 &&
5492 elements->map() != isolate()->heap()->fixed_cow_array_map(); 5505 elements->map() != isolate()->heap()->fixed_cow_array_map();
5493 5506
5494 // Increase the offset so that subsequent objects end up right after 5507 // Increase the offset so that subsequent objects end up right after
5495 // this object and its backing store. 5508 // this object and its backing store.
5496 int object_offset = *offset; 5509 int object_offset = *offset;
5497 int object_size = object->map()->instance_size(); 5510 int object_size = object->map()->instance_size();
5498 int elements_size = has_elements ? elements->Size() : 0; 5511 int elements_size = has_elements ? elements->Size() : 0;
5499 int elements_offset = *offset + object_size; 5512 int elements_offset = *offset + object_size;
5500 if (create_allocation_site_info) { 5513 if (create_allocation_site_info) {
(...skipping 15 matching lines...) Expand all
5516 } 5529 }
5517 __ str(r2, FieldMemOperand(result, object_offset + i)); 5530 __ str(r2, FieldMemOperand(result, object_offset + i));
5518 } 5531 }
5519 5532
5520 // Copy in-object properties. 5533 // Copy in-object properties.
5521 for (int i = 0; i < inobject_properties; i++) { 5534 for (int i = 0; i < inobject_properties; i++) {
5522 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); 5535 int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
5523 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i)); 5536 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
5524 if (value->IsJSObject()) { 5537 if (value->IsJSObject()) {
5525 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5538 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5539 Handle<JSObject> original_value_object = Handle<JSObject>::cast(
5540 Handle<Object>(original_object->InObjectPropertyAt(i)));
5541
5526 __ add(r2, result, Operand(*offset)); 5542 __ add(r2, result, Operand(*offset));
5527 __ str(r2, FieldMemOperand(result, total_offset)); 5543 __ str(r2, FieldMemOperand(result, total_offset));
5528 __ LoadHeapObject(source, value_object); 5544 __ LoadHeapObject(source, value_object);
5529 EmitDeepCopy(value_object, result, source, offset, 5545 EmitDeepCopy(value_object, original_value_object, result, source,
5530 DONT_TRACK_ALLOCATION_SITE); 5546 offset, mode);
5531 } else if (value->IsHeapObject()) { 5547 } else if (value->IsHeapObject()) {
5532 __ LoadHeapObject(r2, Handle<HeapObject>::cast(value)); 5548 __ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
5533 __ str(r2, FieldMemOperand(result, total_offset)); 5549 __ str(r2, FieldMemOperand(result, total_offset));
5534 } else { 5550 } else {
5535 __ mov(r2, Operand(value)); 5551 __ mov(r2, Operand(value));
5536 __ str(r2, FieldMemOperand(result, total_offset)); 5552 __ str(r2, FieldMemOperand(result, total_offset));
5537 } 5553 }
5538 } 5554 }
5539 5555
5540 // Build Allocation Site Info if desired 5556 // Build Allocation Site Info if desired
5541 if (create_allocation_site_info) { 5557 if (create_allocation_site_info) {
5542 __ mov(r2, Operand(Handle<Map>(isolate()->heap()-> 5558 __ mov(r2, Operand(Handle<Map>(isolate()->heap()->
5543 allocation_site_info_map()))); 5559 allocation_site_info_map())));
5544 __ str(r2, FieldMemOperand(result, object_size)); 5560 __ str(r2, FieldMemOperand(result, object_size + object_offset));
5545 __ str(source, FieldMemOperand(result, object_size + kPointerSize)); 5561 __ LoadHeapObject(r2, original_object);
5562 __ str(r2, FieldMemOperand(result,
5563 object_size + object_offset + kPointerSize));
5546 } 5564 }
5547 5565
5548 if (has_elements) { 5566 if (has_elements) {
5549 // Copy elements backing store header. 5567 // Copy elements backing store header.
5550 __ LoadHeapObject(source, elements); 5568 __ LoadHeapObject(source, elements);
5551 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { 5569 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
5552 __ ldr(r2, FieldMemOperand(source, i)); 5570 __ ldr(r2, FieldMemOperand(source, i));
5553 __ str(r2, FieldMemOperand(result, elements_offset + i)); 5571 __ str(r2, FieldMemOperand(result, elements_offset + i));
5554 } 5572 }
5555 5573
5556 // Copy elements backing store content. 5574 // Copy elements backing store content.
5557 int elements_length = has_elements ? elements->length() : 0; 5575 int elements_length = has_elements ? elements->length() : 0;
5558 if (elements->IsFixedDoubleArray()) { 5576 if (elements->IsFixedDoubleArray()) {
5559 Handle<FixedDoubleArray> double_array = 5577 Handle<FixedDoubleArray> double_array =
5560 Handle<FixedDoubleArray>::cast(elements); 5578 Handle<FixedDoubleArray>::cast(elements);
5561 for (int i = 0; i < elements_length; i++) { 5579 for (int i = 0; i < elements_length; i++) {
5562 int64_t value = double_array->get_representation(i); 5580 int64_t value = double_array->get_representation(i);
5563 // We only support little endian mode... 5581 // We only support little endian mode...
5564 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF); 5582 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF);
5565 int32_t value_high = static_cast<int32_t>(value >> 32); 5583 int32_t value_high = static_cast<int32_t>(value >> 32);
5566 int total_offset = 5584 int total_offset =
5567 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); 5585 elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
5568 __ mov(r2, Operand(value_low)); 5586 __ mov(r2, Operand(value_low));
5569 __ str(r2, FieldMemOperand(result, total_offset)); 5587 __ str(r2, FieldMemOperand(result, total_offset));
5570 __ mov(r2, Operand(value_high)); 5588 __ mov(r2, Operand(value_high));
5571 __ str(r2, FieldMemOperand(result, total_offset + 4)); 5589 __ str(r2, FieldMemOperand(result, total_offset + 4));
5572 } 5590 }
5573 } else if (elements->IsFixedArray()) { 5591 } else if (elements->IsFixedArray()) {
5574 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); 5592 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5593 Handle<FixedArray> original_fast_elements =
5594 Handle<FixedArray>::cast(original_elements);
5575 for (int i = 0; i < elements_length; i++) { 5595 for (int i = 0; i < elements_length; i++) {
5576 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); 5596 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
5577 Handle<Object> value(fast_elements->get(i)); 5597 Handle<Object> value(fast_elements->get(i));
5578 if (value->IsJSObject()) { 5598 if (value->IsJSObject()) {
5579 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5599 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5600 Handle<JSObject> original_value_object = Handle<JSObject>::cast(
5601 Handle<Object>(original_fast_elements->get(i)));
5580 __ add(r2, result, Operand(*offset)); 5602 __ add(r2, result, Operand(*offset));
5581 __ str(r2, FieldMemOperand(result, total_offset)); 5603 __ str(r2, FieldMemOperand(result, total_offset));
5582 __ LoadHeapObject(source, value_object); 5604 __ LoadHeapObject(source, value_object);
5583 EmitDeepCopy(value_object, result, source, offset, 5605
5584 DONT_TRACK_ALLOCATION_SITE); 5606 // TODO(mvstanton): do we have to worry that the original object
5607 // changed from a fixed array to a FixedDoubleArray? If that happened
5608 // then the original_value_object expression might point to garbage
5609 // memory, right?
danno 2013/02/08 13:44:38 No, I don't think so, since if the array already c
mvstanton 2013/02/11 11:11:24 Good point, thx.
5610 ASSERT(!value_object.is_identical_to(original_value_object));
5611 EmitDeepCopy(value_object, original_value_object, result, source,
5612 offset, mode);
5585 } else if (value->IsHeapObject()) { 5613 } else if (value->IsHeapObject()) {
5586 __ LoadHeapObject(r2, Handle<HeapObject>::cast(value)); 5614 __ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
5587 __ str(r2, FieldMemOperand(result, total_offset)); 5615 __ str(r2, FieldMemOperand(result, total_offset));
5588 } else { 5616 } else {
5589 __ mov(r2, Operand(value)); 5617 __ mov(r2, Operand(value));
5590 __ str(r2, FieldMemOperand(result, total_offset)); 5618 __ str(r2, FieldMemOperand(result, total_offset));
5591 } 5619 }
5592 } 5620 }
5593 } else { 5621 } else {
5594 UNREACHABLE(); 5622 UNREACHABLE();
5595 } 5623 }
5596 } 5624 }
5597 } 5625 }
5598 5626
5599 5627
5600 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { 5628 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
5601 int size = instr->hydrogen()->total_size(); 5629 int size = instr->hydrogen()->total_size();
5602 ElementsKind boilerplate_elements_kind =
5603 instr->hydrogen()->boilerplate()->GetElementsKind();
5604 5630
5605 // Deopt if the array literal boilerplate ElementsKind is of a type different
5606 // than the expected one. The check isn't necessary if the boilerplate has
5607 // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
5608 if (CanTransitionToMoreGeneralFastElementsKind(
5609 boilerplate_elements_kind, true)) {
5610 __ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
5611 // Load map into r2.
5612 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
5613 // Load the map's "bit field 2".
5614 __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
5615 // Retrieve elements_kind from bit field 2.
5616 __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount);
5617 __ cmp(r2, Operand(boilerplate_elements_kind));
5618 DeoptimizeIf(ne, instr->environment());
5619 }
5620
5621 // Allocate all objects that are part of the literal in one big
5622 // allocation. This avoids multiple limit checks. 5631 // allocation. This avoids multiple limit checks.
5623 Label allocated, runtime_allocate; 5632 Label allocated, runtime_allocate;
5624 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); 5633 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
5625 __ jmp(&allocated); 5634 __ jmp(&allocated);
5626 5635
5627 __ bind(&runtime_allocate); 5636 __ bind(&runtime_allocate);
5628 __ mov(r0, Operand(Smi::FromInt(size))); 5637 __ mov(r0, Operand(Smi::FromInt(size)));
5629 __ push(r0); 5638 __ push(r0);
5630 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5639 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5631 5640
5632 __ bind(&allocated); 5641 __ bind(&allocated);
5633 int offset = 0; 5642 int offset = 0;
5634 __ LoadHeapObject(r1, instr->hydrogen()->boilerplate()); 5643 __ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
5635 EmitDeepCopy(instr->hydrogen()->boilerplate(), r0, r1, &offset, 5644 EmitDeepCopy(instr->hydrogen()->boilerplate(),
5645 instr->hydrogen()->original_boilerplate(),
5646 r0, r1, &offset,
5636 instr->hydrogen()->allocation_site_mode()); 5647 instr->hydrogen()->allocation_site_mode());
5637 ASSERT_EQ(size, offset); 5648 ASSERT_EQ(size, offset);
5638 } 5649 }
5639 5650
5640 5651
5641 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { 5652 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
5642 Handle<FixedArray> literals(instr->environment()->closure()->literals()); 5653 Handle<FixedArray> literals(instr->environment()->closure()->literals());
5643 Handle<FixedArray> constant_properties = 5654 Handle<FixedArray> constant_properties =
5644 instr->hydrogen()->constant_properties(); 5655 instr->hydrogen()->constant_properties();
5645 5656
5646 // Set up the parameters to the stub/runtime call. 5657 // Set up the parameters to the stub/runtime call.
5647 __ LoadHeapObject(r4, literals); 5658 __ LoadHeapObject(r4, literals);
5648 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); 5659 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
5649 __ mov(r2, Operand(constant_properties)); 5660 __ mov(r2, Operand(constant_properties));
5650 int flags = instr->hydrogen()->fast_elements() 5661 int flags = instr->hydrogen()->fast_elements()
5651 ? ObjectLiteral::kFastElements 5662 ? ObjectLiteral::kFastElements
5652 : ObjectLiteral::kNoFlags; 5663 : ObjectLiteral::kNoFlags;
5664
5665 if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) {
5666 flags |= ObjectLiteral::kAllocationSiteInfoAllowed;
5667 }
5668
5653 __ mov(r1, Operand(Smi::FromInt(flags))); 5669 __ mov(r1, Operand(Smi::FromInt(flags)));
5654 __ Push(r4, r3, r2, r1); 5670 __ Push(r4, r3, r2, r1);
5655 5671
5656 // Pick the right runtime function or stub to call. 5672 // Pick the right runtime function or stub to call.
5657 int properties_count = constant_properties->length() / 2; 5673 int properties_count = constant_properties->length() / 2;
5658 if (instr->hydrogen()->depth() > 1) { 5674 if (instr->hydrogen()->depth() > 1) {
5659 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 5675 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
5660 } else if (flags != ObjectLiteral::kFastElements || 5676 } else if (flags != ObjectLiteral::kFastElements ||
5661 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 5677 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
5662 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); 5678 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
(...skipping 442 matching lines...) Expand 10 before | Expand all | Expand 10 after
6105 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); 6121 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
6106 __ ldr(result, FieldMemOperand(scratch, 6122 __ ldr(result, FieldMemOperand(scratch,
6107 FixedArray::kHeaderSize - kPointerSize)); 6123 FixedArray::kHeaderSize - kPointerSize));
6108 __ bind(&done); 6124 __ bind(&done);
6109 } 6125 }
6110 6126
6111 6127
6112 #undef __ 6128 #undef __
6113 6129
6114 } } // namespace v8::internal 6130 } } // namespace v8::internal
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698