Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(141)

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 12114054: Supporting AllocationSiteInfo for Nested arrays (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressing a port compile failure Created 7 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/lithium-codegen-x64.h ('k') | test/mjsunit/allocation-site-info.js » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 5157 matching lines...) Expand 10 before | Expand all | Expand 10 after
5168 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); 5168 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
5169 // Load the map's "bit field 2". 5169 // Load the map's "bit field 2".
5170 __ movb(rbx, FieldOperand(rbx, Map::kBitField2Offset)); 5170 __ movb(rbx, FieldOperand(rbx, Map::kBitField2Offset));
5171 // Retrieve elements_kind from bit field 2. 5171 // Retrieve elements_kind from bit field 2.
5172 __ and_(rbx, Immediate(Map::kElementsKindMask)); 5172 __ and_(rbx, Immediate(Map::kElementsKindMask));
5173 __ cmpb(rbx, Immediate(boilerplate_elements_kind << 5173 __ cmpb(rbx, Immediate(boilerplate_elements_kind <<
5174 Map::kElementsKindShift)); 5174 Map::kElementsKindShift));
5175 DeoptimizeIf(not_equal, instr->environment()); 5175 DeoptimizeIf(not_equal, instr->environment());
5176 } 5176 }
5177 5177
5178 int flags = allocation_site_mode == TRACK_ALLOCATION_SITE
5179 ? ArrayLiteral::kCreateAllocationSiteInfos
5180 : ArrayLiteral::kNoFlags;
5181
5178 // Set up the parameters to the stub/runtime call. 5182 // Set up the parameters to the stub/runtime call.
5179 __ PushHeapObject(literals); 5183 __ PushHeapObject(literals);
5180 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); 5184 __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
5181 // Boilerplate already exists, constant elements are never accessed. 5185 // Boilerplate already exists, constant elements are never accessed.
5182 // Pass an empty fixed array. 5186 // Pass an empty fixed array.
5183 __ Push(isolate()->factory()->empty_fixed_array()); 5187 __ Push(isolate()->factory()->empty_fixed_array());
5184 5188
5185 // Pick the right runtime function or stub to call. 5189 // Pick the right runtime function or stub to call.
5186 int length = instr->hydrogen()->length(); 5190 int length = instr->hydrogen()->length();
5187 if (instr->hydrogen()->IsCopyOnWrite()) { 5191 if (instr->hydrogen()->IsCopyOnWrite()) {
5188 ASSERT(instr->hydrogen()->depth() == 1); 5192 ASSERT(instr->hydrogen()->depth() == 1);
5189 FastCloneShallowArrayStub::Mode mode = 5193 FastCloneShallowArrayStub::Mode mode =
5190 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; 5194 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
5191 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); 5195 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
5192 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5196 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
5193 } else if (instr->hydrogen()->depth() > 1) { 5197 } else if (instr->hydrogen()->depth() > 1) {
5194 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); 5198 __ Push(Smi::FromInt(flags));
5199 CallRuntime(Runtime::kCreateArrayLiteral, 4, instr);
5195 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 5200 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
5196 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); 5201 __ Push(Smi::FromInt(flags));
5202 CallRuntime(Runtime::kCreateArrayLiteralShallow, 4, instr);
5197 } else { 5203 } else {
5198 FastCloneShallowArrayStub::Mode mode = 5204 FastCloneShallowArrayStub::Mode mode =
5199 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS 5205 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
5200 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS 5206 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
5201 : FastCloneShallowArrayStub::CLONE_ELEMENTS; 5207 : FastCloneShallowArrayStub::CLONE_ELEMENTS;
5202 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); 5208 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
5203 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5209 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
5204 } 5210 }
5205 } 5211 }
5206 5212
5207 5213
5208 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, 5214 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
5215 Handle<JSObject> original_object,
5209 Register result, 5216 Register result,
5210 Register source, 5217 Register source,
5211 int* offset, 5218 int* offset,
5212 AllocationSiteMode mode) { 5219 AllocationSiteMode mode) {
5213 ASSERT(!source.is(rcx)); 5220 ASSERT(!source.is(rcx));
5214 ASSERT(!result.is(rcx)); 5221 ASSERT(!result.is(rcx));
5215 5222
5223 // Should we track allocation info for *this* object in the tree?
5216 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && 5224 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
5217 object->map()->CanTrackAllocationSite(); 5225 object->ShouldTrackAllocationInfo();
5218 5226
5219 // Only elements backing stores for non-COW arrays need to be copied. 5227 // Only elements backing stores for non-COW arrays need to be copied.
5220 Handle<FixedArrayBase> elements(object->elements()); 5228 Handle<FixedArrayBase> elements(object->elements());
5229 Handle<FixedArrayBase> original_elements(original_object->elements());
5221 bool has_elements = elements->length() > 0 && 5230 bool has_elements = elements->length() > 0 &&
5222 elements->map() != isolate()->heap()->fixed_cow_array_map(); 5231 elements->map() != isolate()->heap()->fixed_cow_array_map();
5223 5232
5224 // Increase the offset so that subsequent objects end up right after 5233 // Increase the offset so that subsequent objects end up right after
5225 // this object and its backing store. 5234 // this object and its backing store.
5226 int object_offset = *offset; 5235 int object_offset = *offset;
5227 int object_size = object->map()->instance_size(); 5236 int object_size = object->map()->instance_size();
5228 int elements_size = has_elements ? elements->Size() : 0; 5237 int elements_size = has_elements ? elements->Size() : 0;
5229 int elements_offset = *offset + object_size; 5238 int elements_offset = *offset + object_size;
5230 if (create_allocation_site_info) { 5239 if (create_allocation_site_info) {
(...skipping 16 matching lines...) Expand all
5247 __ movq(FieldOperand(result, object_offset + i), rcx); 5256 __ movq(FieldOperand(result, object_offset + i), rcx);
5248 } 5257 }
5249 5258
5250 // Copy in-object properties. 5259 // Copy in-object properties.
5251 for (int i = 0; i < inobject_properties; i++) { 5260 for (int i = 0; i < inobject_properties; i++) {
5252 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); 5261 int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
5253 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i), 5262 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i),
5254 isolate()); 5263 isolate());
5255 if (value->IsJSObject()) { 5264 if (value->IsJSObject()) {
5256 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5265 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5266 Handle<JSObject> original_value_object = Handle<JSObject>::cast(
5267 Handle<Object>(original_object->InObjectPropertyAt(i), isolate()));
5268
5257 __ lea(rcx, Operand(result, *offset)); 5269 __ lea(rcx, Operand(result, *offset));
5258 __ movq(FieldOperand(result, total_offset), rcx); 5270 __ movq(FieldOperand(result, total_offset), rcx);
5259 __ LoadHeapObject(source, value_object); 5271 __ LoadHeapObject(source, value_object);
5260 EmitDeepCopy(value_object, result, source, offset, 5272 EmitDeepCopy(value_object, original_value_object, result, source,
5261 DONT_TRACK_ALLOCATION_SITE); 5273 offset, mode);
5262 } else if (value->IsHeapObject()) { 5274 } else if (value->IsHeapObject()) {
5263 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value)); 5275 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
5264 __ movq(FieldOperand(result, total_offset), rcx); 5276 __ movq(FieldOperand(result, total_offset), rcx);
5265 } else { 5277 } else {
5266 __ movq(rcx, value, RelocInfo::NONE64); 5278 __ movq(rcx, value, RelocInfo::NONE64);
5267 __ movq(FieldOperand(result, total_offset), rcx); 5279 __ movq(FieldOperand(result, total_offset), rcx);
5268 } 5280 }
5269 } 5281 }
5270 5282
5271 // Build Allocation Site Info if desired 5283 // Build Allocation Site Info if desired
5272 if (create_allocation_site_info) { 5284 if (create_allocation_site_info) {
5273 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); 5285 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
5274 __ movq(FieldOperand(result, object_size), kScratchRegister); 5286 __ movq(FieldOperand(result, object_size + object_offset),
5275 __ movq(FieldOperand(result, object_size + kPointerSize), source); 5287 kScratchRegister);
5288 __ LoadHeapObject(rcx, original_object);
5289 __ movq(FieldOperand(result, object_size + object_offset + kPointerSize),
5290 rcx);
5276 } 5291 }
5277 5292
5278 if (has_elements) { 5293 if (has_elements) {
5279 // Copy elements backing store header. 5294 // Copy elements backing store header.
5280 __ LoadHeapObject(source, elements); 5295 __ LoadHeapObject(source, elements);
5281 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { 5296 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
5282 __ movq(rcx, FieldOperand(source, i)); 5297 __ movq(rcx, FieldOperand(source, i));
5283 __ movq(FieldOperand(result, elements_offset + i), rcx); 5298 __ movq(FieldOperand(result, elements_offset + i), rcx);
5284 } 5299 }
5285 5300
5286 // Copy elements backing store content. 5301 // Copy elements backing store content.
5287 int elements_length = elements->length(); 5302 int elements_length = elements->length();
5288 if (elements->IsFixedDoubleArray()) { 5303 if (elements->IsFixedDoubleArray()) {
5289 Handle<FixedDoubleArray> double_array = 5304 Handle<FixedDoubleArray> double_array =
5290 Handle<FixedDoubleArray>::cast(elements); 5305 Handle<FixedDoubleArray>::cast(elements);
5291 for (int i = 0; i < elements_length; i++) { 5306 for (int i = 0; i < elements_length; i++) {
5292 int64_t value = double_array->get_representation(i); 5307 int64_t value = double_array->get_representation(i);
5293 int total_offset = 5308 int total_offset =
5294 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); 5309 elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
5295 __ movq(rcx, value, RelocInfo::NONE64); 5310 __ movq(rcx, value, RelocInfo::NONE64);
5296 __ movq(FieldOperand(result, total_offset), rcx); 5311 __ movq(FieldOperand(result, total_offset), rcx);
5297 } 5312 }
5298 } else if (elements->IsFixedArray()) { 5313 } else if (elements->IsFixedArray()) {
5299 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); 5314 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5315 ASSERT(original_object->HasFastObjectElements());
5316 Handle<FixedArray> original_fast_elements =
5317 Handle<FixedArray>::cast(original_elements);
5300 for (int i = 0; i < elements_length; i++) { 5318 for (int i = 0; i < elements_length; i++) {
5301 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); 5319 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
5302 Handle<Object> value(fast_elements->get(i), isolate()); 5320 Handle<Object> value(fast_elements->get(i), isolate());
5303 if (value->IsJSObject()) { 5321 if (value->IsJSObject()) {
5304 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5322 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5323 Handle<JSObject> original_value_object = Handle<JSObject>::cast(
5324 Handle<Object>(original_fast_elements->get(i), isolate()));
5305 __ lea(rcx, Operand(result, *offset)); 5325 __ lea(rcx, Operand(result, *offset));
5306 __ movq(FieldOperand(result, total_offset), rcx); 5326 __ movq(FieldOperand(result, total_offset), rcx);
5307 __ LoadHeapObject(source, value_object); 5327 __ LoadHeapObject(source, value_object);
5308 EmitDeepCopy(value_object, result, source, offset, 5328 ASSERT(!value_object.is_identical_to(original_value_object));
5309 DONT_TRACK_ALLOCATION_SITE); 5329 EmitDeepCopy(value_object, original_value_object, result, source,
5330 offset, mode);
5310 } else if (value->IsHeapObject()) { 5331 } else if (value->IsHeapObject()) {
5311 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value)); 5332 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
5312 __ movq(FieldOperand(result, total_offset), rcx); 5333 __ movq(FieldOperand(result, total_offset), rcx);
5313 } else { 5334 } else {
5314 __ movq(rcx, value, RelocInfo::NONE64); 5335 __ movq(rcx, value, RelocInfo::NONE64);
5315 __ movq(FieldOperand(result, total_offset), rcx); 5336 __ movq(FieldOperand(result, total_offset), rcx);
5316 } 5337 }
5317 } 5338 }
5318 } else { 5339 } else {
5319 UNREACHABLE(); 5340 UNREACHABLE();
5320 } 5341 }
5321 } 5342 }
5322 } 5343 }
5323 5344
5324 5345
5325 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { 5346 void LCodeGen::FastLiteralHelper(LFastLiteral* instr, AllocationSiteMode mode) {
5326 int size = instr->hydrogen()->total_size(); 5347 int size = mode == DONT_TRACK_ALLOCATION_SITE
5327 ElementsKind boilerplate_elements_kind = 5348 ? instr->hydrogen()->size_without_allocation_sites()
5328 instr->hydrogen()->boilerplate()->GetElementsKind(); 5349 : instr->hydrogen()->total_size();
5329
5330 // Deopt if the array literal boilerplate ElementsKind is of a type different
5331 // than the expected one. The check isn't necessary if the boilerplate has
5332 // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
5333 if (CanTransitionToMoreGeneralFastElementsKind(
5334 boilerplate_elements_kind, true)) {
5335 __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
5336 __ movq(rcx, FieldOperand(rbx, HeapObject::kMapOffset));
5337 // Load the map's "bit field 2".
5338 __ movb(rcx, FieldOperand(rcx, Map::kBitField2Offset));
5339 // Retrieve elements_kind from bit field 2.
5340 __ and_(rcx, Immediate(Map::kElementsKindMask));
5341 __ cmpb(rcx, Immediate(boilerplate_elements_kind <<
5342 Map::kElementsKindShift));
5343 DeoptimizeIf(not_equal, instr->environment());
5344 }
5345 5350
5346 // Allocate all objects that are part of the literal in one big 5351 // Allocate all objects that are part of the literal in one big
5347 // allocation. This avoids multiple limit checks. 5352 // allocation. This avoids multiple limit checks.
5348 Label allocated, runtime_allocate; 5353 Label allocated, runtime_allocate;
5349 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); 5354 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
5350 __ jmp(&allocated); 5355 __ jmp(&allocated);
5351 5356
5352 __ bind(&runtime_allocate); 5357 __ bind(&runtime_allocate);
5353 __ Push(Smi::FromInt(size)); 5358 __ Push(Smi::FromInt(size));
5354 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5359 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5355 5360
5356 __ bind(&allocated); 5361 __ bind(&allocated);
5357 int offset = 0; 5362 int offset = 0;
5358 __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate()); 5363 __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
5359 EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset, 5364 EmitDeepCopy(instr->hydrogen()->boilerplate(),
5365 instr->hydrogen()->original_boilerplate(),
5366 rax, rbx, &offset,
5360 instr->hydrogen()->allocation_site_mode()); 5367 instr->hydrogen()->allocation_site_mode());
5361 ASSERT_EQ(size, offset); 5368 ASSERT_EQ(size, offset);
5362 } 5369 }
5363 5370
5371 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
5372 // TODO(mvstanton): Revisit this heuristic as site info matures.
5373 // If allocation site mode is on, then we need the ability to turn it off
5374 // after "awhile." Later, better options should be available, but for
5375 // now just allow a certain number of gcs to pass.
5376 if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) {
5377 // How many gcs have passed?
5378 const int maxCount = 3;
5379 ExternalReference gc_count_address =
5380 ExternalReference::gc_count_address(isolate());
5381 Label continue_using, done;
5382 __ movl(rax, masm()->ExternalOperand(gc_count_address));
5383 __ cmpl(rax, Immediate(maxCount));
5384 __ j(less, &continue_using);
5385 FastLiteralHelper(instr, DONT_TRACK_ALLOCATION_SITE);
5386 __ jmp(&done);
5387 __ bind(&continue_using);
5388 FastLiteralHelper(instr, TRACK_ALLOCATION_SITE);
5389 __ bind(&done);
5390 } else {
5391 FastLiteralHelper(instr, DONT_TRACK_ALLOCATION_SITE);
5392 }
5393 }
5394
5364 5395
5365 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { 5396 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
5366 Handle<FixedArray> literals(instr->environment()->closure()->literals()); 5397 Handle<FixedArray> literals(instr->environment()->closure()->literals());
5367 Handle<FixedArray> constant_properties = 5398 Handle<FixedArray> constant_properties =
5368 instr->hydrogen()->constant_properties(); 5399 instr->hydrogen()->constant_properties();
5369 5400
5370 int flags = instr->hydrogen()->fast_elements() 5401 int flags = instr->hydrogen()->fast_elements()
5371 ? ObjectLiteral::kFastElements 5402 ? ObjectLiteral::kFastElements
5372 : ObjectLiteral::kNoFlags; 5403 : ObjectLiteral::kNoFlags;
5373 flags |= instr->hydrogen()->has_function() 5404 flags |= instr->hydrogen()->has_function()
5374 ? ObjectLiteral::kHasFunction 5405 ? ObjectLiteral::kHasFunction
5375 : ObjectLiteral::kNoFlags; 5406 : ObjectLiteral::kNoFlags;
5376 5407
5408 if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) {
5409 flags |= ObjectLiteral::kCreateAllocationSiteInfos;
5410 }
5411
5377 // Set up the parameters to the stub/runtime call and pick the right 5412 // Set up the parameters to the stub/runtime call and pick the right
5378 // runtime function or stub to call. 5413 // runtime function or stub to call.
5379 int properties_count = constant_properties->length() / 2; 5414 int properties_count = constant_properties->length() / 2;
5380 if (instr->hydrogen()->depth() > 1) { 5415 if (instr->hydrogen()->depth() > 1) {
5381 __ PushHeapObject(literals); 5416 __ PushHeapObject(literals);
5382 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); 5417 __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
5383 __ Push(constant_properties); 5418 __ Push(constant_properties);
5384 __ Push(Smi::FromInt(flags)); 5419 __ Push(Smi::FromInt(flags));
5385 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 5420 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
5386 } else if (flags != ObjectLiteral::kFastElements || 5421 } else if (flags != ObjectLiteral::kFastElements ||
(...skipping 460 matching lines...) Expand 10 before | Expand all | Expand 10 after
5847 FixedArray::kHeaderSize - kPointerSize)); 5882 FixedArray::kHeaderSize - kPointerSize));
5848 __ bind(&done); 5883 __ bind(&done);
5849 } 5884 }
5850 5885
5851 5886
5852 #undef __ 5887 #undef __
5853 5888
5854 } } // namespace v8::internal 5889 } } // namespace v8::internal
5855 5890
5856 #endif // V8_TARGET_ARCH_X64 5891 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/lithium-codegen-x64.h ('k') | test/mjsunit/allocation-site-info.js » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698