Chromium Code Reviews
| Index: src/x64/lithium-codegen-x64.cc |
| diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc |
| index d8e392147684ec6e2ab4054a624bba899e92f28b..681bff7ba0d55697735e6198c5aa993bc53551b8 100644 |
| --- a/src/x64/lithium-codegen-x64.cc |
| +++ b/src/x64/lithium-codegen-x64.cc |
| @@ -2894,6 +2894,210 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
| } |
| +#ifdef DEBUG |
| +void LCodeGen::EmitCheckMap(Register object, Handle<Map> map) { |
| + __ CompareMap(object, map); |
| + __ Check(equal, kObjectAccessCheckFailed); |
| +} |
| + |
| + |
| +void LCodeGen::EmitCheckInstanceType(Register object, InstanceType type, |
| + Condition condition) { |
| + __ movp(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset)); |
| + __ CmpInstanceType(kScratchRegister, type); |
| + __ Check(condition, kObjectAccessCheckFailed); |
| +} |
| + |
| + |
| +void LCodeGen::EmitObjectAccessChecks(HObjectAccess* access, |
| + Register object, |
| + bool is_store) { |
| + if (!FLAG_debug_code) return; |
| + Comment(";;; HObjectAccess check <%d>", static_cast<int>(access->purpose())); |
| + switch (access->purpose()) { |
| + case HObjectAccess::FOR_HEAP_NUMBER_VALUE: |
| + case HObjectAccess::FOR_HEAP_NUMBER_VALUE_HIGHEST_BITS: |
| + case HObjectAccess::FOR_HEAP_NUMBER_VALUE_LOWEST_BITS: |
| + EmitCheckMap(object, isolate()->factory()->heap_number_map()); |
| + break; |
| + |
| + case HObjectAccess::FOR_ALLOCATION_MEMENTO_SITE: |
| + EmitCheckMap(object, isolate()->factory()->allocation_memento_map()); |
| + break; |
| + |
| + case HObjectAccess::FOR_ALLOCATION_SITE_OFFSET: |
| + EmitCheckMap(object, isolate()->factory()->allocation_site_map()); |
| + break; |
| + |
| + case HObjectAccess::FOR_CODE_OFFSET: |
| + case HObjectAccess::FOR_OPTIMIZED_CODE_MAP: |
| + EmitCheckMap(object, isolate()->factory()->shared_function_info_map()); |
| + break; |
| + |
| + case HObjectAccess::FOR_MAP_INSTANCE_SIZE: |
| + case HObjectAccess::FOR_MAP_INSTANCE_TYPE: |
| + EmitCheckMap(object, isolate()->factory()->meta_map()); |
| + break; |
| + |
| + case HObjectAccess::FOR_CELL_VALUE: |
| + case HObjectAccess::FOR_PROPERTY_CELL_VALUE: |
| + EmitCheckMap(object, isolate()->factory()->cell_map()); |
| + break; |
| + |
| + case HObjectAccess::FOR_CONTEXT_SLOT: |
| + EmitCheckInstanceType(object, FIXED_ARRAY_TYPE); |
| + break; |
| + |
| + case HObjectAccess::FOR_CODE_ENTRY_POINTER: |
| + case HObjectAccess::FOR_FUNCTION_CONTEXT_POINTER: |
| + case HObjectAccess::FOR_LITERALS_POINTER: |
| + case HObjectAccess::FOR_NEXT_FUNCTION_LINK_POINTER: |
| + case HObjectAccess::FOR_PROTOTYPE_OR_INITIAL_MAP: |
| + case HObjectAccess::FOR_SHARED_FUNCTION_INFO_POINTER: |
| + EmitCheckInstanceType(object, JS_FUNCTION_TYPE); |
| + break; |
| + |
| + case HObjectAccess::FOR_ARRAY_LENGTH: |
| + case HObjectAccess::FOR_JSARRAY_OFFSET: |
| + EmitCheckInstanceType(object, JS_ARRAY_TYPE); |
| + break; |
| + |
| + case HObjectAccess::FOR_JSTYPEDARRAY_LENGTH: |
| + EmitCheckInstanceType(object, JS_TYPED_ARRAY_TYPE); |
| + break; |
| + |
| + case HObjectAccess::FOR_JSARRAYBUFFER_BACKING_STORE: |
| + case HObjectAccess::FOR_JSARRAYBUFFER_BYTE_LENGTH: |
| + case HObjectAccess::FOR_JSARRAYBUFFER_WEAK_FIRST_VIEW: |
| + EmitCheckInstanceType(object, JS_ARRAY_BUFFER_TYPE); |
| + break; |
| + |
| + case HObjectAccess::FOR_STRING_HASH_FIELD: |
| + case HObjectAccess::FOR_STRING_LENGTH: |
| + EmitCheckInstanceType(object, FIRST_NONSTRING_TYPE, below); |
| + break; |
| + |
| + case HObjectAccess::FOR_ELEMENTS_POINTER: |
| + case HObjectAccess::FOR_PROPERTIES_POINTER: |
| + EmitCheckInstanceType(object, FIRST_JS_OBJECT_TYPE, above_equal); |
| + break; |
| + |
| + case HObjectAccess::FOR_JSARRAYBUFFERVIEW_BUFFER: |
| + case HObjectAccess::FOR_JSARRAYBUFFERVIEW_BYTE_LENGTH: |
| + case HObjectAccess::FOR_JSARRAYBUFFERVIEW_BYTE_OFFSET: |
| + case HObjectAccess::FOR_JSARRAYBUFFERVIEW_WEAK_NEXT: { |
| + Label ok; |
| + __ movp(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset)); |
| + __ CmpInstanceType(kScratchRegister, JS_TYPED_ARRAY_TYPE); |
| + __ j(equal, &ok, Label::kNear); |
| + __ CmpInstanceType(kScratchRegister, JS_DATA_VIEW_TYPE); |
| + __ j(equal, &ok, Label::kNear); |
| + __ Abort(kObjectAccessCheckFailed); |
| + __ bind(&ok); |
| + break; |
| + } |
| + case HObjectAccess::FOR_EXTERNAL_ARRAY_EXTERNAL_POINTER: { |
| + Label ok, bad; |
| + __ movp(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset)); |
| + __ CmpInstanceType(kScratchRegister, FIRST_EXTERNAL_ARRAY_TYPE); |
| + __ j(below, &bad, Label::kNear); |
| + __ CmpInstanceType(kScratchRegister, LAST_EXTERNAL_ARRAY_TYPE); |
| + __ j(below_equal, &ok, Label::kNear); |
| + __ bind(&bad); |
| + __ Abort(kObjectAccessCheckFailed); |
| + __ bind(&ok); |
| + break; |
| + } |
| + case HObjectAccess::FOR_FIXED_ARRAY_HEADER: |
| + if (access->offset() == 0) break; // Setting the map. |
| + // Else fall through. |
| + case HObjectAccess::FOR_FIXED_ARRAY_LENGTH: { |
| + Label ok, bad; |
| + __ movp(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset)); |
| + __ CmpInstanceType(kScratchRegister, FIRST_FIXED_ARRAY_TYPE); |
| + __ j(below, &bad, Label::kNear); |
| + __ CmpInstanceType(kScratchRegister, LAST_FIXED_ARRAY_TYPE); |
| + __ j(below_equal, &ok, Label::kNear); |
| + __ bind(&bad); |
| + __ Abort(kObjectAccessCheckFailed); |
| + __ bind(&ok); |
| + break; |
| + } |
| + case HObjectAccess::FOR_CONS_STRING_FIRST: |
| + case HObjectAccess::FOR_CONS_STRING_SECOND: { |
| + Label ok; |
| + __ CompareMap(object, isolate()->factory()->cons_string_map()); |
| + __ j(equal, &ok, Label::kNear); |
| + __ CompareMap(object, isolate()->factory()->cons_ascii_string_map()); |
| + __ j(equal, &ok, Label::kNear); |
| + __ Abort(kObjectAccessCheckFailed); |
| + __ bind(&ok); |
| + break; |
| + } |
| + |
| + case HObjectAccess::FOR_CELL_PAYLOAD: { |
| + Label ok; |
| + __ CompareMap(object, isolate()->factory()->cell_map()); |
| + __ j(equal, &ok, Label::kNear); |
| + __ CompareMap(object, isolate()->factory()->global_property_cell_map()); |
| + __ j(equal, &ok, Label::kNear); |
| + __ Abort(kObjectAccessCheckFailed); |
| + __ bind(&ok); |
| + break; |
| + } |
| + |
| + case HObjectAccess::FOR_BACKING_STORE_OFFSET: { |
| + ASSERT(!access->IsInobject()); |
| + // Load backing store. |
| + __ movp(kScratchRegister, |
| + FieldOperand(object, JSObject::kPropertiesOffset)); |
| + // Load backing store length, with implicit Smi untagging. |
Igor Sheludko, 2014/06/05 09:04:30:
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32);
Jakob Kummerow, 2014/06/05 14:33:31:
  Done.
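The "implicit Smi untagging" relies on the x64 Smi layout: the 32-bit payload sits in the upper half of the tagged 64-bit word (kSmiTagSize + kSmiShiftSize == 32, which is exactly what the requested STATIC_ASSERT pins down), so on a little-endian target a 32-bit load from byte offset kLengthOffset + kPointerSize / 2 reads the value already untagged. A minimal standalone sketch of that arithmetic (illustrative only, not part of the patch):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // x64 Smi encoding: 32-bit payload in the upper half of the word,
    // tag bits in the lower half (tag size + shift size == 32).
    int64_t TagAsSmi(int32_t value) {
      return static_cast<int64_t>(value) << 32;
    }

    // Equivalent of the 32-bit movl from byte offset +4 (kPointerSize / 2)
    // on a little-endian machine.
    int32_t LoadUpperHalf(const int64_t& tagged_word) {
      int32_t result;
      std::memcpy(&result, reinterpret_cast<const char*>(&tagged_word) + 4,
                  sizeof(result));
      return result;
    }

    int main() {
      assert(LoadUpperHalf(TagAsSmi(1234)) == 1234);  // the load yields the untagged value
      return 0;
    }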
| + __ movl(kScratchRegister, |
| + FieldOperand(kScratchRegister, |
| + FixedArray::kLengthOffset + kPointerSize / 2)); |
| + // The loaded length does not include the backing store's header size, |
| + // but access->offset() does. |
| + ASSERT(access->offset() >= FixedArray::kHeaderSize); |
| + int accessed_index = |
| + (access->offset() - FixedArray::kHeaderSize) / kPointerSize; |
| + __ cmpl(kScratchRegister, Immediate(accessed_index)); |
| + __ Check(above, kObjectAccessCheckFailed); |
| + break; |
| + } |
| + case HObjectAccess::FOR_FIELD: { |
| + ASSERT(access->IsInobject()); |
| + Label ok; |
| + __ movp(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset)); |
| + // Some objects have variable instance size (e.g. FixedArray). |
| + // To check those, we'd need an equivalent of HeapObject::SizeFromMap. |
| + // For now we just skip such objects. |
| + __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceSizeOffset), |
| + Immediate(kVariableSizeSentinel)); |
| + __ j(equal, &ok, Label::kNear); |
| + |
| + __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceSizeOffset), |
| + Immediate(access->offset() / kPointerSize)); |
| + __ Check(above, kObjectAccessCheckFailed); |
| + __ bind(&ok); |
| + break; |
| + } |
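The FOR_FIELD case above bounds the access by the map's instance size, which is recorded in pointer-size words (hence the division by kPointerSize); maps of variable-size objects store kVariableSizeSentinel there and are skipped. A worked example of the comparison, assuming kPointerSize == 8 (illustrative only, not part of the patch):

    #include <cassert>

    const int kPointerSize = 8;

    // Mirrors the emitted check: the instance size in words must be
    // strictly greater than the accessed byte offset / kPointerSize.
    bool InobjectOffsetIsValid(int instance_size_in_words, int byte_offset) {
      return instance_size_in_words > byte_offset / kPointerSize;
    }

    int main() {
      assert(InobjectOffsetIsValid(4, 24));   // last slot of a 32-byte object: passes
      assert(!InobjectOffsetIsValid(4, 32));  // one slot past the end: Check(above) aborts
      return 0;
    }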
| + case HObjectAccess::FOR_GLOBAL_OBJECT_NATIVE_CONTEXT: |
| + case HObjectAccess::FOR_MAP: |
| + // Unimplemented. That's OK for now. |
| + break; |
| + |
| + case HObjectAccess::FOR_ALLOCATION_SITE_LIST: |
| + case HObjectAccess::FOR_COUNTER: |
| + // External. Handled differently. |
| + case HObjectAccess::UNKNOWN_PURPOSE: |
| + // Don't use UNKNOWN_PURPOSE. |
Igor Sheludko, 2014/06/05 09:04:30:
  default:
Jakob Kummerow, 2014/06/05 14:33:31:
  Nope. We generally don't use "default:" cases when
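The reply is cut off in this collapsed view; the usual rationale for the convention it refers to (and presumably what the reply goes on to say) is that omitting "default:" in a switch over an enum lets the compiler warn, e.g. via -Wswitch in GCC/Clang, when a newly added enum value is not handled. A short illustration with a made-up enum (not from the patch):

    // With no default label, adding a value to Purpose without handling it
    // in the switch produces a compiler warning instead of silently taking
    // a fallback path.
    enum Purpose { kForField, kForArrayLength, kForStringLength };

    int CheckKind(Purpose p) {
      switch (p) {
        case kForField:        return 1;
        case kForArrayLength:  return 2;
        case kForStringLength: return 3;
        // no default: the compiler tracks exhaustiveness for us
      }
      return 0;  // unreachable for valid enum values
    }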
| + UNREACHABLE(); |
| + break; |
| + } |
| +} |
| +#endif |
| + |
| + |
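The matching declarations would live in src/x64/lithium-codegen-x64.h, which is not part of this excerpt. A hypothetical sketch of what they might look like (declarations only; the default argument for condition is inferred from the two- and three-argument call sites above and is not confirmed by the patch):

    #ifdef DEBUG
      void EmitCheckMap(Register object, Handle<Map> map);
      void EmitCheckInstanceType(Register object, InstanceType type,
                                 Condition condition = equal);
      void EmitObjectAccessChecks(HObjectAccess* access, Register object,
                                  bool is_store);
      void EmitKeyedAccessCheck(LOperand* elements, Operand operand);
    #endif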
| void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| HObjectAccess access = instr->hydrogen()->access(); |
| int offset = access.offset(); |
| @@ -2911,6 +3115,9 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| } |
| Register object = ToRegister(instr->object()); |
| +#ifdef DEBUG |
| + EmitObjectAccessChecks(&access, object, false); |
| +#endif |
| if (instr->hydrogen()->representation().IsDouble()) { |
| XMMRegister result = ToDoubleRegister(instr->result()); |
| __ movsd(result, FieldOperand(object, offset)); |
| @@ -3031,6 +3238,38 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| } |
| +#ifdef DEBUG |
| +void LCodeGen::EmitKeyedAccessCheck(LOperand* elements, Operand operand) { |
| + if (!FLAG_debug_code) return; |
| + Register elements_reg = ToRegister(elements); |
| + Register length_scratch = kScratchRegister; |
| + Register access_scratch = r11; |
| + __ Push(access_scratch); |
| + // 64-bit platforms make it easy :-) |
| + STATIC_ASSERT(kPointerSize == kDoubleSize); |
| + const int kHeaderSize = FixedArrayBase::kHeaderSize; |
| + STATIC_ASSERT(kHeaderSize == FixedArray::kHeaderSize); |
| + STATIC_ASSERT(kHeaderSize == FixedDoubleArray::kHeaderSize); |
| + STATIC_ASSERT(kHeaderSize == FixedTypedArrayBase::kHeaderSize); |
| + |
| + // Load elements length, with implicit Smi untagging. |
Igor Sheludko, 2014/06/05 09:04:30:
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32);
Jakob Kummerow, 2014/06/05 14:33:31:
  Done.
| + __ movl(length_scratch, |
| + FieldOperand(elements_reg, |
| + FixedArrayBase::kLengthOffset + kPointerSize / 2)); |
| + // Multiply with pointer size and add header size to get the object length. |
| + __ leaq(length_scratch, |
| + Operand(length_scratch, times_pointer_size, |
| + kHeaderSize - kHeapObjectTag)); |
| + // Compute the actually accessed offset in access_scratch. |
| + __ leaq(access_scratch, operand); |
| + __ subq(access_scratch, elements_reg); |
| + __ cmpq(length_scratch, access_scratch); |
| + __ Check(above, kObjectAccessCheckFailed); |
| + __ Pop(access_scratch); |
| +} |
| +#endif |
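EmitKeyedAccessCheck boils down to one unsigned comparison: the object's total size in bytes (length * kPointerSize plus the header, with kHeapObjectTag cancelling out of both sides of the subtraction) must be strictly above the accessed byte offset. Because elements are 8 bytes on x64 (the kPointerSize == kDoubleSize assert is what lets pointer and double arrays share this helper) and offsets are 8-byte aligned, "start of element is below the end of the object" implies the whole element is in bounds. A worked example assuming a 16-byte FixedArrayBase header (illustrative only, not part of the patch):

    #include <cassert>
    #include <cstdint>

    const int64_t kElementSize = 8;  // kPointerSize == kDoubleSize on x64
    const int64_t kHeaderSize = 16;  // assumed FixedArrayBase header: map + length

    // Mirrors the emitted cmpq/Check(above): accessed byte offset < object size.
    bool AccessIsInBounds(int64_t length, int64_t index) {
      int64_t object_size = length * kElementSize + kHeaderSize;
      int64_t accessed_offset = kHeaderSize + index * kElementSize;
      return accessed_offset < object_size;
    }

    int main() {
      assert(AccessIsInBounds(3, 2));   // last element of a 3-element array
      assert(!AccessIsInBounds(3, 3));  // one past the end: the Check would abort
      return 0;
    }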
| + |
| + |
| void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { |
| ElementsKind elements_kind = instr->elements_kind(); |
| LOperand* key = instr->key(); |
| @@ -3040,6 +3279,13 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { |
| elements_kind, |
| instr->base_offset())); |
| +#ifdef DEBUG |
| + // TODO(jkummerow): Support external elements too. |
| + if (!IsExternalArrayElementsKind(elements_kind)) { |
| + EmitKeyedAccessCheck(instr->elements(), operand); |
| + } |
| +#endif |
| + |
| if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS || |
| elements_kind == FLOAT32_ELEMENTS) { |
| XMMRegister result(ToDoubleRegister(instr->result())); |
| @@ -3118,6 +3364,9 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { |
| key, |
| FAST_DOUBLE_ELEMENTS, |
| instr->base_offset()); |
| +#ifdef DEBUG |
| + EmitKeyedAccessCheck(instr->elements(), double_load_operand); |
| +#endif |
| __ movsd(result, double_load_operand); |
| } |
| @@ -3149,12 +3398,14 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
| offset += kPointerSize / 2; |
| } |
| - __ Load(result, |
| - BuildFastArrayOperand(instr->elements(), |
| - key, |
| - FAST_ELEMENTS, |
| - offset), |
| - representation); |
| + Operand operand = BuildFastArrayOperand(instr->elements(), |
| + key, |
| + FAST_ELEMENTS, |
| + offset); |
| +#ifdef DEBUG |
| + EmitKeyedAccessCheck(instr->elements(), operand); |
| +#endif |
| + __ Load(result, operand, representation); |
| // Check for the hole value. |
| if (requires_hole_check) { |
| @@ -4001,6 +4252,10 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
| Register object = ToRegister(instr->object()); |
| __ AssertNotSmi(object); |
| +#ifdef DEBUG |
| + EmitObjectAccessChecks(&access, object, true); |
| +#endif |
| + |
Igor Sheludko, 2014/06/05 09:04:30:
  Extra empty line?
Jakob Kummerow, 2014/06/05 14:33:31:
  Done.
| ASSERT(!representation.IsSmi() || |
| !instr->value()->IsConstantOperand() || |
| @@ -4174,6 +4429,12 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
| elements_kind, |
| instr->base_offset())); |
| +#ifdef DEBUG |
| + if (!IsExternalArrayElementsKind(elements_kind)) { |
| + EmitKeyedAccessCheck(instr->elements(), operand); |
| + } |
| +#endif |
| + |
| if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS || |
| elements_kind == FLOAT32_ELEMENTS) { |
| XMMRegister value(ToDoubleRegister(instr->value())); |
| @@ -4245,7 +4506,9 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) { |
| key, |
| FAST_DOUBLE_ELEMENTS, |
| instr->base_offset()); |
| - |
| +#ifdef DEBUG |
| + EmitKeyedAccessCheck(instr->elements(), double_store_operand); |
| +#endif |
| __ movsd(double_store_operand, value); |
| } |
| @@ -4280,6 +4543,9 @@ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { |
| key, |
| FAST_ELEMENTS, |
| offset); |
| +#ifdef DEBUG |
| + EmitKeyedAccessCheck(instr->elements(), operand); |
| +#endif |
| if (instr->value()->IsRegister()) { |
| __ Store(operand, ToRegister(instr->value()), representation); |
| } else { |