Index: src/spaces-inl.h
diff --git a/src/spaces-inl.h b/src/spaces-inl.h
index da9c03d9137b9d2510778ea95a97c4355962f911..eba52c694919df382a527454c2cffef6f064e5c8 100644
--- a/src/spaces-inl.h
+++ b/src/spaces-inl.h
@@ -290,21 +290,6 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) {
 
 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) {
   Address old_top = allocation_info_.top();
-#ifdef DEBUG
-  // If we are stressing compaction we waste some memory in new space
-  // in order to get more frequent GCs.
-  if (FLAG_stress_compaction && !heap()->linear_allocation()) {
-    if (allocation_info_.limit() - old_top >= size_in_bytes * 4) {
-      int filler_size = size_in_bytes * 4;
-      for (int i = 0; i < filler_size; i += kPointerSize) {
-        *(reinterpret_cast<Object**>(old_top + i)) =
-            heap()->one_pointer_filler_map();
-      }
-      old_top += filler_size;
-      allocation_info_.set_top(allocation_info_.top() + filler_size);
-    }
-  }
-#endif
 
   if (allocation_info_.limit() - old_top < size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes);