Index: src/objects.cc
diff --git a/src/objects.cc b/src/objects.cc
index 35abf27e6b30db584e06fc257e31dd93eb277d10..814eaae3427467c18dbc7aa92462a3c807a86a97 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -2186,7 +2186,7 @@ static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
   Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
 
   if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
-      ZapEndOfFixedArray(new_end, to_trim);
+    ZapEndOfFixedArray(new_end, to_trim);
   }
 
   int size_delta = to_trim * kPointerSize;
@@ -8985,10 +8985,10 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
     // No optimized code map.
     ASSERT_EQ(0, Smi::cast(value)->value());
     // Create 3 entries per context {context, code, literals}.
-    new_code_map = FACTORY->NewFixedArray(kEntryLength);
-    new_code_map->set(0, *native_context);
-    new_code_map->set(1, *code);
-    new_code_map->set(2, *literals);
+    new_code_map = FACTORY->NewFixedArray(kEntriesStart + kEntryLength);
+    new_code_map->set(kEntriesStart + 0, *native_context);
+    new_code_map->set(kEntriesStart + 1, *code);
+    new_code_map->set(kEntriesStart + 2, *literals);
   } else {
     // Copy old map and append one new entry.
     Handle<FixedArray> old_code_map(FixedArray::cast(value));
@@ -9000,9 +9000,11 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
     new_code_map->set(old_length, *native_context);
     new_code_map->set(old_length + 1, *code);
     new_code_map->set(old_length + 2, *literals);
+    // Zap the old map for the sake of the heap verifier.
+    if (Heap::ShouldZapGarbage()) shared->ZapOptimizedCodeMap();
   }
 #ifdef DEBUG
-  for (int i = 0; i < new_code_map->length(); i += kEntryLength) {
+  for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
     ASSERT(new_code_map->get(i)->IsNativeContext());
     ASSERT(new_code_map->get(i + 1)->IsCode());
     ASSERT(Code::cast(new_code_map->get(i + 1))->kind() ==
@@ -9016,8 +9018,7 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
 
 void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function,
                                                      int index) {
-  ASSERT(index > 0);
-  ASSERT(optimized_code_map()->IsFixedArray());
+  ASSERT(index > kEntriesStart);
   FixedArray* code_map = FixedArray::cast(optimized_code_map());
   if (!bound()) {
     FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
@@ -9031,15 +9032,18 @@ void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function,
 }
 
 
-void SharedFunctionInfo::ClearOptimizedCodeMap(const char* reason) {
-  if (!optimized_code_map()->IsSmi()) {
-    if (FLAG_trace_opt) {
-      PrintF("[clearing entire optimizing code map (%s) for ", reason);
-      ShortPrint();
-      PrintF("]\n");
-    }
-    set_optimized_code_map(Smi::FromInt(0));
+void SharedFunctionInfo::ClearOptimizedCodeMap() {
+  FixedArray* code_map = FixedArray::cast(optimized_code_map());
+
+  // If the next map link slot is already used then the function was
+  // enqueued with code flushing and we remove it now.
+  if (!code_map->get(kNextMapIndex)->IsUndefined()) {
+    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
+    flusher->EvictOptimizedCodeMap(this);
   }
+
+  ASSERT(code_map->get(kNextMapIndex)->IsUndefined());
+  set_optimized_code_map(Smi::FromInt(0));
 }
 
 
@@ -9050,11 +9054,11 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
   int i;
   bool removed_entry = false;
   FixedArray* code_map = FixedArray::cast(optimized_code_map());
-  for (i = 0; i < code_map->length(); i += kEntryLength) {
+  for (i = kEntriesStart; i < code_map->length(); i += kEntryLength) {
    ASSERT(code_map->get(i)->IsNativeContext());
     if (Code::cast(code_map->get(i + 1)) == optimized_code) {
       if (FLAG_trace_opt) {
-        PrintF("[clearing optimizing code map (%s) for ", reason);
+        PrintF("[evicting entry from optimizing code map (%s) for ", reason);
         ShortPrint();
         PrintF("]\n");
       }
@@ -9069,15 +9073,35 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
     i += kEntryLength;
   }
   if (removed_entry) {
-    if (code_map->length() > kEntryLength) {
-      RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength);
-    } else {
-      ClearOptimizedCodeMap(reason);
+    // Always trim even when array is cleared because of heap verifier.
+    RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength);
+    if (code_map->length() == kEntriesStart) {
+      ClearOptimizedCodeMap();
     }
   }
 }
 
 
+void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
+  FixedArray* code_map = FixedArray::cast(optimized_code_map());
+  ASSERT(shrink_by % kEntryLength == 0);
+  ASSERT(shrink_by <= code_map->length() - kEntriesStart);
+  // Always trim even when array is cleared because of heap verifier.
+  RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by);
+  if (code_map->length() == kEntriesStart) {
+    ClearOptimizedCodeMap();
+  }
+}
+
+
+void SharedFunctionInfo::ZapOptimizedCodeMap() {
+  FixedArray* code_map = FixedArray::cast(optimized_code_map());
+  MemsetPointer(code_map->data_start(),
+                GetHeap()->the_hole_value(),
+                code_map->length());
+}
+
+
 bool JSFunction::CompileLazy(Handle<JSFunction> function,
                              ClearExceptionFlag flag) {
   bool result = true;
@@ -9717,7 +9741,7 @@ int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) {
   if (!value->IsSmi()) {
     FixedArray* optimized_code_map = FixedArray::cast(value);
     int length = optimized_code_map->length();
-    for (int i = 0; i < length; i += 3) {
+    for (int i = kEntriesStart; i < length; i += kEntryLength) {
       if (optimized_code_map->get(i) == native_context) {
         return i + 1;
       }
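
For reference, the constants this change relies on (kNextMapIndex, kEntriesStart, kEntryLength) describe the new layout of the optimized code map. The sketch below is a minimal guess at the matching declarations, presumably in SharedFunctionInfo's class definition in objects.h; only the names appear in this diff, so the numeric values are assumptions.

  // Assumed layout of the optimized_code_map FixedArray:
  //   [kNextMapIndex]      link slot used by the code flusher to chain maps
  //   [kEntriesStart + 0]  native context of the first cached entry
  //   [kEntriesStart + 1]  optimized Code object for that context
  //   [kEntriesStart + 2]  literals FixedArray for that context
  static const int kNextMapIndex = 0;  // assumed value
  static const int kEntriesStart = 1;  // assumed value
  static const int kEntryLength = 3;   // one {context, code, literals} triple per entry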