Chromium Code Reviews

Unified Diff: src/heap-inl.h

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
 // Copyright 2006-2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 12 matching lines...)
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

 #ifndef V8_HEAP_INL_H_
 #define V8_HEAP_INL_H_

 #include "heap.h"
 #include "objects.h"
+#include "isolate.h"
 #include "v8-counters.h"

 namespace v8 {
 namespace internal {

+void PromotionQueue::insert(HeapObject* target, int size) {
+  *(--rear_) = reinterpret_cast<intptr_t>(target);
+  *(--rear_) = size;
+  // Assert no overflow into live objects.
+  ASSERT(reinterpret_cast<Address>(rear_) >= HEAP->new_space()->top());
+}
+
+
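
Note: insert() pushes two words per entry and grows the queue downward from
the top of new space, which is why the ASSERT above checks rear_ against
new_space()->top(). A minimal sketch of the consuming side, assuming the
matching remove() declared in heap.h mirrors this two-word layout (shape
shown for illustration only, not the committed code):

  // Hedged sketch: pop entries oldest-first. Assumes front_ starts at the
  // queue's high end and front_ >= rear_ holds for all live entries.
  void PromotionQueue::remove(HeapObject** target, int* size) {
    *target = reinterpret_cast<HeapObject*>(*(--front_));  // pushed first
    *size = static_cast<int>(*(--front_));                 // pushed second
  }
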
 int Heap::MaxObjectSizeInPagedSpace() {
   return Page::kMaxHeapObjectSize;
 }


 MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                           PretenureFlag pretenure) {
   // Check for ASCII first since this is the common case.
   if (String::IsAscii(str.start(), str.length())) {
     // If the string is ASCII, we do not need to convert the characters
(...skipping 91 matching lines...)
   ASSERT(space != NEW_SPACE ||
          retry_space == OLD_POINTER_SPACE ||
          retry_space == OLD_DATA_SPACE ||
          retry_space == LO_SPACE);
 #ifdef DEBUG
   if (FLAG_gc_interval >= 0 &&
       !disallow_allocation_failure_ &&
       Heap::allocation_timeout_-- <= 0) {
     return Failure::RetryAfterGC(space);
   }
-  Counters::objs_since_last_full.Increment();
-  Counters::objs_since_last_young.Increment();
+  isolate_->counters()->objs_since_last_full()->Increment();
+  isolate_->counters()->objs_since_last_young()->Increment();
 #endif
   MaybeObject* result;
   if (NEW_SPACE == space) {
     result = new_space_.AllocateRaw(size_in_bytes);
     if (always_allocate() && result->IsFailure()) {
       space = retry_space;
     } else {
       return result;
     }
   }
(...skipping 46 matching lines...)
     (*resource_addr)->Dispose();
   }

   // Clear the resource pointer in the string.
   *resource_addr = NULL;
 }


 MaybeObject* Heap::AllocateRawMap() {
 #ifdef DEBUG
-  Counters::objs_since_last_full.Increment();
-  Counters::objs_since_last_young.Increment();
+  isolate_->counters()->objs_since_last_full()->Increment();
+  isolate_->counters()->objs_since_last_young()->Increment();
 #endif
   MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
   if (result->IsFailure()) old_gen_exhausted_ = true;
 #ifdef DEBUG
   if (!result->IsFailure()) {
     // Maps have their own alignment.
     CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
           static_cast<intptr_t>(kHeapObjectTag));
   }
 #endif
   return result;
 }


 MaybeObject* Heap::AllocateRawCell() {
 #ifdef DEBUG
-  Counters::objs_since_last_full.Increment();
-  Counters::objs_since_last_young.Increment();
+  isolate_->counters()->objs_since_last_full()->Increment();
+  isolate_->counters()->objs_since_last_young()->Increment();
 #endif
   MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
   if (result->IsFailure()) old_gen_exhausted_ = true;
   return result;
 }


 bool Heap::InNewSpace(Object* object) {
   bool result = new_space_.Contains(object);
   ASSERT(!result ||                  // Either not in new space
(...skipping 87 matching lines...)
   ASSERT(IsAligned(byte_size, kPointerSize));

   Page* page = Page::FromAddress(dst);
   uint32_t marks = page->GetRegionMarks();

   for (int remaining = byte_size / kPointerSize;
        remaining > 0;
        remaining--) {
     Memory::Object_at(dst) = Memory::Object_at(src);

-    if (Heap::InNewSpace(Memory::Object_at(dst))) {
+    if (InNewSpace(Memory::Object_at(dst))) {
       marks |= page->GetRegionMaskForAddress(dst);
     }

     dst += kPointerSize;
     src += kPointerSize;
   }

   page->SetRegionMarks(marks);
 }
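
Note: the copy loop above maintains the region-marking write barrier: each
page keeps a 32-bit bitmap with one dirty bit per fixed-size region, and any
slot that ends up holding a new-space pointer must get its region's bit set
before the marks are written back with SetRegionMarks(). Schematically (the
region size below is an assumed value for illustration; the real constants
and mask math live in spaces.h):

  // Illustrative sketch only, not V8's actual constants.
  static const int kSketchRegionSizeLog2 = 8;  // pretend: 256-byte regions
  uint32_t RegionMaskFor(uintptr_t page_start, uintptr_t addr) {
    uintptr_t offset = addr - page_start;            // offset within page
    return 1u << (offset >> kSketchRegionSizeLog2);  // one bit per region
  }
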

(...skipping 25 matching lines...)
                                                    Address src,
                                                    int byte_size) {
   ASSERT(IsAligned(byte_size, kPointerSize));
   ASSERT((dst >= (src + byte_size)) ||
          ((OffsetFrom(src) - OffsetFrom(dst)) >= kPointerSize));

   CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, byte_size);
 }


+void Heap::ScavengePointer(HeapObject** p) {
+  ScavengeObject(p, *p);
+}
+
+
 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
+  ASSERT(HEAP->InFromSpace(object));

   // We use the first word (where the map pointer usually is) of a heap
   // object to record the forwarding pointer. A forwarding pointer can
   // point to an old space, the code space, or the to space of the new
   // generation.
   MapWord first_word = object->map_word();

   // If the first word is a forwarding address, the object has already been
   // copied.
   if (first_word.IsForwardingAddress()) {
(...skipping 52 matching lines...)
   }
   ASSERT(amount_of_external_allocated_memory_ >= 0);
   return amount_of_external_allocated_memory_;
 }


 void Heap::SetLastScriptId(Object* last_script_id) {
   roots_[kLastScriptIdRootIndex] = last_script_id;
 }

+Isolate* Heap::isolate() {
+  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
+      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
+}
+
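
Note: Heap::isolate() recovers the enclosing Isolate from a Heap* without
storing a back-pointer. It computes the byte offset of the heap inside
Isolate by calling heap() on a dummy Isolate* with the value 4 (4 rather
than 0, so the fake pointer is not NULL), then subtracts that offset, minus
the dummy 4, from this. Assuming the heap is a direct member named heap_
and heap() returns its address, the intent is equivalent to this sketch
(not the committed code):

  // Sketch of the intent; assumes Isolate::heap() returns &heap_ so the
  // member offset can be taken directly with OFFSET_OF from globals.h.
  Isolate* Heap::isolate() {
    return reinterpret_cast<Isolate*>(
        reinterpret_cast<intptr_t>(this) - OFFSET_OF(Isolate, heap_));
  }
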

 #ifdef DEBUG
 #define GC_GREEDY_CHECK() \
-  if (FLAG_gc_greedy) v8::internal::Heap::GarbageCollectionGreedyCheck()
+  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
 #else
 #define GC_GREEDY_CHECK() { }
 #endif


 // Calls the FUNCTION_CALL function and retries it up to three times
 // to guarantee that any allocations performed during the call will
 // succeed if there's enough memory.

 // Warning: Do not use the identifiers __object__, __maybe_object__ or
 // __scope__ in a call to this macro.

-#define CALL_AND_RETRY(FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)         \
+#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
   do {                                                                    \
     GC_GREEDY_CHECK();                                                    \
     MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
     Object* __object__ = NULL;                                            \
     if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
     if (__maybe_object__->IsOutOfMemory()) {                              \
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
     }                                                                     \
     if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
-    Heap::CollectGarbage(                                                 \
-        Failure::cast(__maybe_object__)->allocation_space());             \
+    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
+                                    allocation_space());                  \
     __maybe_object__ = FUNCTION_CALL;                                     \
     if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
     if (__maybe_object__->IsOutOfMemory()) {                              \
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
     }                                                                     \
     if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
-    Counters::gc_last_resort_from_handles.Increment();                    \
-    Heap::CollectAllAvailableGarbage();                                   \
+    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
+    ISOLATE->heap()->CollectAllAvailableGarbage();                        \
     {                                                                     \
       AlwaysAllocateScope __scope__;                                      \
       __maybe_object__ = FUNCTION_CALL;                                   \
     }                                                                     \
     if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
     if (__maybe_object__->IsOutOfMemory() ||                              \
         __maybe_object__->IsRetryAfterGC()) {                             \
       /* TODO(1181417): Fix this. */                                      \
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
     }                                                                     \
     RETURN_EMPTY;                                                         \
   } while (false)


-#define CALL_HEAP_FUNCTION(FUNCTION_CALL, TYPE)                           \
-  CALL_AND_RETRY(FUNCTION_CALL,                                           \
-                 return Handle<TYPE>(TYPE::cast(__object__)),             \
-                 return Handle<TYPE>())
+// TODO(isolates): cache isolate: either accept as a parameter or
+// set to some known symbol (__CUR_ISOLATE__?)
+#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                  \
+  CALL_AND_RETRY(ISOLATE,                                                 \
+                 FUNCTION_CALL,                                           \
+                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),    \
+                 return Handle<TYPE>())


-#define CALL_HEAP_FUNCTION_VOID(FUNCTION_CALL) \
-  CALL_AND_RETRY(FUNCTION_CALL, return, return)
+#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
+  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
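
Note: with the new ISOLATE parameter, callers of CALL_HEAP_FUNCTION must
name an isolate explicitly, and the resulting Handle is created in that
isolate. A hypothetical caller, for illustration only (the function name is
an assumption; Factory methods updated elsewhere in this CL follow this
pattern):

  // Hypothetical usage of the updated macro; not part of this file.
  // On allocation failure the macro retries via GC before giving up.
  Handle<String> NewUtf8String(Isolate* isolate, Vector<const char> str) {
    CALL_HEAP_FUNCTION(isolate,
                       isolate->heap()->AllocateStringFromUtf8(str,
                                                               NOT_TENURED),
                       String);
  }
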


 #ifdef DEBUG

 inline bool Heap::allow_allocation(bool new_state) {
   bool old = allocation_allowed_;
   allocation_allowed_ = new_state;
   return old;
 }

 #endif


 void ExternalStringTable::AddString(String* string) {
   ASSERT(string->IsExternalString());
-  if (Heap::InNewSpace(string)) {
+  if (heap_->InNewSpace(string)) {
     new_space_strings_.Add(string);
   } else {
     old_space_strings_.Add(string);
   }
 }


 void ExternalStringTable::Iterate(ObjectVisitor* v) {
   if (!new_space_strings_.is_empty()) {
     Object** start = &new_space_strings_[0];
     v->VisitPointers(start, start + new_space_strings_.length());
   }
   if (!old_space_strings_.is_empty()) {
     Object** start = &old_space_strings_[0];
     v->VisitPointers(start, start + old_space_strings_.length());
   }
 }


 // Verify() is inline to avoid ifdef-s around its calls in release
 // mode.
 void ExternalStringTable::Verify() {
 #ifdef DEBUG
   for (int i = 0; i < new_space_strings_.length(); ++i) {
-    ASSERT(Heap::InNewSpace(new_space_strings_[i]));
-    ASSERT(new_space_strings_[i] != Heap::raw_unchecked_null_value());
+    ASSERT(heap_->InNewSpace(new_space_strings_[i]));
+    ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_null_value());
   }
   for (int i = 0; i < old_space_strings_.length(); ++i) {
-    ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
-    ASSERT(old_space_strings_[i] != Heap::raw_unchecked_null_value());
+    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
+    ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_null_value());
   }
 #endif
 }


 void ExternalStringTable::AddOldString(String* string) {
   ASSERT(string->IsExternalString());
-  ASSERT(!Heap::InNewSpace(string));
+  ASSERT(!heap_->InNewSpace(string));
   old_space_strings_.Add(string);
 }


 void ExternalStringTable::ShrinkNewStrings(int position) {
   new_space_strings_.Rewind(position);
   Verify();
 }

+
+void Heap::ClearInstanceofCache() {
+  set_instanceof_cache_function(the_hole_value());
+}
+
+
+Object* Heap::ToBoolean(bool condition) {
+  return condition ? true_value() : false_value();
+}
+
+
+void Heap::CompletelyClearInstanceofCache() {
+  set_instanceof_cache_map(the_hole_value());
+  set_instanceof_cache_function(the_hole_value());
+}
+
+
+MaybeObject* TranscendentalCache::Get(Type type, double input) {
+  SubCache* cache = caches_[type];
+  if (cache == NULL) {
+    caches_[type] = cache = new SubCache(type);
+  }
+  return cache->Get(input);
+}
+
+
+Address TranscendentalCache::cache_array_address() {
+  return reinterpret_cast<Address>(caches_);
+}
+
+
+double TranscendentalCache::SubCache::Calculate(double input) {
+  switch (type_) {
+    case ACOS:
+      return acos(input);
+    case ASIN:
+      return asin(input);
+    case ATAN:
+      return atan(input);
+    case COS:
+      return cos(input);
+    case EXP:
+      return exp(input);
+    case LOG:
+      return log(input);
+    case SIN:
+      return sin(input);
+    case TAN:
+      return tan(input);
+    default:
+      return 0.0;  // Never happens.
+  }
+}
+
+
+MaybeObject* TranscendentalCache::SubCache::Get(double input) {
+  Converter c;
+  c.dbl = input;
+  int hash = Hash(c);
+  Element e = elements_[hash];
+  if (e.in[0] == c.integers[0] &&
+      e.in[1] == c.integers[1]) {
+    ASSERT(e.output != NULL);
+    isolate_->counters()->transcendental_cache_hit()->Increment();
+    return e.output;
+  }
+  double answer = Calculate(input);
+  isolate_->counters()->transcendental_cache_miss()->Increment();
+  Object* heap_number;
+  { MaybeObject* maybe_heap_number =
+        isolate_->heap()->AllocateHeapNumber(answer);
+    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
+  }
+  elements_[hash].in[0] = c.integers[0];
+  elements_[hash].in[1] = c.integers[1];
+  elements_[hash].output = heap_number;
+  return heap_number;
+}
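
Note: SubCache::Get implements a direct-mapped, one-entry-per-slot cache
keyed on the raw bit pattern of the input double: Converter overlays the
double with two uint32_t halves, and a hit requires both halves to match.
A sketch of the declarations this code relies on, with shapes assumed from
usage here (the actual definitions, including the real Hash(), live in
heap.h):

  // Assumed shapes, for orientation only.
  union Converter {
    double dbl;
    uint32_t integers[2];
  };

  struct Element {
    uint32_t in[2];   // bit pattern of the cached input
    Object* output;   // HeapNumber holding the cached result
  };

  // A plausible Hash(): fold the halves, mask to the table size.
  static inline int Hash(const Converter& c, int cache_size) {
    uint32_t h = c.integers[0] ^ c.integers[1];
    return static_cast<int>(h & (cache_size - 1));
  }
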
+
+
+Heap* _inline_get_heap_() {
+  return HEAP;
+}
+
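
Note: _inline_get_heap_() gives code that only sees the HEAP shorthand an
inlinable accessor. Conceptually, HEAP names the current isolate's heap,
roughly as follows (the exact definition lives in isolate.h):

  // Conceptual expansion, shown for orientation only.
  #define HEAP (v8::internal::Isolate::Current()->heap())
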
691
692 void MarkCompactCollector::SetMark(HeapObject* obj) {
693 tracer_->increment_marked_count();
694 #ifdef DEBUG
695 UpdateLiveObjectCount(obj);
696 #endif
697 obj->SetMark();
698 }
699
700
 } }  // namespace v8::internal

 #endif  // V8_HEAP_INL_H_