OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4403 matching lines...)
4414 | 4414 |
4415 __ bind(&done); | 4415 __ bind(&done); |
4416 } | 4416 } |
4417 frame_->Forget(3); | 4417 frame_->Forget(3); |
4418 frame_->Push(rax); | 4418 frame_->Push(rax); |
4419 } | 4419 } |
4420 | 4420 |
4421 | 4421 |
4422 class DeferredSearchCache: public DeferredCode { | 4422 class DeferredSearchCache: public DeferredCode { |
4423 public: | 4423 public: |
4424 DeferredSearchCache(Register dst, Register cache, Register key) | 4424 DeferredSearchCache(Register dst, |
4425 : dst_(dst), cache_(cache), key_(key) { | 4425 Register cache, |
| 4426 Register key, |
| 4427 Register scratch) |
| 4428 : dst_(dst), cache_(cache), key_(key), scratch_(scratch) { |
4426 set_comment("[ DeferredSearchCache"); | 4429 set_comment("[ DeferredSearchCache"); |
4427 } | 4430 } |
4428 | 4431 |
4429 virtual void Generate(); | 4432 virtual void Generate(); |
4430 | 4433 |
4431 private: | 4434 private: |
4432 Register dst_, cache_, key_; | 4435 Register dst_; // On invocation holds the finger index (as a Smi); |
| 4436 // on exit holds the value that was looked up. |
| 4437 Register cache_; // instance of JSFunctionResultCache. |
| 4438 Register key_; // key being looked up. |
| 4439 Register scratch_; |
4433 }; | 4440 }; |
4434 | 4441 |
4435 | 4442 |
| 4443 // Return an operand addressing the element at |index| + |additional_offset| |
| 4444 // in the FixedArray whose pointer is held in |array|. |index| is an int32. |
| 4445 static Operand ArrayElement(Register array, |
| 4446 Register index, |
| 4447 int additional_offset = 0) { |
| 4448 int offset = FixedArray::kHeaderSize + additional_offset * kPointerSize; |
| 4449 return FieldOperand(array, index, times_pointer_size, offset); |
| 4450 } |
| 4451 |
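
For reference, here is a minimal host-side C++ sketch of the effective address the ArrayElement() helper above encodes, assuming a 64-bit target (kPointerSize == 8), a heap-object tag of 1, and a two-word FixedArray header; the constants and names below are illustrative stand-ins, not values copied from V8's headers.

    #include <cstdint>

    // Hypothetical stand-alone model of FieldOperand(array, index, times_pointer_size, offset).
    static inline intptr_t ArrayElementAddress(intptr_t tagged_array,   // tagged FixedArray pointer
                                               int32_t index,           // element index (int32)
                                               int additional_offset = 0) {
      const int kPointerSize = 8;                // assumption: 64-bit target.
      const int kHeapObjectTag = 1;              // assumption: heap pointers carry tag 1.
      const int kHeaderSize = 2 * kPointerSize;  // assumption: map word + length word.
      int offset = kHeaderSize + additional_offset * kPointerSize;
      // The generated operand addresses [array + index * kPointerSize + offset - kHeapObjectTag].
      return tagged_array - kHeapObjectTag +
             static_cast<intptr_t>(index) * kPointerSize + offset;
    }
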
| 4452 |
4436 void DeferredSearchCache::Generate() { | 4453 void DeferredSearchCache::Generate() { |
4437 __ push(cache_); | 4454 Label first_loop, search_further, second_loop, cache_miss; |
| 4455 |
| 4456 Immediate kEntriesIndexImm = Immediate(JSFunctionResultCache::kEntriesIndex); |
| 4457 Immediate kEntrySizeImm = Immediate(JSFunctionResultCache::kEntrySize); |
| 4458 |
| 4459 __ SmiToInteger32(dst_, dst_); |
| 4460 // Check the cache from the finger back to the start of the cache. |
| 4461 __ bind(&first_loop); |
| 4462 __ subq(dst_, kEntrySizeImm); |
| 4463 __ cmpq(dst_, kEntriesIndexImm); |
| 4464 __ j(less, &search_further); |
| 4465 |
| 4466 __ cmpq(ArrayElement(cache_, dst_), key_); |
| 4467 __ j(not_equal, &first_loop); |
| 4468 |
| 4469 __ Integer32ToSmi(scratch_, dst_); |
| 4470 __ movq(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), scratch_); |
| 4471 __ movq(dst_, ArrayElement(cache_, dst_, 1)); |
| 4472 __ jmp(exit_label()); |
| 4473 |
| 4474 __ bind(&search_further); |
| 4475 |
| 4476 // Check the cache from the end of the cache down to the finger. |
| 4477 __ movq(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset)); |
| 4478 __ movq(scratch_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset)); |
| 4479 __ SmiToInteger32(dst_, dst_); |
| 4480 __ SmiToInteger32(scratch_, scratch_); |
| 4481 |
| 4482 __ bind(&second_loop); |
| 4483 __ subq(dst_, kEntrySizeImm); |
| 4484 __ cmpq(dst_, scratch_); |
| 4485 __ j(less_equal, &cache_miss); |
| 4486 |
| 4487 __ cmpq(ArrayElement(cache_, dst_), key_); |
| 4488 __ j(not_equal, &second_loop); |
| 4489 |
| 4490 __ Integer32ToSmi(scratch_, dst_); |
| 4491 __ movq(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), scratch_); |
| 4492 __ movq(dst_, ArrayElement(cache_, dst_, 1)); |
| 4493 __ jmp(exit_label()); |
| 4494 |
| 4495 __ bind(&cache_miss); |
| 4496 __ push(cache_); // store a reference to cache |
| 4497 __ push(key_); // store a key |
| 4498 Handle<Object> receiver(Top::global_context()->global()); |
| 4499 __ Push(receiver); |
4438 __ push(key_); | 4500 __ push(key_); |
4439 __ CallRuntime(Runtime::kGetFromCache, 2); | 4501 // On x64 the function must be in rdi. |
| 4502 __ movq(rdi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset)); |
| 4503 ParameterCount expected(1); |
| 4504 __ InvokeFunction(rdi, expected, CALL_FUNCTION); |
| 4505 |
| 4506 // Find a place to put the new cached value. |
| 4507 Label add_new_entry, update_cache; |
| 4508 __ movq(rcx, Operand(rsp, kPointerSize)); // restore the cache |
| 4509 // Possible optimization: the cache size is constant for a given cache, |
| 4510 // so technically we could use a constant here. However, on a cache miss |
| 4511 // this optimization would hardly matter. |
| 4512 |
| 4513 // Check whether we can add a new entry to the cache. |
| 4514 __ movl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); |
| 4515 __ movq(r9, FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset)); |
| 4516 __ SmiToInteger32(r9, r9); |
| 4517 __ cmpq(rbx, r9); |
| 4518 __ j(greater, &add_new_entry); |
| 4519 |
| 4520 // Check whether we can evict the entry after the finger. |
| 4521 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
| 4522 __ SmiToInteger32(rdx, rdx); |
| 4523 __ addq(rdx, kEntrySizeImm); |
| 4524 Label forward; |
| 4525 __ cmpq(rbx, rdx); |
| 4526 __ j(greater, &forward); |
| 4527 // Need to wrap around to the start of the cache entries. |
| 4528 __ movq(rdx, kEntriesIndexImm); |
| 4529 __ bind(&forward); |
| 4530 __ Integer32ToSmi(r9, rdx); |
| 4531 __ jmp(&update_cache); |
| 4532 |
| 4533 __ bind(&add_new_entry); |
| 4534 // r9 holds cache size as int. |
| 4535 __ movq(rdx, r9); |
| 4536 __ Integer32ToSmi(r9, r9); |
| 4537 __ SmiAddConstant(rbx, r9, Smi::FromInt(JSFunctionResultCache::kEntrySize)); |
| 4538 __ movq(FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx); |
| 4539 |
| 4540 // Update the cache itself. |
| 4541 // rdx holds the index as int. |
| 4542 // r9 holds the index as smi. |
| 4543 __ bind(&update_cache); |
| 4544 __ pop(rbx); // restore the key |
| 4545 __ movq(FieldOperand(rcx, JSFunctionResultCache::kFingerOffset), r9); |
| 4546 // Store key. |
| 4547 __ movq(ArrayElement(rcx, rdx), rbx); |
| 4548 __ RecordWrite(rcx, 0, rbx, r9); |
| 4549 |
| 4550 // Store value. |
| 4551 __ pop(rcx); // restore the cache. |
| 4552 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
| 4553 __ SmiAddConstant(rdx, rdx, Smi::FromInt(1)); |
| 4554 __ movq(r9, rdx); |
| 4555 __ SmiToInteger32(rdx, rdx); |
| 4556 __ movq(rbx, rax); |
| 4557 __ movq(ArrayElement(rcx, rdx), rbx); |
| 4558 __ RecordWrite(rcx, 0, rbx, r9); |
| 4559 |
4440 if (!dst_.is(rax)) { | 4560 if (!dst_.is(rax)) { |
4441 __ movq(dst_, rax); | 4561 __ movq(dst_, rax); |
4442 } | 4562 } |
4443 } | 4563 } |
4444 | 4564 |
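To make the control flow above easier to follow, here is a rough C++ model of the finger-based search that DeferredSearchCache::Generate() implements: probe backwards from the finger to the start of the entries, then from the occupied end of the cache down to the finger, updating the finger on a hit. The container, constants, and names below are illustrative; the real data lives in a FixedArray laid out by JSFunctionResultCache.

    #include <cstdint>
    #include <vector>

    // Illustrative model only: entries[kEntriesIndex .. size-1] holds alternating
    // key/value slots; 'finger' is the key slot of the most recent hit.
    struct CacheModel {
      static const int kEntriesIndex = 2;  // illustrative; the real constant is in JSFunctionResultCache.
      static const int kEntrySize = 2;     // key slot + value slot.
      std::vector<intptr_t> entries;
      int size;    // number of occupied slots (the kCacheSizeOffset analogue).
      int finger;  // key slot of the last hit (the kFingerOffset analogue).

      // Returns the key-slot index on a hit, -1 on a miss. On a hit the
      // generated code also loads entries[i + 1], the cached value.
      int Find(intptr_t key) {
        // First loop: from the finger back towards the start of the entries.
        for (int i = finger - kEntrySize; i >= kEntriesIndex; i -= kEntrySize) {
          if (entries[i] == key) { finger = i; return i; }
        }
        // Second loop: from the occupied end of the cache down to the finger.
        for (int i = size - kEntrySize; i > finger; i -= kEntrySize) {
          if (entries[i] == key) { finger = i; return i; }
        }
        // Miss: the deferred code calls the factory function and then stores the
        // new key/value pair after the finger, growing the cache or evicting
        // (and wrapping) as needed.
        return -1;
      }
    };
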
4445 | 4565 |
4446 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) { | 4566 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) { |
4447 ASSERT_EQ(2, args->length()); | 4567 ASSERT_EQ(2, args->length()); |
4448 | 4568 |
4449 ASSERT_NE(NULL, args->at(0)->AsLiteral()); | 4569 ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
(...skipping 17 matching lines...)
4467 __ movq(cache.reg(), | 4587 __ movq(cache.reg(), |
4468 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset)); | 4588 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset)); |
4469 __ movq(cache.reg(), | 4589 __ movq(cache.reg(), |
4470 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX)); | 4590 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX)); |
4471 __ movq(cache.reg(), | 4591 __ movq(cache.reg(), |
4472 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id))); | 4592 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id))); |
4473 | 4593 |
4474 Result tmp = allocator()->Allocate(); | 4594 Result tmp = allocator()->Allocate(); |
4475 ASSERT(tmp.is_valid()); | 4595 ASSERT(tmp.is_valid()); |
4476 | 4596 |
| 4597 Result scratch = allocator()->Allocate(); |
| 4598 ASSERT(scratch.is_valid()); |
| 4599 |
4477 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(), | 4600 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(), |
4478 cache.reg(), | 4601 cache.reg(), |
4479 key.reg()); | 4602 key.reg(), |
| 4603 scratch.reg()); |
4480 | 4604 |
4481 const int kFingerOffset = | 4605 const int kFingerOffset = |
4482 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex); | 4606 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex); |
4483 // tmp.reg() now holds finger offset as a smi. | 4607 // tmp.reg() now holds finger offset as a smi. |
4484 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | |
4485 __ movq(tmp.reg(), FieldOperand(cache.reg(), kFingerOffset)); | 4608 __ movq(tmp.reg(), FieldOperand(cache.reg(), kFingerOffset)); |
4486 SmiIndex index = | 4609 SmiIndex index = |
4487 masm()->SmiToIndex(kScratchRegister, tmp.reg(), kPointerSizeLog2); | 4610 masm()->SmiToIndex(kScratchRegister, tmp.reg(), kPointerSizeLog2); |
4488 __ cmpq(key.reg(), FieldOperand(cache.reg(), | 4611 __ cmpq(key.reg(), FieldOperand(cache.reg(), |
4489 index.reg, | 4612 index.reg, index.scale, |
4490 index.scale, | |
4491 FixedArray::kHeaderSize)); | 4613 FixedArray::kHeaderSize)); |
| 4614 // Do NOT alter index.reg or tmp.reg() before cmpq below. |
4492 deferred->Branch(not_equal); | 4615 deferred->Branch(not_equal); |
4493 | |
4494 __ movq(tmp.reg(), FieldOperand(cache.reg(), | 4616 __ movq(tmp.reg(), FieldOperand(cache.reg(), |
4495 index.reg, | 4617 index.reg, index.scale, |
4496 index.scale, | 4618 FixedArray::kHeaderSize + kPointerSize)); |
4497 kPointerSize + FixedArray::kHeaderSize)); | |
4498 | 4619 |
4499 deferred->BindExit(); | 4620 deferred->BindExit(); |
4500 frame_->Push(&tmp); | 4621 frame_->Push(&tmp); |
4501 } | 4622 } |
4502 | 4623 |
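For completeness, the inline fast path that GenerateGetFromCache() emits before deferring can be modelled with the same hypothetical CacheModel sketched above: only the entry currently under the finger is probed inline, and everything else is handled by the deferred search.

    // Sketch of the inline fast path; CacheModel is the illustrative type above.
    inline bool FastProbe(const CacheModel& cache, intptr_t key, intptr_t* value) {
      if (cache.entries[cache.finger] == key) {    // cmpq against the finger's key slot
        *value = cache.entries[cache.finger + 1];  // load the adjacent value slot
        return true;                               // hit: deferred code is skipped
      }
      return false;                                // miss: DeferredSearchCache::Generate() runs
    }
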
4503 | 4624 |
4504 void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) { | 4625 void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) { |
4505 ASSERT_EQ(args->length(), 1); | 4626 ASSERT_EQ(args->length(), 1); |
4506 | 4627 |
4507 // Load the argument on the stack and jump to the runtime. | 4628 // Load the argument on the stack and jump to the runtime. |
(...skipping 6949 matching lines...)
11457 // Call the function from C++. | 11578 // Call the function from C++. |
11458 return FUNCTION_CAST<ModuloFunction>(buffer); | 11579 return FUNCTION_CAST<ModuloFunction>(buffer); |
11459 } | 11580 } |
11460 | 11581 |
11461 #endif | 11582 #endif |
11462 | 11583 |
11463 | 11584 |
11464 #undef __ | 11585 #undef __ |
11465 | 11586 |
11466 } } // namespace v8::internal | 11587 } } // namespace v8::internal |