| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 271 matching lines...) |
| 282 // Check that the key (index) is within bounds. | 282 // Check that the key (index) is within bounds. |
| 283 __ cmpl(rax, FieldOperand(rcx, FixedArray::kLengthOffset)); | 283 __ cmpl(rax, FieldOperand(rcx, FixedArray::kLengthOffset)); |
| 284 __ j(above_equal, &slow); // Unsigned comparison rejects negative indices. | 284 __ j(above_equal, &slow); // Unsigned comparison rejects negative indices. |
| 285 // Fast case: Do the load. | 285 // Fast case: Do the load. |
| 286 __ movq(rax, Operand(rcx, rax, times_pointer_size, | 286 __ movq(rax, Operand(rcx, rax, times_pointer_size, |
| 287 FixedArray::kHeaderSize - kHeapObjectTag)); | 287 FixedArray::kHeaderSize - kHeapObjectTag)); |
| 288 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | 288 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
| 289 // In case the loaded value is the_hole we have to consult GetProperty | 289 // In case the loaded value is the_hole we have to consult GetProperty |
| 290 // to ensure the prototype chain is searched. | 290 // to ensure the prototype chain is searched. |
| 291 __ j(equal, &slow); | 291 __ j(equal, &slow); |
| 292 __ IncrementCounter(&Counters::keyed_load_generic_smi, 1); | 292 __ IncrementCounter(&COUNTER(keyed_load_generic_smi), 1); |
| 293 __ ret(0); | 293 __ ret(0); |
| 294 | 294 |
| 295 // Check whether the elements is a pixel array. | 295 // Check whether the elements is a pixel array. |
| 296 // rax: untagged index | 296 // rax: untagged index |
| 297 // rcx: elements array | 297 // rcx: elements array |
| 298 __ bind(&check_pixel_array); | 298 __ bind(&check_pixel_array); |
| 299 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 299 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 300 Heap::kPixelArrayMapRootIndex); | 300 Heap::kPixelArrayMapRootIndex); |
| 301 __ j(not_equal, &slow); | 301 __ j(not_equal, &slow); |
| 302 __ cmpl(rax, FieldOperand(rcx, PixelArray::kLengthOffset)); | 302 __ cmpl(rax, FieldOperand(rcx, PixelArray::kLengthOffset)); |
| 303 __ j(above_equal, &slow); | 303 __ j(above_equal, &slow); |
| 304 __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset)); | 304 __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset)); |
| 305 __ movzxbq(rax, Operand(rcx, rax, times_1, 0)); | 305 __ movzxbq(rax, Operand(rcx, rax, times_1, 0)); |
| 306 __ Integer32ToSmi(rax, rax); | 306 __ Integer32ToSmi(rax, rax); |
| 307 __ ret(0); | 307 __ ret(0); |
| 308 | 308 |
| 309 // Slow case: Load name and receiver from stack and jump to runtime. | 309 // Slow case: Load name and receiver from stack and jump to runtime. |
| 310 __ bind(&slow); | 310 __ bind(&slow); |
| 311 __ IncrementCounter(&Counters::keyed_load_generic_slow, 1); | 311 __ IncrementCounter(&COUNTER(keyed_load_generic_slow), 1); |
| 312 Generate(masm, ExternalReference(Runtime::kKeyedGetProperty)); | 312 Generate(masm, ExternalReference(Runtime::kKeyedGetProperty)); |
| 313 __ bind(&check_string); | 313 __ bind(&check_string); |
| 314 // The key is not a smi. | 314 // The key is not a smi. |
| 315 // Is it a string? | 315 // Is it a string? |
| 316 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); | 316 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); |
| 317 __ j(above_equal, &slow); | 317 __ j(above_equal, &slow); |
| 318 // Is the string an array index, with cached numeric value? | 318 // Is the string an array index, with cached numeric value? |
| 319 __ movl(rbx, FieldOperand(rax, String::kHashFieldOffset)); | 319 __ movl(rbx, FieldOperand(rax, String::kHashFieldOffset)); |
| 320 __ testl(rbx, Immediate(String::kIsArrayIndexMask)); | 320 __ testl(rbx, Immediate(String::kIsArrayIndexMask)); |
| 321 | 321 |
| 322 // If the string is a symbol, do a quick inline probe of the receiver's | 322 // If the string is a symbol, do a quick inline probe of the receiver's |
| 323 // dictionary, if it exists. | 323 // dictionary, if it exists. |
| 324 __ j(not_zero, &index_string); // The value in rbx is used at jump target. | 324 __ j(not_zero, &index_string); // The value in rbx is used at jump target. |
| 325 __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset), | 325 __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset), |
| 326 Immediate(kIsSymbolMask)); | 326 Immediate(kIsSymbolMask)); |
| 327 __ j(zero, &slow); | 327 __ j(zero, &slow); |
| 328 // Probe the dictionary leaving result in rcx. | 328 // Probe the dictionary leaving result in rcx. |
| 329 GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax); | 329 GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax); |
| 330 GenerateCheckNonObjectOrLoaded(masm, &slow, rcx); | 330 GenerateCheckNonObjectOrLoaded(masm, &slow, rcx); |
| 331 __ movq(rax, rcx); | 331 __ movq(rax, rcx); |
| 332 __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1); | 332 __ IncrementCounter(&COUNTER(keyed_load_generic_symbol), 1); |
| 333 __ ret(0); | 333 __ ret(0); |
| 334 // If the hash field contains an array index pick it out. The assert checks | 334 // If the hash field contains an array index pick it out. The assert checks |
| 335 // that the constants for the maximum number of digits for an array index | 335 // that the constants for the maximum number of digits for an array index |
| 336 // cached in the hash field and the number of bits reserved for it does not | 336 // cached in the hash field and the number of bits reserved for it does not |
| 337 // conflict. | 337 // conflict. |
| 338 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < | 338 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < |
| 339 (1 << String::kArrayIndexValueBits)); | 339 (1 << String::kArrayIndexValueBits)); |
| 340 __ bind(&index_string); | 340 __ bind(&index_string); |
| 341 __ movl(rax, rbx); | 341 __ movl(rax, rbx); |
| 342 __ and_(rax, Immediate(String::kArrayIndexHashMask)); | 342 __ and_(rax, Immediate(String::kArrayIndexHashMask)); |
| (...skipping 130 matching lines...) |
| 473 | 473 |
| 474 // If we fail allocation of the HeapNumber, we still have a value on | 474 // If we fail allocation of the HeapNumber, we still have a value on |
| 475 // top of the FPU stack. Remove it. | 475 // top of the FPU stack. Remove it. |
| 476 __ bind(&failed_allocation); | 476 __ bind(&failed_allocation); |
| 477 __ ffree(); | 477 __ ffree(); |
| 478 __ fincstp(); | 478 __ fincstp(); |
| 479 // Fall through to slow case. | 479 // Fall through to slow case. |
| 480 | 480 |
| 481 // Slow case: Load name and receiver from stack and jump to runtime. | 481 // Slow case: Load name and receiver from stack and jump to runtime. |
| 482 __ bind(&slow); | 482 __ bind(&slow); |
| 483 __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1); | 483 __ IncrementCounter(&COUNTER(keyed_load_external_array_slow), 1); |
| 484 Generate(masm, ExternalReference(Runtime::kKeyedGetProperty)); | 484 Generate(masm, ExternalReference(Runtime::kKeyedGetProperty)); |
| 485 } | 485 } |
| 486 | 486 |
| 487 | 487 |
| 488 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 488 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
| 489 // ----------- S t a t e ------------- | 489 // ----------- S t a t e ------------- |
| 490 // -- rsp[0] : return address | 490 // -- rsp[0] : return address |
| 491 // -- rsp[8] : name | 491 // -- rsp[8] : name |
| 492 // -- rsp[16] : receiver | 492 // -- rsp[16] : receiver |
| 493 // ----------------------------------- | 493 // ----------------------------------- |
| (...skipping 818 matching lines...) |
| 1312 | 1312 |
| 1313 // Cache miss: Jump to runtime. | 1313 // Cache miss: Jump to runtime. |
| 1314 Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss))); | 1314 Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss))); |
| 1315 } | 1315 } |
| 1316 | 1316 |
| 1317 | 1317 |
| 1318 #undef __ | 1318 #undef __ |
| 1319 | 1319 |
| 1320 | 1320 |
| 1321 } } // namespace v8::internal | 1321 } } // namespace v8::internal |
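
Note on the change: the NEW side routes every counter reference through a `COUNTER(...)` macro instead of naming a `Counters` member directly at each `IncrementCounter` call site, so how a counter is resolved can later be changed in one place behind the macro. A minimal sketch of that indirection is below; the `StatsCounter`, `Counters`, `COUNTER`, and `IncrementCounter` definitions here are illustrative assumptions that mirror the call-site pattern in the diff, not V8's actual declarations.

```cpp
#include <cstdio>

// Hypothetical stand-in for a stats counter slot.
struct StatsCounter {
  const char* name;
  int value;
};

// Hypothetical bag of counters with one member per counter id,
// analogous to the Counters struct referenced on the OLD side.
struct Counters {
  static StatsCounter keyed_load_generic_smi;
  static StatsCounter keyed_load_generic_slow;
};

StatsCounter Counters::keyed_load_generic_smi = {"keyed_load_generic_smi", 0};
StatsCounter Counters::keyed_load_generic_slow = {"keyed_load_generic_slow", 0};

// The indirection the patch introduces: call sites say COUNTER(id) rather
// than Counters::id, so the lookup strategy can be swapped behind the macro
// without touching every IncrementCounter call.
#define COUNTER(id) Counters::id

// Simplified stand-in for the MacroAssembler helper used in the diff.
void IncrementCounter(StatsCounter* counter, int delta) {
  counter->value += delta;
}

int main() {
  IncrementCounter(&COUNTER(keyed_load_generic_smi), 1);  // mirrors the NEW call sites
  std::printf("%s = %d\n", Counters::keyed_load_generic_smi.name,
              Counters::keyed_load_generic_smi.value);
  return 0;
}
```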