| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 147 matching lines...) |
| 158 Register key, | 158 Register key, |
| 159 Register r0, | 159 Register r0, |
| 160 Register r1, | 160 Register r1, |
| 161 Register r2) { | 161 Register r2) { |
| 162 // Register use: | 162 // Register use: |
| 163 // | 163 // |
| 164 // elements - holds the slow-case elements of the receiver and is unchanged. | 164 // elements - holds the slow-case elements of the receiver and is unchanged. |
| 165 // | 165 // |
| 166 // key - holds the smi key on entry and is unchanged if a branch is | 166 // key - holds the smi key on entry and is unchanged if a branch is |
| 167 // performed to the miss label. | 167 // performed to the miss label. |
| 168 // Holds the result on exit if the load succeeded. | |
| 169 // | 168 // |
| 170 // Scratch registers: | 169 // Scratch registers: |
| 171 // | 170 // |
| 172 // r0 - holds the untagged key on entry and holds the hash once computed. | 171 // r0 - holds the untagged key on entry and holds the hash once computed. |
| | 172 // Holds the result on exit if the load succeeded. |
| 173 // | 173 // |
| 174 // r1 - used to hold the capacity mask of the dictionary | 174 // r1 - used to hold the capacity mask of the dictionary |
| 175 // | 175 // |
| 176 // r2 - used for the index into the dictionary. | 176 // r2 - used for the index into the dictionary. |
| 177 Label done; | 177 Label done; |
| 178 | 178 |
| 179 // Compute the hash code from the untagged key. This must be kept in sync | 179 // Compute the hash code from the untagged key. This must be kept in sync |
| 180 // with ComputeIntegerHash in utils.h. | 180 // with ComputeIntegerHash in utils.h. |
| 181 // | 181 // |
| 182 // hash = ~hash + (hash << 15); | 182 // hash = ~hash + (hash << 15); |
| (...skipping 55 matching lines...) |
| 238 const int kDetailsOffset = | 238 const int kDetailsOffset = |
| 239 NumberDictionary::kElementsStartOffset + 2 * kPointerSize; | 239 NumberDictionary::kElementsStartOffset + 2 * kPointerSize; |
| 240 ASSERT_EQ(NORMAL, 0); | 240 ASSERT_EQ(NORMAL, 0); |
| 241 __ Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset), | 241 __ Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset), |
| 242 Smi::FromInt(PropertyDetails::TypeField::mask())); | 242 Smi::FromInt(PropertyDetails::TypeField::mask())); |
| 243 __ j(not_zero, miss); | 243 __ j(not_zero, miss); |
| 244 | 244 |
| 245 // Get the value at the masked, scaled index. | 245 // Get the value at the masked, scaled index. |
| 246 const int kValueOffset = | 246 const int kValueOffset = |
| 247 NumberDictionary::kElementsStartOffset + kPointerSize; | 247 NumberDictionary::kElementsStartOffset + kPointerSize; |
| 248 __ movq(key, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); | 248 __ movq(r0, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); |
| 249 } | 249 } |
| 250 | 250 |
| 251 | 251 |
| 252 // One byte opcode for test eax,0xXXXXXXXX. | 252 // One byte opcode for test eax,0xXXXXXXXX. |
| 253 static const byte kTestEaxByte = 0xA9; | 253 static const byte kTestEaxByte = 0xA9; |
| 254 | 254 |
| 255 | 255 |
| 256 static bool PatchInlinedMapCheck(Address address, Object* map) { | 256 static bool PatchInlinedMapCheck(Address address, Object* map) { |
| 257 // Arguments are address of start of call sequence that called | 257 // Arguments are address of start of call sequence that called |
| 258 // the IC, | 258 // the IC, |
| (...skipping 85 matching lines...) |
| 344 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 344 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
| 345 } | 345 } |
| 346 | 346 |
| 347 | 347 |
| 348 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | 348 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { |
| 349 // ----------- S t a t e ------------- | 349 // ----------- S t a t e ------------- |
| 350 // -- rsp[0] : return address | 350 // -- rsp[0] : return address |
| 351 // -- rsp[8] : name | 351 // -- rsp[8] : name |
| 352 // -- rsp[16] : receiver | 352 // -- rsp[16] : receiver |
| 353 // ----------------------------------- | 353 // ----------------------------------- |
| 354 Label slow, check_string, index_smi, index_string; | 354 Label slow, check_string, index_int, index_string; |
| 355 Label check_pixel_array, probe_dictionary; | 355 Label check_pixel_array, probe_dictionary; |
| 356 Label check_number_dictionary; | 356 Label check_number_dictionary; |
| 357 | 357 |
| 358 // Load name and receiver. | 358 // Load name and receiver. |
| 359 __ movq(rax, Operand(rsp, kPointerSize)); | 359 __ movq(rax, Operand(rsp, kPointerSize)); |
| 360 __ movq(rcx, Operand(rsp, 2 * kPointerSize)); | 360 __ movq(rcx, Operand(rsp, 2 * kPointerSize)); |
| 361 | 361 |
| 362 // Check that the object isn't a smi. | 362 // Check that the object isn't a smi. |
| 363 __ JumpIfSmi(rcx, &slow); | 363 __ JumpIfSmi(rcx, &slow); |
| 364 | 364 |
| 365 // Check that the object is some kind of JS object EXCEPT JS Value type. | 365 // Check that the object is some kind of JS object EXCEPT JS Value type. |
| 366 // In the case that the object is a value-wrapper object, | 366 // In the case that the object is a value-wrapper object, |
| 367 // we enter the runtime system to make sure that indexing | 367 // we enter the runtime system to make sure that indexing |
| 368 // into string objects work as intended. | 368 // into string objects work as intended. |
| 369 ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE); | 369 ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE); |
| 370 __ CmpObjectType(rcx, JS_OBJECT_TYPE, rdx); | 370 __ CmpObjectType(rcx, JS_OBJECT_TYPE, rdx); |
| 371 __ j(below, &slow); | 371 __ j(below, &slow); |
| 372 | 372 |
| 373 // Check bit field. | 373 // Check bit field. |
| 374 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), | 374 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), |
| 375 Immediate(kSlowCaseBitFieldMask)); | 375 Immediate(kSlowCaseBitFieldMask)); |
| 376 __ j(not_zero, &slow); | 376 __ j(not_zero, &slow); |
| 377 | 377 |
| 378 // Check that the key is a smi. | 378 // Check that the key is a smi. |
| 379 __ JumpIfNotSmi(rax, &check_string); | 379 __ JumpIfNotSmi(rax, &check_string); |
| 380 | 380 // Save key in rbx in case we want it for the number dictionary |
| | 381 // case. |
| | 382 __ movq(rbx, rax); |
| | 383 __ SmiToInteger32(rax, rax); |
| 381 // Get the elements array of the object. | 384 // Get the elements array of the object. |
| 382 __ bind(&index_smi); | 385 __ bind(&index_int); |
| 383 __ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset)); | 386 __ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset)); |
| 384 // Check that the object is in fast mode (not dictionary). | 387 // Check that the object is in fast mode (not dictionary). |
| 385 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 388 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 386 Heap::kFixedArrayMapRootIndex); | 389 Heap::kFixedArrayMapRootIndex); |
| 387 __ j(not_equal, &check_pixel_array); | 390 __ j(not_equal, &check_pixel_array); |
| 388 // Check that the key (index) is within bounds. | 391 // Check that the key (index) is within bounds. |
| 389 __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset)); | 392 __ cmpl(rax, FieldOperand(rcx, FixedArray::kLengthOffset)); |
| 390 __ j(above_equal, &slow); // Unsigned comparison rejects negative indices. | 393 __ j(above_equal, &slow); // Unsigned comparison rejects negative indices. |
| 391 // Fast case: Do the load. | 394 // Fast case: Do the load. |
| 392 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); | 395 __ movq(rax, Operand(rcx, rax, times_pointer_size, |
| 393 __ movq(rax, FieldOperand(rcx, | 396 FixedArray::kHeaderSize - kHeapObjectTag)); |
| 394 index.reg, | |
| 395 index.scale, | |
| 396 FixedArray::kHeaderSize)); | |
| 397 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | 397 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
| 398 // In case the loaded value is the_hole we have to consult GetProperty | 398 // In case the loaded value is the_hole we have to consult GetProperty |
| 399 // to ensure the prototype chain is searched. | 399 // to ensure the prototype chain is searched. |
| 400 __ j(equal, &slow); | 400 __ j(equal, &slow); |
| 401 __ IncrementCounter(&Counters::keyed_load_generic_smi, 1); | 401 __ IncrementCounter(&Counters::keyed_load_generic_smi, 1); |
| 402 __ ret(0); | 402 __ ret(0); |
| 403 | 403 |
| 404 // Check whether the elements is a pixel array. | 404 // Check whether the elements is a pixel array. |
| 405 // rax: key | 405 // rax: untagged index |
| 406 // rcx: elements array | 406 // rcx: elements array |
| 407 __ bind(&check_pixel_array); | 407 __ bind(&check_pixel_array); |
| 408 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 408 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 409 Heap::kPixelArrayMapRootIndex); | 409 Heap::kPixelArrayMapRootIndex); |
| 410 __ j(not_equal, &check_number_dictionary); | 410 __ j(not_equal, &check_number_dictionary); |
| 411 __ SmiToInteger32(rax, rax); | |
| 412 __ cmpl(rax, FieldOperand(rcx, PixelArray::kLengthOffset)); | 411 __ cmpl(rax, FieldOperand(rcx, PixelArray::kLengthOffset)); |
| 413 __ j(above_equal, &slow); | 412 __ j(above_equal, &slow); |
| 414 __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset)); | 413 __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset)); |
| 415 __ movzxbq(rax, Operand(rcx, rax, times_1, 0)); | 414 __ movzxbq(rax, Operand(rcx, rax, times_1, 0)); |
| 416 __ Integer32ToSmi(rax, rax); | 415 __ Integer32ToSmi(rax, rax); |
| 417 __ ret(0); | 416 __ ret(0); |
| 418 | 417 |
| 419 __ bind(&check_number_dictionary); | 418 __ bind(&check_number_dictionary); |
| 420 // Check whether the elements is a number dictionary. | 419 // Check whether the elements is a number dictionary. |
| 421 // rax: key | 420 // rax: untagged index |
| | 421 // rbx: key |
| 422 // rcx: elements | 422 // rcx: elements |
| 423 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 423 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 424 Heap::kHashTableMapRootIndex); | 424 Heap::kHashTableMapRootIndex); |
| 425 __ j(not_equal, &slow); | 425 __ j(not_equal, &slow); |
| 426 __ SmiToInteger32(rbx, rax); | 426 GenerateNumberDictionaryLoad(masm, &slow, rcx, rbx, rax, rdx, rdi); |
| 427 GenerateNumberDictionaryLoad(masm, &slow, rcx, rax, rbx, rdx, rdi); | |
| 428 __ ret(0); | 427 __ ret(0); |
| 429 | 428 |
| 430 // Slow case: Load name and receiver from stack and jump to runtime. | 429 // Slow case: Load name and receiver from stack and jump to runtime. |
| 431 __ bind(&slow); | 430 __ bind(&slow); |
| 432 __ IncrementCounter(&Counters::keyed_load_generic_slow, 1); | 431 __ IncrementCounter(&Counters::keyed_load_generic_slow, 1); |
| 433 GenerateRuntimeGetProperty(masm); | 432 GenerateRuntimeGetProperty(masm); |
| 434 __ bind(&check_string); | 433 __ bind(&check_string); |
| 435 // The key is not a smi. | 434 // The key is not a smi. |
| 436 // Is it a string? | 435 // Is it a string? |
| 437 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); | 436 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); |
| (...skipping 68 matching lines...) |
| 506 __ movq(rax, rcx); | 505 __ movq(rax, rcx); |
| 507 __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1); | 506 __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1); |
| 508 __ ret(0); | 507 __ ret(0); |
| 509 // If the hash field contains an array index pick it out. The assert checks | 508 // If the hash field contains an array index pick it out. The assert checks |
| 510 // that the constants for the maximum number of digits for an array index | 509 // that the constants for the maximum number of digits for an array index |
| 511 // cached in the hash field and the number of bits reserved for it does not | 510 // cached in the hash field and the number of bits reserved for it does not |
| 512 // conflict. | 511 // conflict. |
| 513 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < | 512 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < |
| 514 (1 << String::kArrayIndexValueBits)); | 513 (1 << String::kArrayIndexValueBits)); |
| 515 __ bind(&index_string); | 514 __ bind(&index_string); |
| 516 // We want the smi-tagged index in rax. | 515 __ movl(rax, rbx); |
| 517 __ and_(rbx, Immediate(String::kArrayIndexValueMask)); | 516 __ and_(rax, Immediate(String::kArrayIndexHashMask)); |
| 518 __ shr(rbx, Immediate(String::kHashShift)); | 517 __ shrl(rax, Immediate(String::kHashShift)); |
| 519 __ Integer32ToSmi(rax, rbx); | 518 __ jmp(&index_int); |
| 520 __ jmp(&index_smi); | |
| 521 } | 519 } |
| 522 | 520 |
| 523 | 521 |
| 524 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { | 522 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { |
| 525 // ----------- S t a t e ------------- | 523 // ----------- S t a t e ------------- |
| 526 // -- rsp[0] : return address | 524 // -- rsp[0] : return address |
| 527 // -- rsp[8] : name | 525 // -- rsp[8] : name |
| 528 // -- rsp[16] : receiver | 526 // -- rsp[16] : receiver |
| 529 // ----------------------------------- | 527 // ----------------------------------- |
| 530 Label miss; | 528 Label miss; |
| (...skipping 320 matching lines...) |
| 851 | 849 |
| 852 // Object case: Check key against length in the elements array. | 850 // Object case: Check key against length in the elements array. |
| 853 // rax: value | 851 // rax: value |
| 854 // rdx: JSObject | 852 // rdx: JSObject |
| 855 // rbx: index (as a smi) | 853 // rbx: index (as a smi) |
| 856 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); | 854 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); |
| 857 // Check that the object is in fast mode (not dictionary). | 855 // Check that the object is in fast mode (not dictionary). |
| 858 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 856 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 859 Heap::kFixedArrayMapRootIndex); | 857 Heap::kFixedArrayMapRootIndex); |
| 860 __ j(not_equal, &check_pixel_array); | 858 __ j(not_equal, &check_pixel_array); |
| 861 __ SmiCompare(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); | 859 // Untag the key (for checking against untagged length in the fixed array). |
| | 860 __ SmiToInteger32(rdx, rbx); |
| | 861 __ cmpl(rdx, FieldOperand(rcx, Array::kLengthOffset)); |
| 862 // rax: value | 862 // rax: value |
| 863 // rcx: FixedArray | 863 // rcx: FixedArray |
| 864 // rbx: index (as a smi) | 864 // rbx: index (as a smi) |
| 865 __ j(below, &fast); | 865 __ j(below, &fast); |
| 866 | 866 |
| 867 // Slow case: call runtime. | 867 // Slow case: call runtime. |
| 868 __ bind(&slow); | 868 __ bind(&slow); |
| 869 GenerateRuntimeSetProperty(masm); | 869 GenerateRuntimeSetProperty(masm); |
| 870 | 870 |
| 871 // Check whether the elements is a pixel array. | 871 // Check whether the elements is a pixel array. |
| (...skipping 28 matching lines...) |
| 900 // Extra capacity case: Check if there is extra capacity to | 900 // Extra capacity case: Check if there is extra capacity to |
| 901 // perform the store and update the length. Used for adding one | 901 // perform the store and update the length. Used for adding one |
| 902 // element to the array by writing to array[array.length]. | 902 // element to the array by writing to array[array.length]. |
| 903 __ bind(&extra); | 903 __ bind(&extra); |
| 904 // rax: value | 904 // rax: value |
| 905 // rdx: JSArray | 905 // rdx: JSArray |
| 906 // rcx: FixedArray | 906 // rcx: FixedArray |
| 907 // rbx: index (as a smi) | 907 // rbx: index (as a smi) |
| 908 // flags: smicompare (rdx.length(), rbx) | 908 // flags: smicompare (rdx.length(), rbx) |
| 909 __ j(not_equal, &slow); // do not leave holes in the array | 909 __ j(not_equal, &slow); // do not leave holes in the array |
| 910 __ SmiCompare(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); | 910 __ SmiToInteger64(rbx, rbx); |
| | 911 __ cmpl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); |
| 911 __ j(above_equal, &slow); | 912 __ j(above_equal, &slow); |
| 912 // Increment index to get new length. | 913 // Increment and restore smi-tag. |
| 913 __ SmiAddConstant(rdi, rbx, Smi::FromInt(1)); | 914 __ Integer64PlusConstantToSmi(rbx, rbx, 1); |
| 914 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rdi); | 915 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rbx); |
| | 916 __ SmiSubConstant(rbx, rbx, Smi::FromInt(1)); |
| 915 __ jmp(&fast); | 917 __ jmp(&fast); |
| 916 | 918 |
| 917 // Array case: Get the length and the elements array from the JS | 919 // Array case: Get the length and the elements array from the JS |
| 918 // array. Check that the array is in fast mode; if it is the | 920 // array. Check that the array is in fast mode; if it is the |
| 919 // length is always a smi. | 921 // length is always a smi. |
| 920 __ bind(&array); | 922 __ bind(&array); |
| 921 // rax: value | 923 // rax: value |
| 922 // rdx: JSArray | 924 // rdx: JSArray |
| 923 // rbx: index (as a smi) | 925 // rbx: index (as a smi) |
| 924 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); | 926 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); |
| 925 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 927 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 926 Heap::kFixedArrayMapRootIndex); | 928 Heap::kFixedArrayMapRootIndex); |
| 927 __ j(not_equal, &slow); | 929 __ j(not_equal, &slow); |
| 928 | 930 |
| 929 // Check the key against the length in the array, compute the | 931 // Check the key against the length in the array, compute the |
| 930 // address to store into and fall through to fast case. | 932 // address to store into and fall through to fast case. |
| 931 __ SmiCompare(FieldOperand(rdx, JSArray::kLengthOffset), rbx); | 933 __ SmiCompare(FieldOperand(rdx, JSArray::kLengthOffset), rbx); |
| 932 __ j(below_equal, &extra); | 934 __ j(below_equal, &extra); |
| 933 | 935 |
| 934 // Fast case: Do the store. | 936 // Fast case: Do the store. |
| 935 __ bind(&fast); | 937 __ bind(&fast); |
| 936 // rax: value | 938 // rax: value |
| 937 // rcx: FixedArray | 939 // rcx: FixedArray |
| 938 // rbx: index (as a smi) | 940 // rbx: index (as a smi) |
| 939 Label non_smi_value; | 941 Label non_smi_value; |
| 940 __ JumpIfNotSmi(rax, &non_smi_value); | 942 __ JumpIfNotSmi(rax, &non_smi_value); |
| 941 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); | 943 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); |
| 942 __ movq(FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize), | 944 __ movq(Operand(rcx, index.reg, index.scale, |
| | 945 FixedArray::kHeaderSize - kHeapObjectTag), |
| 943 rax); | 946 rax); |
| 944 __ ret(0); | 947 __ ret(0); |
| 945 __ bind(&non_smi_value); | 948 __ bind(&non_smi_value); |
| 946 // Slow case that needs to retain rbx for use by RecordWrite. | 949 // Slow case that needs to retain rbx for use by RecordWrite. |
| 947 // Update write barrier for the elements array address. | 950 // Update write barrier for the elements array address. |
| 948 SmiIndex index2 = masm->SmiToIndex(kScratchRegister, rbx, kPointerSizeLog2); | 951 SmiIndex index2 = masm->SmiToIndex(kScratchRegister, rbx, kPointerSizeLog2); |
| 949 __ movq(FieldOperand(rcx, index2.reg, index2.scale, FixedArray::kHeaderSize), | 952 __ movq(Operand(rcx, index2.reg, index2.scale, |
| | 953 FixedArray::kHeaderSize - kHeapObjectTag), |
| 950 rax); | 954 rax); |
| 951 __ movq(rdx, rax); | 955 __ movq(rdx, rax); |
| 952 __ RecordWriteNonSmi(rcx, 0, rdx, rbx); | 956 __ RecordWriteNonSmi(rcx, 0, rdx, rbx); |
| 953 __ ret(0); | 957 __ ret(0); |
| 954 } | 958 } |
| 955 | 959 |
| 956 | 960 |
| 957 void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm, | 961 void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm, |
| 958 ExternalArrayType array_type) { | 962 ExternalArrayType array_type) { |
| 959 // ----------- S t a t e ------------- | 963 // ----------- S t a t e ------------- |
| (...skipping 652 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1612 GenerateMiss(masm); | 1616 GenerateMiss(masm); |
| 1613 } | 1617 } |
| 1614 | 1618 |
| 1615 | 1619 |
| 1616 #undef __ | 1620 #undef __ |
| 1617 | 1621 |
| 1618 | 1622 |
| 1619 } } // namespace v8::internal | 1623 } } // namespace v8::internal |
| 1620 | 1624 |
| 1621 #endif // V8_TARGET_ARCH_X64 | 1625 #endif // V8_TARGET_ARCH_X64 |
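
The comment around new lines 179-182 notes that the assembly hash in GenerateNumberDictionaryLoad must be kept in sync with ComputeIntegerHash in utils.h, but only the first mixing step is visible in this diff. As a point of reference, here is a minimal C++ sketch of that hash; every step after the first is an assumption based on the conventional 32-bit integer-mixing sequence this code follows, not a verbatim copy of utils.h.

```cpp
#include <stdint.h>

// Sketch of the integer hash the dictionary-load stub mirrors in assembly.
// Only "hash = ~hash + (hash << 15)" appears in the visible diff; the
// remaining steps are assumed from the standard 32-bit mixing sequence.
static inline uint32_t ComputeIntegerHash(uint32_t key) {
  uint32_t hash = key;
  hash = ~hash + (hash << 15);  // step shown in the diff comment
  hash = hash ^ (hash >> 12);   // assumed remaining steps
  hash = hash + (hash << 2);
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;
  hash = hash ^ (hash >> 16);
  return hash;
}
```

Per the register comments in the stub, the resulting hash is then masked with the dictionary's capacity mask (held in r1) to pick the probe index kept in r2.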
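Much of this change revolves around tagging and untagging keys (SmiToInteger32, Integer32ToSmi, SmiToIndex). As a hypothetical illustration only, and assuming the x64 port's convention of keeping the 32-bit payload in the upper half of the word with a zero low half, the two conversions amount to 64-bit shifts; these are not the V8 macro-assembler routines themselves.

```cpp
#include <stdint.h>

// Illustration of x64 smi tagging under the assumed upper-half payload layout.
inline int32_t SmiToInteger32(int64_t smi) {
  // Arithmetic shift discards the zeroed tag half and recovers the value.
  return static_cast<int32_t>(smi >> 32);
}

inline int64_t Integer32ToSmi(int32_t value) {
  // Shift the payload into the upper 32 bits; unsigned arithmetic keeps the
  // left shift well defined for negative values.
  uint64_t tagged = static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
  return static_cast<int64_t>(tagged);
}
```

Under that layout, once the key is untagged the new code can compare it directly against the elements length with cmpl, which is consistent with the length being stored untagged in this revision.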