| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 237 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 248 __ mov(cp, v0); | 248 __ mov(cp, v0); |
| 249 __ Addu(sp, sp, Operand(2 * kPointerSize)); | 249 __ Addu(sp, sp, Operand(2 * kPointerSize)); |
| 250 __ Ret(); | 250 __ Ret(); |
| 251 | 251 |
| 252 // Need to collect. Call into runtime system. | 252 // Need to collect. Call into runtime system. |
| 253 __ bind(&gc); | 253 __ bind(&gc); |
| 254 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 254 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
| 255 } | 255 } |
| 256 | 256 |
| 257 | 257 |
| 258 static void GenerateFastCloneShallowArrayCommon( |
| 259 MacroAssembler* masm, |
| 260 int length, |
| 261 FastCloneShallowArrayStub::Mode mode, |
| 262 Label* fail) { |
| 263 // Registers on entry: |
| 264 // a3: boilerplate literal array. |
| 265 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); |
| 266 |
| 267 // All sizes here are multiples of kPointerSize. |
| 268 int elements_size = 0; |
| 269 if (length > 0) { |
| 270 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 271 ? FixedDoubleArray::SizeFor(length) |
| 272 : FixedArray::SizeFor(length); |
| 273 } |
| 274 int size = JSArray::kSize + elements_size; |
| 275 |
| 276 // Allocate both the JS array and the elements array in one big |
| 277 // allocation. This avoids multiple limit checks. |
| 278 __ AllocateInNewSpace(size, |
| 279 v0, |
| 280 a1, |
| 281 a2, |
| 282 fail, |
| 283 TAG_OBJECT); |
| 284 |
| 285 // Copy the JS array part. |
| 286 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
| 287 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
| 288 __ lw(a1, FieldMemOperand(a3, i)); |
| 289 __ sw(a1, FieldMemOperand(v0, i)); |
| 290 } |
| 291 } |
| 292 |
| 293 if (length > 0) { |
| 294 // Get hold of the elements array of the boilerplate and set up the |
| 295 // elements pointer in the resulting object. |
| 296 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); |
| 297 __ Addu(a2, v0, Operand(JSArray::kSize)); |
| 298 __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset)); |
| 299 |
| 300 // Copy the elements array. |
| 301 ASSERT((elements_size % kPointerSize) == 0); |
| 302 __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize); |
| 303 } |
| 304 } |
| 305 |
| 258 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 306 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
| 259 // Stack layout on entry: | 307 // Stack layout on entry: |
| 308 // |
| 260 // [sp]: constant elements. | 309 // [sp]: constant elements. |
| 261 // [sp + kPointerSize]: literal index. | 310 // [sp + kPointerSize]: literal index. |
| 262 // [sp + (2 * kPointerSize)]: literals array. | 311 // [sp + (2 * kPointerSize)]: literals array. |
| 263 | 312 |
| 264 // All sizes here are multiples of kPointerSize. | |
| 265 int elements_size = 0; | |
| 266 if (length_ > 0) { | |
| 267 elements_size = mode_ == CLONE_DOUBLE_ELEMENTS | |
| 268 ? FixedDoubleArray::SizeFor(length_) | |
| 269 : FixedArray::SizeFor(length_); | |
| 270 } | |
| 271 int size = JSArray::kSize + elements_size; | |
| 272 | |
| 273 // Load boilerplate object into r3 and check if we need to create a | 313 // Load boilerplate object into a3 and check if we need to create a |
| 274 // boilerplate. | 314 // boilerplate. |
| 275 Label slow_case; | 315 Label slow_case; |
| 276 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); | 316 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); |
| 277 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); | 317 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); |
| 278 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 318 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 279 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); | 319 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); |
| 280 __ Addu(t0, a3, t0); | 320 __ Addu(t0, a3, t0); |
| 281 __ lw(a3, MemOperand(t0)); | 321 __ lw(a3, MemOperand(t0)); |
| 282 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); | 322 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); |
| 283 __ Branch(&slow_case, eq, a3, Operand(t1)); | 323 __ Branch(&slow_case, eq, a3, Operand(t1)); |
| 284 | 324 |
| 325 FastCloneShallowArrayStub::Mode mode = mode_; |
| 326 if (mode == CLONE_ANY_ELEMENTS) { |
| 327 Label double_elements, check_fast_elements; |
| 328 __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); |
| 329 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 330 __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex); |
| 331 __ Branch(&check_fast_elements, ne, v0, Operand(t1)); |
| 332 GenerateFastCloneShallowArrayCommon(masm, 0, |
| 333 COPY_ON_WRITE_ELEMENTS, &slow_case); |
| 334 // Return and remove the on-stack parameters. |
| 335 __ DropAndRet(3); |
| 336 |
| 337 __ bind(&check_fast_elements); |
| 338 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); |
| 339 __ Branch(&double_elements, ne, v0, Operand(t1)); |
| 340 GenerateFastCloneShallowArrayCommon(masm, length_, |
| 341 CLONE_ELEMENTS, &slow_case); |
| 342 // Return and remove the on-stack parameters. |
| 343 __ DropAndRet(3); |
| 344 |
| 345 __ bind(&double_elements); |
| 346 mode = CLONE_DOUBLE_ELEMENTS; |
| 347 // Fall through to generate the code to handle double elements. |
| 348 } |
| 349 |
| 285 if (FLAG_debug_code) { | 350 if (FLAG_debug_code) { |
| 286 const char* message; | 351 const char* message; |
| 287 Heap::RootListIndex expected_map_index; | 352 Heap::RootListIndex expected_map_index; |
| 288 if (mode_ == CLONE_ELEMENTS) { | 353 if (mode == CLONE_ELEMENTS) { |
| 289 message = "Expected (writable) fixed array"; | 354 message = "Expected (writable) fixed array"; |
| 290 expected_map_index = Heap::kFixedArrayMapRootIndex; | 355 expected_map_index = Heap::kFixedArrayMapRootIndex; |
| 291 } else if (mode_ == CLONE_DOUBLE_ELEMENTS) { | 356 } else if (mode == CLONE_DOUBLE_ELEMENTS) { |
| 292 message = "Expected (writable) fixed double array"; | 357 message = "Expected (writable) fixed double array"; |
| 293 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; | 358 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; |
| 294 } else { | 359 } else { |
| 295 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); | 360 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); |
| 296 message = "Expected copy-on-write fixed array"; | 361 message = "Expected copy-on-write fixed array"; |
| 297 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; | 362 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; |
| 298 } | 363 } |
| 299 __ push(a3); | 364 __ push(a3); |
| 300 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); | 365 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); |
| 301 __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset)); | 366 __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset)); |
| 302 __ LoadRoot(at, expected_map_index); | 367 __ LoadRoot(at, expected_map_index); |
| 303 __ Assert(eq, message, a3, Operand(at)); | 368 __ Assert(eq, message, a3, Operand(at)); |
| 304 __ pop(a3); | 369 __ pop(a3); |
| 305 } | 370 } |
| 306 | 371 |
| 307 // Allocate both the JS array and the elements array in one big | 372 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case); |
| 308 // allocation. This avoids multiple limit checks. | |
| 309 // Return new object in v0. | |
| 310 __ AllocateInNewSpace(size, | |
| 311 v0, | |
| 312 a1, | |
| 313 a2, | |
| 314 &slow_case, | |
| 315 TAG_OBJECT); | |
| 316 | |
| 317 // Copy the JS array part. | |
| 318 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | |
| 319 if ((i != JSArray::kElementsOffset) || (length_ == 0)) { | |
| 320 __ lw(a1, FieldMemOperand(a3, i)); | |
| 321 __ sw(a1, FieldMemOperand(v0, i)); | |
| 322 } | |
| 323 } | |
| 324 | |
| 325 if (length_ > 0) { | |
| 326 // Get hold of the elements array of the boilerplate and setup the | |
| 327 // elements pointer in the resulting object. | |
| 328 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); | |
| 329 __ Addu(a2, v0, Operand(JSArray::kSize)); | |
| 330 __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset)); | |
| 331 | |
| 332 // Copy the elements array. | |
| 333 ASSERT((elements_size % kPointerSize) == 0); | |
| 334 __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize); | |
| 335 } | |
| 336 | 373 |
| 337 // Return and remove the on-stack parameters. | 374 // Return and remove the on-stack parameters. |
| 338 __ Addu(sp, sp, Operand(3 * kPointerSize)); | 375 __ Addu(sp, sp, Operand(3 * kPointerSize)); |
| 339 __ Ret(); | 376 __ Ret(); |
| 340 | 377 |
| 341 __ bind(&slow_case); | 378 __ bind(&slow_case); |
| 342 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 379 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
| 343 } | 380 } |
| 344 | 381 |
| 345 | 382 |
| 383 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { |
| 384 // Stack layout on entry: |
| 385 // |
| 386 // [sp]: object literal flags. |
| 387 // [sp + kPointerSize]: constant properties. |
| 388 // [sp + (2 * kPointerSize)]: literal index. |
| 389 // [sp + (3 * kPointerSize)]: literals array. |
| 390 |
| 391 // Load boilerplate object into a3 and check if we need to create a |
| 392 // boilerplate. |
| 393 Label slow_case; |
| 394 __ lw(a3, MemOperand(sp, 3 * kPointerSize)); |
| 395 __ lw(a0, MemOperand(sp, 2 * kPointerSize)); |
| 396 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 397 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); |
| 398 __ Addu(a3, t0, a3); |
| 399 __ lw(a3, MemOperand(a3)); |
| 400 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); |
| 401 __ Branch(&slow_case, eq, a3, Operand(t0)); |
| 402 |
| 403 // Check that the boilerplate contains only fast properties and we can |
| 404 // statically determine the instance size. |
| 405 int size = JSObject::kHeaderSize + length_ * kPointerSize; |
| 406 __ lw(a0, FieldMemOperand(a3, HeapObject::kMapOffset)); |
| 407 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceSizeOffset)); |
| 408 __ Branch(&slow_case, ne, a0, Operand(size >> kPointerSizeLog2)); |
| 409 |
| 410 // Allocate the JS object and copy header together with all in-object |
| 411 // properties from the boilerplate. |
| 412 __ AllocateInNewSpace(size, a0, a1, a2, &slow_case, TAG_OBJECT); |
| 413 for (int i = 0; i < size; i += kPointerSize) { |
| 414 __ lw(a1, FieldMemOperand(a3, i)); |
| 415 __ sw(a1, FieldMemOperand(a0, i)); |
| 416 } |
| 417 |
| 418 // Return and remove the on-stack parameters. |
| 419 __ Drop(4); |
| 420 __ Ret(USE_DELAY_SLOT); |
| 421 __ mov(v0, a0); |
| 422 |
| 423 __ bind(&slow_case); |
| 424 __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1); |
| 425 } |
| 426 |
| 427 |
| 346 // Takes a Smi and converts to an IEEE 64 bit floating point value in two | 428 // Takes a Smi and converts to an IEEE 64 bit floating point value in two |
| 347 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and | 429 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and |
| 348 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a | 430 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a |
| 349 // scratch register. Destroys the source register. No GC occurs during this | 431 // scratch register. Destroys the source register. No GC occurs during this |
| 350 // stub so you don't have to set up the frame. | 432 // stub so you don't have to set up the frame. |
| 351 class ConvertToDoubleStub : public CodeStub { | 433 class ConvertToDoubleStub : public CodeStub { |
| 352 public: | 434 public: |
| 353 ConvertToDoubleStub(Register result_reg_1, | 435 ConvertToDoubleStub(Register result_reg_1, |
| 354 Register result_reg_2, | 436 Register result_reg_2, |
| 355 Register source_reg, | 437 Register source_reg, |
| (...skipping 4396 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4752 | 4834 |
| 4753 // Reset offset for possibly sliced string. | 4835 // Reset offset for possibly sliced string. |
| 4754 __ mov(t0, zero_reg); | 4836 __ mov(t0, zero_reg); |
| 4755 // subject: Subject string | 4837 // subject: Subject string |
| 4756 // regexp_data: RegExp data (FixedArray) | 4838 // regexp_data: RegExp data (FixedArray) |
| 4757 // Check the representation and encoding of the subject string. | 4839 // Check the representation and encoding of the subject string. |
| 4758 Label seq_string; | 4840 Label seq_string; |
| 4759 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 4841 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
| 4760 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); | 4842 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); |
| 4761 // First check for flat string. None of the following string type tests will | 4843 // First check for flat string. None of the following string type tests will |
| 4762 // succeed if kIsNotStringTag is set. | 4844 // succeed if subject is not a string or a short external string. |
| 4763 __ And(a1, a0, Operand(kIsNotStringMask | kStringRepresentationMask)); | 4845 __ And(a1, |
| 4846 a0, |
| 4847 Operand(kIsNotStringMask | |
| 4848 kStringRepresentationMask | |
| 4849 kShortExternalStringMask)); |
| 4764 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0); | 4850 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0); |
| 4765 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); | 4851 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); |
| 4766 | 4852 |
| 4767 // subject: Subject string | 4853 // subject: Subject string |
| 4768 // a0: instance type if Subject string | 4854 // a0: instance type if Subject string |
| 4769 // regexp_data: RegExp data (FixedArray) | 4855 // regexp_data: RegExp data (FixedArray) |
| 4770 // a1: whether subject is a string and if yes, its string representation | 4856 // a1: whether subject is a string and if yes, its string representation |
| 4771 // Check for flat cons string or sliced string. | 4857 // Check for flat cons string or sliced string. |
| 4772 // A flat cons string is a cons string where the second part is the empty | 4858 // A flat cons string is a cons string where the second part is the empty |
| 4773 // string. In that case the subject string is just the first part of the cons | 4859 // string. In that case the subject string is just the first part of the cons |
| 4774 // string. Also in this case the first part of the cons string is known to be | 4860 // string. Also in this case the first part of the cons string is known to be |
| 4775 // a sequential string or an external string. | 4861 // a sequential string or an external string. |
| 4776 // In the case of a sliced string its offset has to be taken into account. | 4862 // In the case of a sliced string its offset has to be taken into account. |
| 4777 Label cons_string, check_encoding; | 4863 Label cons_string, external_string, check_encoding; |
| 4778 STATIC_ASSERT(kConsStringTag < kExternalStringTag); | 4864 STATIC_ASSERT(kConsStringTag < kExternalStringTag); |
| 4779 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); | 4865 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); |
| 4780 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag); | 4866 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag); |
| 4867 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag); |
| 4781 __ Branch(&cons_string, lt, a1, Operand(kExternalStringTag)); | 4868 __ Branch(&cons_string, lt, a1, Operand(kExternalStringTag)); |
| 4782 __ Branch(&runtime, eq, a1, Operand(kExternalStringTag)); | 4869 __ Branch(&external_string, eq, a1, Operand(kExternalStringTag)); |
| 4783 | 4870 |
| 4784 // Catch non-string subject (should already have been guarded against). | 4871 // Catch non-string subject or short external string. |
| 4785 STATIC_ASSERT(kNotStringTag != 0); | 4872 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0); |
| 4786 __ And(at, a1, Operand(kIsNotStringMask)); | 4873 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask)); |
| 4787 __ Branch(&runtime, ne, at, Operand(zero_reg)); | 4874 __ Branch(&runtime, ne, at, Operand(zero_reg)); |
| 4788 | 4875 |
| 4789 // String is sliced. | 4876 // String is sliced. |
| 4790 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); | 4877 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); |
| 4791 __ sra(t0, t0, kSmiTagSize); | 4878 __ sra(t0, t0, kSmiTagSize); |
| 4792 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 4879 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
| 4793 // t5: offset of sliced string, smi-tagged. | 4880 // t5: offset of sliced string, smi-tagged. |
| 4794 __ jmp(&check_encoding); | 4881 __ jmp(&check_encoding); |
| 4795 // String is a cons string, check whether it is flat. | 4882 // String is a cons string, check whether it is flat. |
| 4796 __ bind(&cons_string); | 4883 __ bind(&cons_string); |
| 4797 __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset)); | 4884 __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset)); |
| 4798 __ LoadRoot(a1, Heap::kEmptyStringRootIndex); | 4885 __ LoadRoot(a1, Heap::kEmptyStringRootIndex); |
| 4799 __ Branch(&runtime, ne, a0, Operand(a1)); | 4886 __ Branch(&runtime, ne, a0, Operand(a1)); |
| 4800 __ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); | 4887 __ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); |
| 4801 // Is first part of cons or parent of slice a flat string? | 4888 // Is first part of cons or parent of slice a flat string? |
| 4802 __ bind(&check_encoding); | 4889 __ bind(&check_encoding); |
| 4803 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 4890 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
| 4804 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); | 4891 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); |
| 4805 STATIC_ASSERT(kSeqStringTag == 0); | 4892 STATIC_ASSERT(kSeqStringTag == 0); |
| 4806 __ And(at, a0, Operand(kStringRepresentationMask)); | 4893 __ And(at, a0, Operand(kStringRepresentationMask)); |
| 4807 __ Branch(&runtime, ne, at, Operand(zero_reg)); | 4894 __ Branch(&external_string, ne, at, Operand(zero_reg)); |
| 4808 | 4895 |
| 4809 __ bind(&seq_string); | 4896 __ bind(&seq_string); |
| 4810 // subject: Subject string | 4897 // subject: Subject string |
| 4811 // regexp_data: RegExp data (FixedArray) | 4898 // regexp_data: RegExp data (FixedArray) |
| 4812 // a0: Instance type of subject string | 4899 // a0: Instance type of subject string |
| 4813 STATIC_ASSERT(kStringEncodingMask == 4); | 4900 STATIC_ASSERT(kStringEncodingMask == 4); |
| 4814 STATIC_ASSERT(kAsciiStringTag == 4); | 4901 STATIC_ASSERT(kAsciiStringTag == 4); |
| 4815 STATIC_ASSERT(kTwoByteStringTag == 0); | 4902 STATIC_ASSERT(kTwoByteStringTag == 0); |
| 4816 // Find the code object based on the assumptions above. | 4903 // Find the code object based on the assumptions above. |
| 4817 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ascii. | 4904 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ascii. |
| (...skipping 205 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5023 __ Branch(&next_capture, USE_DELAY_SLOT); | 5110 __ Branch(&next_capture, USE_DELAY_SLOT); |
| 5024 __ addiu(a0, a0, kPointerSize); // In branch delay slot. | 5111 __ addiu(a0, a0, kPointerSize); // In branch delay slot. |
| 5025 | 5112 |
| 5026 __ bind(&done); | 5113 __ bind(&done); |
| 5027 | 5114 |
| 5028 // Return last match info. | 5115 // Return last match info. |
| 5029 __ lw(v0, MemOperand(sp, kLastMatchInfoOffset)); | 5116 __ lw(v0, MemOperand(sp, kLastMatchInfoOffset)); |
| 5030 __ Addu(sp, sp, Operand(4 * kPointerSize)); | 5117 __ Addu(sp, sp, Operand(4 * kPointerSize)); |
| 5031 __ Ret(); | 5118 __ Ret(); |
| 5032 | 5119 |
| 5120 // External string. Short external strings have already been ruled out. |
| 5121 // a0: scratch |
| 5122 __ bind(&external_string); |
| 5123 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
| 5124 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); |
| 5125 if (FLAG_debug_code) { |
| 5126 // Assert that we do not have a cons or slice (indirect strings) here. |
| 5127 // Sequential strings have already been ruled out. |
| 5128 __ And(at, a0, Operand(kIsIndirectStringMask)); |
| 5129 __ Assert(eq, |
| 5130 "external string expected, but not found", |
| 5131 at, |
| 5132 Operand(zero_reg)); |
| 5133 } |
| 5134 __ lw(subject, |
| 5135 FieldMemOperand(subject, ExternalString::kResourceDataOffset)); |
| 5136 // Move the pointer so that offset-wise, it looks like a sequential string. |
| 5137 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize); |
| 5138 __ Subu(subject, |
| 5139 subject, |
| 5140 SeqTwoByteString::kHeaderSize - kHeapObjectTag); |
| 5141 __ jmp(&seq_string); |
| 5142 |
| 5033 // Do the runtime call to execute the regexp. | 5143 // Do the runtime call to execute the regexp. |
| 5034 __ bind(&runtime); | 5144 __ bind(&runtime); |
| 5035 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 5145 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
| 5036 #endif // V8_INTERPRETED_REGEXP | 5146 #endif // V8_INTERPRETED_REGEXP |
| 5037 } | 5147 } |
| 5038 | 5148 |
| 5039 | 5149 |
| 5040 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { | 5150 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { |
| 5041 const int kMaxInlineLength = 100; | 5151 const int kMaxInlineLength = 100; |
| 5042 Label slowcase; | 5152 Label slowcase; |
| (...skipping 238 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5281 | 5391 |
| 5282 // If the index is non-smi trigger the non-smi case. | 5392 // If the index is non-smi trigger the non-smi case. |
| 5283 __ JumpIfNotSmi(index_, &index_not_smi_); | 5393 __ JumpIfNotSmi(index_, &index_not_smi_); |
| 5284 | 5394 |
| 5285 __ bind(&got_smi_index_); | 5395 __ bind(&got_smi_index_); |
| 5286 | 5396 |
| 5287 // Check for index out of range. | 5397 // Check for index out of range. |
| 5288 __ lw(t0, FieldMemOperand(object_, String::kLengthOffset)); | 5398 __ lw(t0, FieldMemOperand(object_, String::kLengthOffset)); |
| 5289 __ Branch(index_out_of_range_, ls, t0, Operand(index_)); | 5399 __ Branch(index_out_of_range_, ls, t0, Operand(index_)); |
| 5290 | 5400 |
| 5291 // We need special handling for non-flat strings. | 5401 __ sra(index_, index_, kSmiTagSize); |
| 5292 STATIC_ASSERT(kSeqStringTag == 0); | |
| 5293 __ And(t0, result_, Operand(kStringRepresentationMask)); | |
| 5294 __ Branch(&flat_string, eq, t0, Operand(zero_reg)); | |
| 5295 | 5402 |
| 5296 // Handle non-flat strings. | 5403 StringCharLoadGenerator::Generate(masm, |
| 5297 __ And(result_, result_, Operand(kStringRepresentationMask)); | 5404 object_, |
| 5298 STATIC_ASSERT(kConsStringTag < kExternalStringTag); | 5405 index_, |
| 5299 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); | 5406 result_, |
| 5300 __ Branch(&sliced_string, gt, result_, Operand(kExternalStringTag)); | 5407 &call_runtime_); |
| 5301 __ Branch(&call_runtime_, eq, result_, Operand(kExternalStringTag)); | |
| 5302 | 5408 |
| 5303 // ConsString. | |
| 5304 // Check whether the right hand side is the empty string (i.e. if | |
| 5305 // this is really a flat string in a cons string). If that is not | |
| 5306 // the case we would rather go to the runtime system now to flatten | |
| 5307 // the string. | |
| 5308 Label assure_seq_string; | |
| 5309 __ lw(result_, FieldMemOperand(object_, ConsString::kSecondOffset)); | |
| 5310 __ LoadRoot(t0, Heap::kEmptyStringRootIndex); | |
| 5311 __ Branch(&call_runtime_, ne, result_, Operand(t0)); | |
| 5312 | |
| 5313 // Get the first of the two parts. | |
| 5314 __ lw(object_, FieldMemOperand(object_, ConsString::kFirstOffset)); | |
| 5315 __ jmp(&assure_seq_string); | |
| 5316 | |
| 5317 // SlicedString, unpack and add offset. | |
| 5318 __ bind(&sliced_string); | |
| 5319 __ lw(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset)); | |
| 5320 __ Addu(index_, index_, result_); | |
| 5321 __ lw(object_, FieldMemOperand(object_, SlicedString::kParentOffset)); | |
| 5322 | |
| 5323 // Assure that we are dealing with a sequential string. Go to runtime if not. | |
| 5324 __ bind(&assure_seq_string); | |
| 5325 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); | |
| 5326 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); | |
| 5327 // Check that parent is not an external string. Go to runtime otherwise. | |
| 5328 // Note that if the original string is a cons or slice with an external | |
| 5329 // string as underlying string, we pass that unpacked underlying string with | |
| 5330 // the adjusted index to the runtime function. | |
| 5331 STATIC_ASSERT(kSeqStringTag == 0); | |
| 5332 | |
| 5333 __ And(t0, result_, Operand(kStringRepresentationMask)); | |
| 5334 __ Branch(&call_runtime_, ne, t0, Operand(zero_reg)); | |
| 5335 | |
| 5336 // Check for 1-byte or 2-byte string. | |
| 5337 __ bind(&flat_string); | |
| 5338 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); | |
| 5339 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); | |
| 5340 __ And(t0, result_, Operand(kStringEncodingMask)); | |
| 5341 __ Branch(&ascii_string, ne, t0, Operand(zero_reg)); | |
| 5342 | |
| 5343 // 2-byte string. | |
| 5344 // Load the 2-byte character code into the result register. We can | |
| 5345 // add without shifting since the smi tag size is the log2 of the | |
| 5346 // number of bytes in a two-byte character. | |
| 5347 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0); | |
| 5348 __ Addu(index_, object_, Operand(index_)); | |
| 5349 __ lhu(result_, FieldMemOperand(index_, SeqTwoByteString::kHeaderSize)); | |
| 5350 __ Branch(&got_char_code); | |
| 5351 | |
| 5352 // ASCII string. | |
| 5353 // Load the byte into the result register. | |
| 5354 __ bind(&ascii_string); | |
| 5355 | |
| 5356 __ srl(t0, index_, kSmiTagSize); | |
| 5357 __ Addu(index_, object_, t0); | |
| 5358 | |
| 5359 __ lbu(result_, FieldMemOperand(index_, SeqAsciiString::kHeaderSize)); | |
| 5360 | |
| 5361 __ bind(&got_char_code); | |
| 5362 __ sll(result_, result_, kSmiTagSize); | 5409 __ sll(result_, result_, kSmiTagSize); |
| 5363 __ bind(&exit_); | 5410 __ bind(&exit_); |
| 5364 } | 5411 } |
| 5365 | 5412 |
| 5366 | 5413 |
| 5367 void StringCharCodeAtGenerator::GenerateSlow( | 5414 void StringCharCodeAtGenerator::GenerateSlow( |
| 5368 MacroAssembler* masm, | 5415 MacroAssembler* masm, |
| 5369 const RuntimeCallHelper& call_helper) { | 5416 const RuntimeCallHelper& call_helper) { |
| 5370 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); | 5417 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); |
| 5371 | 5418 |
| (...skipping 28 matching lines...) Expand all Loading... |
| 5400 // If index is still not a smi, it must be out of range. | 5447 // If index is still not a smi, it must be out of range. |
| 5401 __ JumpIfNotSmi(index_, index_out_of_range_); | 5448 __ JumpIfNotSmi(index_, index_out_of_range_); |
| 5402 // Otherwise, return to the fast path. | 5449 // Otherwise, return to the fast path. |
| 5403 __ Branch(&got_smi_index_); | 5450 __ Branch(&got_smi_index_); |
| 5404 | 5451 |
| 5405 // Call runtime. We get here when the receiver is a string and the | 5452 // Call runtime. We get here when the receiver is a string and the |
| 5406 // index is a number, but the code of getting the actual character | 5453 // index is a number, but the code of getting the actual character |
| 5407 // is too complex (e.g., when the string needs to be flattened). | 5454 // is too complex (e.g., when the string needs to be flattened). |
| 5408 __ bind(&call_runtime_); | 5455 __ bind(&call_runtime_); |
| 5409 call_helper.BeforeCall(masm); | 5456 call_helper.BeforeCall(masm); |
| 5457 __ sll(index_, index_, kSmiTagSize); |
| 5410 __ Push(object_, index_); | 5458 __ Push(object_, index_); |
| 5411 __ CallRuntime(Runtime::kStringCharCodeAt, 2); | 5459 __ CallRuntime(Runtime::kStringCharCodeAt, 2); |
| 5412 | 5460 |
| 5413 __ Move(result_, v0); | 5461 __ Move(result_, v0); |
| 5414 | 5462 |
| 5415 call_helper.AfterCall(masm); | 5463 call_helper.AfterCall(masm); |
| 5416 __ jmp(&exit_); | 5464 __ jmp(&exit_); |
| 5417 | 5465 |
| 5418 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); | 5466 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); |
| 5419 } | 5467 } |
| (...skipping 2036 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 7456 // Array literal has ElementsKind of FAST_ELEMENTS and value is an object. | 7504 // Array literal has ElementsKind of FAST_ELEMENTS and value is an object. |
| 7457 __ bind(&fast_elements); | 7505 __ bind(&fast_elements); |
| 7458 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); | 7506 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); |
| 7459 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); | 7507 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); |
| 7460 __ Addu(t2, t1, t2); | 7508 __ Addu(t2, t1, t2); |
| 7461 __ Addu(t2, t2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 7509 __ Addu(t2, t2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 7462 __ sw(a0, MemOperand(t2, 0)); | 7510 __ sw(a0, MemOperand(t2, 0)); |
| 7463 // Update the write barrier for the array store. | 7511 // Update the write barrier for the array store. |
| 7464 __ RecordWrite(t1, t2, a0, kRAHasNotBeenSaved, kDontSaveFPRegs, | 7512 __ RecordWrite(t1, t2, a0, kRAHasNotBeenSaved, kDontSaveFPRegs, |
| 7465 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 7513 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 7466 __ Ret(); | 7514 __ Ret(USE_DELAY_SLOT); |
| 7515 __ mov(v0, a0); |
| 7467 | 7516 |
| 7468 // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or | 7517 // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or |
| 7469 // FAST_ELEMENTS, and value is Smi. | 7518 // FAST_ELEMENTS, and value is Smi. |
| 7470 __ bind(&smi_element); | 7519 __ bind(&smi_element); |
| 7471 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); | 7520 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); |
| 7472 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); | 7521 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); |
| 7473 __ Addu(t2, t1, t2); | 7522 __ Addu(t2, t1, t2); |
| 7474 __ sw(a0, FieldMemOperand(t2, FixedArray::kHeaderSize)); | 7523 __ sw(a0, FieldMemOperand(t2, FixedArray::kHeaderSize)); |
| 7475 __ Ret(); | 7524 __ Ret(USE_DELAY_SLOT); |
| 7525 __ mov(v0, a0); |
| 7476 | 7526 |
| 7477 // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. | 7527 // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. |
| 7478 __ bind(&double_elements); | 7528 __ bind(&double_elements); |
| 7479 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); | 7529 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); |
| 7480 __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, t6, | 7530 __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, t6, |
| 7481 &slow_elements); | 7531 &slow_elements); |
| 7482 __ Ret(); | 7532 __ Ret(USE_DELAY_SLOT); |
| 7533 __ mov(v0, a0); |
| 7483 } | 7534 } |
| 7484 | 7535 |
| 7485 | 7536 |
| 7486 #undef __ | 7537 #undef __ |
| 7487 | 7538 |
| 7488 } } // namespace v8::internal | 7539 } } // namespace v8::internal |
| 7489 | 7540 |
| 7490 #endif // V8_TARGET_ARCH_MIPS | 7541 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |