OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2082 matching lines...)
2093 ASSERT(temp.is_valid()); | 2093 ASSERT(temp.is_valid()); |
2094 __ movq(temp.reg(), | 2094 __ movq(temp.reg(), |
2095 FieldOperand(left_reg, HeapObject::kMapOffset)); | 2095 FieldOperand(left_reg, HeapObject::kMapOffset)); |
2096 __ movzxbl(temp.reg(), | 2096 __ movzxbl(temp.reg(), |
2097 FieldOperand(temp.reg(), Map::kInstanceTypeOffset)); | 2097 FieldOperand(temp.reg(), Map::kInstanceTypeOffset)); |
2098 // If we are testing for equality then make use of the symbol shortcut. | 2098 // If we are testing for equality then make use of the symbol shortcut. |
2099 // Check if the left hand side has the same type as the right hand | 2099 // Check if the left hand side has the same type as the right hand |
2100 // side (which is always a symbol). | 2100 // side (which is always a symbol). |
2101 if (cc == equal) { | 2101 if (cc == equal) { |
2102 Label not_a_symbol; | 2102 Label not_a_symbol; |
2103 ASSERT(kSymbolTag != 0); | 2103 STATIC_ASSERT(kSymbolTag != 0); |
2104 // Ensure that no non-strings have the symbol bit set. | 2104 // Ensure that no non-strings have the symbol bit set. |
2105 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE); | 2105 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask); |
2106 __ testb(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit. | 2106 __ testb(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit. |
2107 __ j(zero, &not_a_symbol); | 2107 __ j(zero, &not_a_symbol); |
2108 // They are symbols, so do identity compare. | 2108 // They are symbols, so do identity compare. |
2109 __ Cmp(left_reg, right_side.handle()); | 2109 __ Cmp(left_reg, right_side.handle()); |
2110 dest->true_target()->Branch(equal); | 2110 dest->true_target()->Branch(equal); |
2111 dest->false_target()->Branch(not_equal); | 2111 dest->false_target()->Branch(not_equal); |
2112 __ bind(&not_a_symbol); | 2112 __ bind(&not_a_symbol); |
2113 } | 2113 } |
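
The ASSERT -> STATIC_ASSERT conversions throughout this patch all cover conditions on compile-time constants, so the check moves from a debug-only runtime abort to a compile error. A minimal sketch of the template trick such a macro can rest on (helper names here are illustrative, not V8's exact STATIC_ASSERT definition):

    // Compile-time assertion sketch: the template is defined only for the
    // 'true' case; sizeof forces instantiation, so a false condition fails
    // to compile instead of aborting at runtime in debug builds.
    template <bool> struct CompileTimeAssert;            // declared, not defined
    template <> struct CompileTimeAssert<true> {};       // defined only for true

    #define CT_JOIN2(a, b) a##b
    #define CT_JOIN(a, b) CT_JOIN2(a, b)
    #define MY_STATIC_ASSERT(cond) \
      typedef char CT_JOIN(static_assert_line_, __LINE__)[sizeof(CompileTimeAssert<(cond)>)]

    // Compiles: the condition is a true compile-time constant.
    MY_STATIC_ASSERT(sizeof(int) <= sizeof(long));
    // MY_STATIC_ASSERT(sizeof(long) < sizeof(int)); would be rejected by the compiler.

    int main() { return 0; }

From C++11 on, the built-in static_assert gives the same effect directly.
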
2114 // Call the compare stub if the left side is not a flat ascii string. | 2114 // Call the compare stub if the left side is not a flat ascii string. |
2115 __ andb(temp.reg(), | 2115 __ andb(temp.reg(), |
(...skipping 444 matching lines...)
2560 // rsp[2]: applicand. | 2560 // rsp[2]: applicand. |
2561 | 2561 |
2562 // Check that the receiver really is a JavaScript object. | 2562 // Check that the receiver really is a JavaScript object. |
2563 __ movq(rax, Operand(rsp, 0)); | 2563 __ movq(rax, Operand(rsp, 0)); |
2564 Condition is_smi = masm_->CheckSmi(rax); | 2564 Condition is_smi = masm_->CheckSmi(rax); |
2565 __ j(is_smi, &build_args); | 2565 __ j(is_smi, &build_args); |
2566 // We allow all JSObjects including JSFunctions. As long as | 2566 // We allow all JSObjects including JSFunctions. As long as |
2567 // JS_FUNCTION_TYPE is the last instance type and it is right | 2567 // JS_FUNCTION_TYPE is the last instance type and it is right |
2568 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper | 2568 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper |
2569 // bound. | 2569 // bound. |
2570 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); | 2570 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
2571 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); | 2571 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); |
2572 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); | 2572 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); |
2573 __ j(below, &build_args); | 2573 __ j(below, &build_args); |
2574 | 2574 |
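
The two STATIC_ASSERTs above pin down the instance-type ordering that lets a single lower-bound compare accept every JSObject including JSFunctions. A small standalone illustration with made-up enum values (the real numbering lives in V8's objects.h):

    #include <cassert>

    enum InstanceType {                 // made-up values, for illustration only
      STRING_TYPE = 0,
      ODDBALL_TYPE = 1,
      FIRST_JS_OBJECT_TYPE = 2,
      JS_OBJECT_TYPE = 2,
      JS_ARRAY_TYPE = 3,
      LAST_JS_OBJECT_TYPE = 3,
      JS_FUNCTION_TYPE = 4,             // == LAST_JS_OBJECT_TYPE + 1, and the maximum
      LAST_TYPE = JS_FUNCTION_TYPE
    };

    // Because JS_FUNCTION_TYPE is both LAST_TYPE and LAST_JS_OBJECT_TYPE + 1,
    // "is a JS object or a function" collapses to one lower-bound compare,
    // which is what the CmpObjectType / j(below, ...) pair emits.
    bool IsJSObjectOrFunction(InstanceType type) {
      return type >= FIRST_JS_OBJECT_TYPE;  // no upper bound needed
    }

    int main() {
      assert(IsJSObjectOrFunction(JS_OBJECT_TYPE));
      assert(IsJSObjectOrFunction(JS_FUNCTION_TYPE));
      assert(!IsJSObjectOrFunction(ODDBALL_TYPE));
      return 0;
    }
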
2575 // Check that applicand.apply is Function.prototype.apply. | 2575 // Check that applicand.apply is Function.prototype.apply. |
2576 __ movq(rax, Operand(rsp, kPointerSize)); | 2576 __ movq(rax, Operand(rsp, kPointerSize)); |
2577 is_smi = masm_->CheckSmi(rax); | 2577 is_smi = masm_->CheckSmi(rax); |
2578 __ j(is_smi, &build_args); | 2578 __ j(is_smi, &build_args); |
2579 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx); | 2579 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx); |
2580 __ j(not_equal, &build_args); | 2580 __ j(not_equal, &build_args); |
2581 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); | 2581 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); |
(...skipping 1422 matching lines...)
4004 __ movq(kScratchRegister, handler_address); | 4004 __ movq(kScratchRegister, handler_address); |
4005 __ cmpq(rsp, Operand(kScratchRegister, 0)); | 4005 __ cmpq(rsp, Operand(kScratchRegister, 0)); |
4006 __ Assert(equal, "stack pointer should point to top handler"); | 4006 __ Assert(equal, "stack pointer should point to top handler"); |
4007 } | 4007 } |
4008 | 4008 |
4009 // If we can fall off the end of the try block, unlink from try chain. | 4009 // If we can fall off the end of the try block, unlink from try chain. |
4010 if (has_valid_frame()) { | 4010 if (has_valid_frame()) { |
4011 // The next handler address is on top of the frame. Unlink from | 4011 // The next handler address is on top of the frame. Unlink from |
4012 // the handler list and drop the rest of this handler from the | 4012 // the handler list and drop the rest of this handler from the |
4013 // frame. | 4013 // frame. |
4014 ASSERT(StackHandlerConstants::kNextOffset == 0); | 4014 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
4015 __ movq(kScratchRegister, handler_address); | 4015 __ movq(kScratchRegister, handler_address); |
4016 frame_->EmitPop(Operand(kScratchRegister, 0)); | 4016 frame_->EmitPop(Operand(kScratchRegister, 0)); |
4017 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 4017 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
4018 if (has_unlinks) { | 4018 if (has_unlinks) { |
4019 exit.Jump(); | 4019 exit.Jump(); |
4020 } | 4020 } |
4021 } | 4021 } |
4022 | 4022 |
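
The kNextOffset == 0 assert is what allows the popped word to be stored straight through the handler-address cell, relinking the chain in one step before the rest of the handler is dropped. A rough model of that unlink, with an illustrative record layout rather than the real StackHandlerConstants one:

    #include <cassert>
    #include <cstddef>

    struct StackHandlerRecord {
      StackHandlerRecord* next;   // offset 0 -- the kNextOffset == 0 invariant
      void* frame_pointer;
      int state;
      void* pc;
    };

    StackHandlerRecord* top_handler = NULL;

    // Equivalent of popping into Operand(handler_address, 0): the saved
    // 'next' field of the topmost record becomes the new head of the chain,
    // and the remaining fields are simply dropped from the frame.
    void UnlinkTopHandler() {
      assert(top_handler != NULL);
      top_handler = top_handler->next;
    }

    int main() {
      assert(offsetof(StackHandlerRecord, next) == 0);
      StackHandlerRecord outer = { NULL, NULL, 0, NULL };
      StackHandlerRecord inner = { &outer, NULL, 0, NULL };
      top_handler = &inner;
      UnlinkTopHandler();
      assert(top_handler == &outer);
      return 0;
    }
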
4023 // Generate unlink code for the (formerly) shadowing targets that | 4023 // Generate unlink code for the (formerly) shadowing targets that |
4024 // have been jumped to. Deallocate each shadow target. | 4024 // have been jumped to. Deallocate each shadow target. |
(...skipping 12 matching lines...)
4037 // unspilled code, we need to reestablish a spilled frame at | 4037 // unspilled code, we need to reestablish a spilled frame at |
4038 // this block. | 4038 // this block. |
4039 frame_->SpillAll(); | 4039 frame_->SpillAll(); |
4040 | 4040 |
4041 // Reload sp from the top handler, because some statements that we | 4041 // Reload sp from the top handler, because some statements that we |
4042 // break from (eg, for...in) may have left stuff on the stack. | 4042 // break from (eg, for...in) may have left stuff on the stack. |
4043 __ movq(kScratchRegister, handler_address); | 4043 __ movq(kScratchRegister, handler_address); |
4044 __ movq(rsp, Operand(kScratchRegister, 0)); | 4044 __ movq(rsp, Operand(kScratchRegister, 0)); |
4045 frame_->Forget(frame_->height() - handler_height); | 4045 frame_->Forget(frame_->height() - handler_height); |
4046 | 4046 |
4047 ASSERT(StackHandlerConstants::kNextOffset == 0); | 4047 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
4048 __ movq(kScratchRegister, handler_address); | 4048 __ movq(kScratchRegister, handler_address); |
4049 frame_->EmitPop(Operand(kScratchRegister, 0)); | 4049 frame_->EmitPop(Operand(kScratchRegister, 0)); |
4050 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 4050 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
4051 | 4051 |
4052 if (i == kReturnShadowIndex) { | 4052 if (i == kReturnShadowIndex) { |
4053 if (!function_return_is_shadowed_) frame_->PrepareForReturn(); | 4053 if (!function_return_is_shadowed_) frame_->PrepareForReturn(); |
4054 shadows[i]->other_target()->Jump(&return_value); | 4054 shadows[i]->other_target()->Jump(&return_value); |
4055 } else { | 4055 } else { |
4056 shadows[i]->other_target()->Jump(); | 4056 shadows[i]->other_target()->Jump(); |
4057 } | 4057 } |
(...skipping 66 matching lines...)
4124 } | 4124 } |
4125 function_return_is_shadowed_ = function_return_was_shadowed; | 4125 function_return_is_shadowed_ = function_return_was_shadowed; |
4126 | 4126 |
4127 // Get an external reference to the handler address. | 4127 // Get an external reference to the handler address. |
4128 ExternalReference handler_address(Top::k_handler_address); | 4128 ExternalReference handler_address(Top::k_handler_address); |
4129 | 4129 |
4130 // If we can fall off the end of the try block, unlink from the try | 4130 // If we can fall off the end of the try block, unlink from the try |
4131 // chain and set the state on the frame to FALLING. | 4131 // chain and set the state on the frame to FALLING. |
4132 if (has_valid_frame()) { | 4132 if (has_valid_frame()) { |
4133 // The next handler address is on top of the frame. | 4133 // The next handler address is on top of the frame. |
4134 ASSERT(StackHandlerConstants::kNextOffset == 0); | 4134 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
4135 __ movq(kScratchRegister, handler_address); | 4135 __ movq(kScratchRegister, handler_address); |
4136 frame_->EmitPop(Operand(kScratchRegister, 0)); | 4136 frame_->EmitPop(Operand(kScratchRegister, 0)); |
4137 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 4137 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
4138 | 4138 |
4139 // Fake a top of stack value (unneeded when FALLING) and set the | 4139 // Fake a top of stack value (unneeded when FALLING) and set the |
4140 // state in ecx, then jump around the unlink blocks if any. | 4140 // state in ecx, then jump around the unlink blocks if any. |
4141 frame_->EmitPush(Heap::kUndefinedValueRootIndex); | 4141 frame_->EmitPush(Heap::kUndefinedValueRootIndex); |
4142 __ Move(rcx, Smi::FromInt(FALLING)); | 4142 __ Move(rcx, Smi::FromInt(FALLING)); |
4143 if (nof_unlinks > 0) { | 4143 if (nof_unlinks > 0) { |
4144 finally_block.Jump(); | 4144 finally_block.Jump(); |
(...skipping 20 matching lines...)
4165 frame_->SpillAll(); | 4165 frame_->SpillAll(); |
4166 | 4166 |
4167 // Reload sp from the top handler, because some statements that | 4167 // Reload sp from the top handler, because some statements that |
4168 // we break from (eg, for...in) may have left stuff on the | 4168 // we break from (eg, for...in) may have left stuff on the |
4169 // stack. | 4169 // stack. |
4170 __ movq(kScratchRegister, handler_address); | 4170 __ movq(kScratchRegister, handler_address); |
4171 __ movq(rsp, Operand(kScratchRegister, 0)); | 4171 __ movq(rsp, Operand(kScratchRegister, 0)); |
4172 frame_->Forget(frame_->height() - handler_height); | 4172 frame_->Forget(frame_->height() - handler_height); |
4173 | 4173 |
4174 // Unlink this handler and drop it from the frame. | 4174 // Unlink this handler and drop it from the frame. |
4175 ASSERT(StackHandlerConstants::kNextOffset == 0); | 4175 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
4176 __ movq(kScratchRegister, handler_address); | 4176 __ movq(kScratchRegister, handler_address); |
4177 frame_->EmitPop(Operand(kScratchRegister, 0)); | 4177 frame_->EmitPop(Operand(kScratchRegister, 0)); |
4178 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 4178 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
4179 | 4179 |
4180 if (i == kReturnShadowIndex) { | 4180 if (i == kReturnShadowIndex) { |
4181 // If this target shadowed the function return, materialize | 4181 // If this target shadowed the function return, materialize |
4182 // the return value on the stack. | 4182 // the return value on the stack. |
4183 frame_->EmitPush(rax); | 4183 frame_->EmitPush(rax); |
4184 } else { | 4184 } else { |
4185 // Fake TOS for targets that shadowed breaks and continues. | 4185 // Fake TOS for targets that shadowed breaks and continues. |
(...skipping 1999 matching lines...)
6185 right.Unuse(); | 6185 right.Unuse(); |
6186 left.Unuse(); | 6186 left.Unuse(); |
6187 destination()->Split(equal); | 6187 destination()->Split(equal); |
6188 } | 6188 } |
6189 | 6189 |
6190 | 6190 |
6191 void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) { | 6191 void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) { |
6192 ASSERT(args->length() == 0); | 6192 ASSERT(args->length() == 0); |
6193 // RBP value is aligned, so it should be tagged as a smi (without necesarily | 6193 // RBP value is aligned, so it should be tagged as a smi (without necesarily |
6194 // being padded as a smi, so it should not be treated as a smi.). | 6194 // being padded as a smi, so it should not be treated as a smi.). |
6195 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 6195 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
6196 Result rbp_as_smi = allocator_->Allocate(); | 6196 Result rbp_as_smi = allocator_->Allocate(); |
6197 ASSERT(rbp_as_smi.is_valid()); | 6197 ASSERT(rbp_as_smi.is_valid()); |
6198 __ movq(rbp_as_smi.reg(), rbp); | 6198 __ movq(rbp_as_smi.reg(), rbp); |
6199 frame_->Push(&rbp_as_smi); | 6199 frame_->Push(&rbp_as_smi); |
6200 } | 6200 } |
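
The reason rbp can be pushed untouched: frame pointers are aligned, so the low bit is already clear, and with kSmiTag == 0 and kSmiTagSize == 1 a clear low bit is exactly the smi tag. A tiny check of that property (tag constants assumed to match the asserts above, not V8's smi implementation):

    #include <cassert>
    #include <stdint.h>

    const intptr_t kSmiTagSketch = 0;
    const intptr_t kSmiTagMaskSketch = 1;   // one tag bit (kSmiTagSize == 1)

    bool LooksLikeSmi(intptr_t value) {
      return (value & kSmiTagMaskSketch) == kSmiTagSketch;
    }

    int main() {
      // Frame pointers on x64 are word aligned, so the low bit is already 0,
      // which is exactly the smi tag -- the value can be pushed as-is.
      long long some_frame_slot = 0;
      intptr_t fake_rbp = reinterpret_cast<intptr_t>(&some_frame_slot);
      assert((fake_rbp & 1) == 0);        // aligned => low bit clear
      assert(LooksLikeSmi(fake_rbp));     // therefore already smi tagged
      return 0;
    }
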
6201 | 6201 |
6202 | 6202 |
6203 void CodeGenerator::GenerateRandomHeapNumber( | 6203 void CodeGenerator::GenerateRandomHeapNumber( |
6204 ZoneList<Expression*>* args) { | 6204 ZoneList<Expression*>* args) { |
6205 ASSERT(args->length() == 0); | 6205 ASSERT(args->length() == 0); |
(...skipping 3898 matching lines...)
10104 __ JumpIfSmi(rax, &runtime); | 10104 __ JumpIfSmi(rax, &runtime); |
10105 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister); | 10105 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister); |
10106 __ j(not_equal, &runtime); | 10106 __ j(not_equal, &runtime); |
10107 // Check that the JSArray is in fast case. | 10107 // Check that the JSArray is in fast case. |
10108 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); | 10108 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); |
10109 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); | 10109 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); |
10110 __ Cmp(rax, Factory::fixed_array_map()); | 10110 __ Cmp(rax, Factory::fixed_array_map()); |
10111 __ j(not_equal, &runtime); | 10111 __ j(not_equal, &runtime); |
10112 // Check that the last match info has space for the capture registers and the | 10112 // Check that the last match info has space for the capture registers and the |
10113 // additional information. Ensure no overflow in add. | 10113 // additional information. Ensure no overflow in add. |
10114 ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); | 10114 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); |
10115 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); | 10115 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); |
10116 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); | 10116 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); |
10117 __ cmpl(rdx, rax); | 10117 __ cmpl(rdx, rax); |
10118 __ j(greater, &runtime); | 10118 __ j(greater, &runtime); |
10119 | 10119 |
10120 // rcx: RegExp data (FixedArray) | 10120 // rcx: RegExp data (FixedArray) |
10121 // Check the representation and encoding of the subject string. | 10121 // Check the representation and encoding of the subject string. |
10122 Label seq_ascii_string, seq_two_byte_string, check_code; | 10122 Label seq_ascii_string, seq_two_byte_string, check_code; |
10123 __ movq(rax, Operand(rsp, kSubjectOffset)); | 10123 __ movq(rax, Operand(rsp, kSubjectOffset)); |
10124 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 10124 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
10125 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 10125 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
10126 // First check for flat two byte string. | 10126 // First check for flat two byte string. |
10127 __ andb(rbx, Immediate( | 10127 __ andb(rbx, Immediate( |
10128 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask)); | 10128 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask)); |
10129 ASSERT_EQ(0, kStringTag | kSeqStringTag | kTwoByteStringTag); | 10129 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); |
10130 __ j(zero, &seq_two_byte_string); | 10130 __ j(zero, &seq_two_byte_string); |
10131 // Any other flat string must be a flat ascii string. | 10131 // Any other flat string must be a flat ascii string. |
10132 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask)); | 10132 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask)); |
10133 __ j(zero, &seq_ascii_string); | 10133 __ j(zero, &seq_ascii_string); |
10134 | 10134 |
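
The single j(zero, ...) after the andb works only because "string", "sequential" and "two byte" are all encoded as zero bits, which is what the STATIC_ASSERT pins down; the follow-up testb then drops the encoding bit to catch sequential ascii. A schematic decoding with made-up flag values:

    #include <cassert>

    const int kIsNotStringMaskSketch          = 0x80;  // 0 => is a string
    const int kStringRepresentationMaskSketch = 0x03;  // 0 => sequential
    const int kStringEncodingMaskSketch       = 0x04;  // 0 => two byte, set => ascii

    // Masking all three fields and comparing against zero answers
    // "sequential two-byte string?" in one branch.
    bool IsSeqTwoByteString(int instance_type) {
      return (instance_type & (kIsNotStringMaskSketch |
                               kStringRepresentationMaskSketch |
                               kStringEncodingMaskSketch)) == 0;
    }

    // Without the encoding bit, zero means "sequential string"; since the
    // two-byte case was already taken, a hit here must be ascii.
    bool IsSeqString(int instance_type) {
      return (instance_type & (kIsNotStringMaskSketch |
                               kStringRepresentationMaskSketch)) == 0;
    }

    int main() {
      int seq_two_byte = 0x00;
      int seq_ascii = kStringEncodingMaskSketch;
      int cons_string = 0x01;                 // non-zero representation bits
      assert(IsSeqTwoByteString(seq_two_byte));
      assert(!IsSeqTwoByteString(seq_ascii) && IsSeqString(seq_ascii));
      assert(!IsSeqString(cons_string));
      return 0;
    }
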
10135 // Check for flat cons string. | 10135 // Check for flat cons string. |
10136 // A flat cons string is a cons string where the second part is the empty | 10136 // A flat cons string is a cons string where the second part is the empty |
10137 // string. In that case the subject string is just the first part of the cons | 10137 // string. In that case the subject string is just the first part of the cons |
10138 // string. Also in this case the first part of the cons string is known to be | 10138 // string. Also in this case the first part of the cons string is known to be |
10139 // a sequential string or an external string. | 10139 // a sequential string or an external string. |
10140 ASSERT(kExternalStringTag !=0); | 10140 STATIC_ASSERT(kExternalStringTag !=0); |
10141 ASSERT_EQ(0, kConsStringTag & kExternalStringTag); | 10141 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); |
10142 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag)); | 10142 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag)); |
10143 __ j(not_zero, &runtime); | 10143 __ j(not_zero, &runtime); |
10144 // String is a cons string. | 10144 // String is a cons string. |
10145 __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset)); | 10145 __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset)); |
10146 __ Cmp(rdx, Factory::empty_string()); | 10146 __ Cmp(rdx, Factory::empty_string()); |
10147 __ j(not_equal, &runtime); | 10147 __ j(not_equal, &runtime); |
10148 __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset)); | 10148 __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset)); |
10149 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 10149 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
10150 // String is a cons string with empty second part. | 10150 // String is a cons string with empty second part. |
10151 // rax: first part of cons string. | 10151 // rax: first part of cons string. |
10152 // rbx: map of first part of cons string. | 10152 // rbx: map of first part of cons string. |
10153 // Is first part a flat two byte string? | 10153 // Is first part a flat two byte string? |
10154 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), | 10154 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), |
10155 Immediate(kStringRepresentationMask | kStringEncodingMask)); | 10155 Immediate(kStringRepresentationMask | kStringEncodingMask)); |
10156 ASSERT_EQ(0, kSeqStringTag | kTwoByteStringTag); | 10156 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); |
10157 __ j(zero, &seq_two_byte_string); | 10157 __ j(zero, &seq_two_byte_string); |
10158 // Any other flat string must be ascii. | 10158 // Any other flat string must be ascii. |
10159 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), | 10159 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), |
10160 Immediate(kStringRepresentationMask)); | 10160 Immediate(kStringRepresentationMask)); |
10161 __ j(not_zero, &runtime); | 10161 __ j(not_zero, &runtime); |
10162 | 10162 |
10163 __ bind(&seq_ascii_string); | 10163 __ bind(&seq_ascii_string); |
10164 // rax: subject string (sequential ascii) | 10164 // rax: subject string (sequential ascii) |
10165 // rcx: RegExp data (FixedArray) | 10165 // rcx: RegExp data (FixedArray) |
10166 __ movq(r11, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset)); | 10166 __ movq(r11, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset)); |
(...skipping 216 matching lines...)
10383 // Calculate the entry in the number string cache. The hash value in the | 10383 // Calculate the entry in the number string cache. The hash value in the |
10384 // number string cache for smis is just the smi value, and the hash for | 10384 // number string cache for smis is just the smi value, and the hash for |
10385 // doubles is the xor of the upper and lower words. See | 10385 // doubles is the xor of the upper and lower words. See |
10386 // Heap::GetNumberStringCache. | 10386 // Heap::GetNumberStringCache. |
10387 Label is_smi; | 10387 Label is_smi; |
10388 Label load_result_from_cache; | 10388 Label load_result_from_cache; |
10389 if (!object_is_smi) { | 10389 if (!object_is_smi) { |
10390 __ JumpIfSmi(object, &is_smi); | 10390 __ JumpIfSmi(object, &is_smi); |
10391 __ CheckMap(object, Factory::heap_number_map(), not_found, true); | 10391 __ CheckMap(object, Factory::heap_number_map(), not_found, true); |
10392 | 10392 |
10393 ASSERT_EQ(8, kDoubleSize); | 10393 STATIC_ASSERT(8 == kDoubleSize); |
10394 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); | 10394 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); |
10395 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset)); | 10395 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset)); |
10396 GenerateConvertHashCodeToIndex(masm, scratch, mask); | 10396 GenerateConvertHashCodeToIndex(masm, scratch, mask); |
10397 | 10397 |
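
For heap numbers the cache hash is the xor of the low and high 32-bit words of the double, masked down to a cache index; a short standalone version of that computation (the cache size below is an arbitrary power of two chosen for the example):

    #include <cassert>
    #include <cstring>
    #include <stdint.h>

    uint32_t HashDoubleBits(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));   // relies on kDoubleSize == 8
      return static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
    }

    int main() {
      const uint32_t kCacheMask = 64 - 1;              // table of 64 entries
      uint32_t index = HashDoubleBits(2.5) & kCacheMask;
      assert(index <= kCacheMask);                     // always a valid slot
      return 0;
    }
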
10398 Register index = scratch; | 10398 Register index = scratch; |
10399 Register probe = mask; | 10399 Register probe = mask; |
10400 __ movq(probe, | 10400 __ movq(probe, |
10401 FieldOperand(number_string_cache, | 10401 FieldOperand(number_string_cache, |
10402 index, | 10402 index, |
10403 times_1, | 10403 times_1, |
(...skipping 160 matching lines...)
10564 __ ret(0); | 10564 __ ret(0); |
10565 | 10565 |
10566 __ bind(&not_smis); | 10566 __ bind(&not_smis); |
10567 } | 10567 } |
10568 | 10568 |
10569 // If either operand is a JSObject or an oddball value, then they are not | 10569 // If either operand is a JSObject or an oddball value, then they are not |
10570 // equal since their pointers are different | 10570 // equal since their pointers are different |
10571 // There is no test for undetectability in strict equality. | 10571 // There is no test for undetectability in strict equality. |
10572 | 10572 |
10573 // If the first object is a JS object, we have done pointer comparison. | 10573 // If the first object is a JS object, we have done pointer comparison. |
10574 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); | 10574 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
10575 Label first_non_object; | 10575 Label first_non_object; |
10576 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); | 10576 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); |
10577 __ j(below, &first_non_object); | 10577 __ j(below, &first_non_object); |
10578 // Return non-zero (eax (not rax) is not zero) | 10578 // Return non-zero (eax (not rax) is not zero) |
10579 Label return_not_equal; | 10579 Label return_not_equal; |
10580 ASSERT(kHeapObjectTag != 0); | 10580 STATIC_ASSERT(kHeapObjectTag != 0); |
10581 __ bind(&return_not_equal); | 10581 __ bind(&return_not_equal); |
10582 __ ret(0); | 10582 __ ret(0); |
10583 | 10583 |
10584 __ bind(&first_non_object); | 10584 __ bind(&first_non_object); |
10585 // Check for oddballs: true, false, null, undefined. | 10585 // Check for oddballs: true, false, null, undefined. |
10586 __ CmpInstanceType(rcx, ODDBALL_TYPE); | 10586 __ CmpInstanceType(rcx, ODDBALL_TYPE); |
10587 __ j(equal, &return_not_equal); | 10587 __ j(equal, &return_not_equal); |
10588 | 10588 |
10589 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx); | 10589 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx); |
10590 __ j(above_equal, &return_not_equal); | 10590 __ j(above_equal, &return_not_equal); |
(...skipping 71 matching lines...)
10662 | 10662 |
10663 __ bind(&check_unequal_objects); | 10663 __ bind(&check_unequal_objects); |
10664 if (cc_ == equal && !strict_) { | 10664 if (cc_ == equal && !strict_) { |
10665 // Not strict equality. Objects are unequal if | 10665 // Not strict equality. Objects are unequal if |
10666 // they are both JSObjects and not undetectable, | 10666 // they are both JSObjects and not undetectable, |
10667 // and their pointers are different. | 10667 // and their pointers are different. |
10668 Label not_both_objects, return_unequal; | 10668 Label not_both_objects, return_unequal; |
10669 // At most one is a smi, so we can test for smi by adding the two. | 10669 // At most one is a smi, so we can test for smi by adding the two. |
10670 // A smi plus a heap object has the low bit set, a heap object plus | 10670 // A smi plus a heap object has the low bit set, a heap object plus |
10671 // a heap object has the low bit clear. | 10671 // a heap object has the low bit clear. |
10672 ASSERT_EQ(0, kSmiTag); | 10672 STATIC_ASSERT(kSmiTag == 0); |
10673 ASSERT_EQ(static_cast<int64_t>(1), kSmiTagMask); | 10673 STATIC_ASSERT(kSmiTagMask == 1); |
10674 __ lea(rcx, Operand(rax, rdx, times_1, 0)); | 10674 __ lea(rcx, Operand(rax, rdx, times_1, 0)); |
10675 __ testb(rcx, Immediate(kSmiTagMask)); | 10675 __ testb(rcx, Immediate(kSmiTagMask)); |
10676 __ j(not_zero, &not_both_objects); | 10676 __ j(not_zero, &not_both_objects); |
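
The lea/testb pair implements the comment above: with smis tagged 0 in the low bit and heap objects tagged 1, the sum of the two tagged words has a clear low bit only when both are heap objects, given that at most one operand can still be a smi here. A worked example under those assumptions:

    #include <cassert>
    #include <stdint.h>

    const intptr_t kSmiTagMaskSketch = 1;     // matches kSmiTag == 0, mask == 1
    const intptr_t kHeapObjectTagSketch = 1;

    // heap + heap: the two low 1-bits carry out, so the sum's low bit is 0.
    // smi + heap:  0 + 1 leaves the sum's low bit set.
    // (Two smis would also give 0, but that case was excluded earlier.)
    bool BothLookLikeHeapObjects(intptr_t a, intptr_t b) {
      return ((a + b) & kSmiTagMaskSketch) == 0;
    }

    int main() {
      intptr_t heap_a = 0x1000 | kHeapObjectTagSketch;
      intptr_t heap_b = 0x2000 | kHeapObjectTagSketch;
      intptr_t smi = 42 << 1;                 // any smi has its low bit clear
      assert(BothLookLikeHeapObjects(heap_a, heap_b));
      assert(!BothLookLikeHeapObjects(smi, heap_b));
      return 0;
    }
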
10677 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx); | 10677 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx); |
10678 __ j(below, &not_both_objects); | 10678 __ j(below, &not_both_objects); |
10679 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx); | 10679 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx); |
10680 __ j(below, &not_both_objects); | 10680 __ j(below, &not_both_objects); |
10681 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), | 10681 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), |
10682 Immediate(1 << Map::kIsUndetectable)); | 10682 Immediate(1 << Map::kIsUndetectable)); |
10683 __ j(zero, &return_unequal); | 10683 __ j(zero, &return_unequal); |
(...skipping 35 matching lines...)
10719 | 10719 |
10720 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm, | 10720 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm, |
10721 Label* label, | 10721 Label* label, |
10722 Register object, | 10722 Register object, |
10723 Register scratch) { | 10723 Register scratch) { |
10724 __ JumpIfSmi(object, label); | 10724 __ JumpIfSmi(object, label); |
10725 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); | 10725 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); |
10726 __ movzxbq(scratch, | 10726 __ movzxbq(scratch, |
10727 FieldOperand(scratch, Map::kInstanceTypeOffset)); | 10727 FieldOperand(scratch, Map::kInstanceTypeOffset)); |
10728 // Ensure that no non-strings have the symbol bit set. | 10728 // Ensure that no non-strings have the symbol bit set. |
10729 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE); | 10729 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask); |
10730 ASSERT(kSymbolTag != 0); | 10730 STATIC_ASSERT(kSymbolTag != 0); |
10731 __ testb(scratch, Immediate(kIsSymbolMask)); | 10731 __ testb(scratch, Immediate(kIsSymbolMask)); |
10732 __ j(zero, label); | 10732 __ j(zero, label); |
10733 } | 10733 } |
10734 | 10734 |
10735 | 10735 |
10736 void StackCheckStub::Generate(MacroAssembler* masm) { | 10736 void StackCheckStub::Generate(MacroAssembler* masm) { |
10737 // Because builtins always remove the receiver from the stack, we | 10737 // Because builtins always remove the receiver from the stack, we |
10738 // have to fake one to avoid underflowing the stack. The receiver | 10738 // have to fake one to avoid underflowing the stack. The receiver |
10739 // must be inserted below the return address on the stack so we | 10739 // must be inserted below the return address on the stack so we |
10740 // temporarily store that in a register. | 10740 // temporarily store that in a register. |
(...skipping 58 matching lines...)
10799 __ Set(rbx, 0); | 10799 __ Set(rbx, 0); |
10800 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | 10800 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
10801 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); | 10801 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); |
10802 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 10802 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
10803 } | 10803 } |
10804 | 10804 |
10805 | 10805 |
10806 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { | 10806 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
10807 // Check that stack should contain next handler, frame pointer, state and | 10807 // Check that stack should contain next handler, frame pointer, state and |
10808 // return address in that order. | 10808 // return address in that order. |
10809 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize, | 10809 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == |
10810 StackHandlerConstants::kStateOffset); | 10810 StackHandlerConstants::kStateOffset); |
10811 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize, | 10811 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == |
10812 StackHandlerConstants::kPCOffset); | 10812 StackHandlerConstants::kPCOffset); |
10813 | 10813 |
10814 ExternalReference handler_address(Top::k_handler_address); | 10814 ExternalReference handler_address(Top::k_handler_address); |
10815 __ movq(kScratchRegister, handler_address); | 10815 __ movq(kScratchRegister, handler_address); |
10816 __ movq(rsp, Operand(kScratchRegister, 0)); | 10816 __ movq(rsp, Operand(kScratchRegister, 0)); |
10817 // get next in chain | 10817 // get next in chain |
10818 __ pop(rcx); | 10818 __ pop(rcx); |
10819 __ movq(Operand(kScratchRegister, 0), rcx); | 10819 __ movq(Operand(kScratchRegister, 0), rcx); |
10820 __ pop(rbp); // pop frame pointer | 10820 __ pop(rbp); // pop frame pointer |
10821 __ pop(rdx); // remove state | 10821 __ pop(rdx); // remove state |
(...skipping 89 matching lines...)
10911 __ call(rbx); | 10911 __ call(rbx); |
10912 // Result is in rax - do not destroy this register! | 10912 // Result is in rax - do not destroy this register! |
10913 | 10913 |
10914 if (always_allocate_scope) { | 10914 if (always_allocate_scope) { |
10915 __ movq(kScratchRegister, scope_depth); | 10915 __ movq(kScratchRegister, scope_depth); |
10916 __ decl(Operand(kScratchRegister, 0)); | 10916 __ decl(Operand(kScratchRegister, 0)); |
10917 } | 10917 } |
10918 | 10918 |
10919 // Check for failure result. | 10919 // Check for failure result. |
10920 Label failure_returned; | 10920 Label failure_returned; |
10921 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); | 10921 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); |
10922 #ifdef _WIN64 | 10922 #ifdef _WIN64 |
10923 // If return value is on the stack, pop it to registers. | 10923 // If return value is on the stack, pop it to registers. |
10924 if (result_size_ > 1) { | 10924 if (result_size_ > 1) { |
10925 ASSERT_EQ(2, result_size_); | 10925 ASSERT_EQ(2, result_size_); |
10926 // Read result values stored on stack. Result is stored | 10926 // Read result values stored on stack. Result is stored |
10927 // above the four argument mirror slots and the two | 10927 // above the four argument mirror slots and the two |
10928 // Arguments object slots. | 10928 // Arguments object slots. |
10929 __ movq(rax, Operand(rsp, 6 * kPointerSize)); | 10929 __ movq(rax, Operand(rsp, 6 * kPointerSize)); |
10930 __ movq(rdx, Operand(rsp, 7 * kPointerSize)); | 10930 __ movq(rdx, Operand(rsp, 7 * kPointerSize)); |
10931 } | 10931 } |
10932 #endif | 10932 #endif |
10933 __ lea(rcx, Operand(rax, 1)); | 10933 __ lea(rcx, Operand(rax, 1)); |
10934 // Lower 2 bits of rcx are 0 iff rax has failure tag. | 10934 // Lower 2 bits of rcx are 0 iff rax has failure tag. |
10935 __ testl(rcx, Immediate(kFailureTagMask)); | 10935 __ testl(rcx, Immediate(kFailureTagMask)); |
10936 __ j(zero, &failure_returned); | 10936 __ j(zero, &failure_returned); |
10937 | 10937 |
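
The lea rcx, [rax+1] plus testl encodes the failure check in one branch: failure values carry a two-bit 0b11 tag, so adding one clears those bits exactly when the tag is present, which is also what the STATIC_ASSERT above states. A numeric illustration with assumed tag values:

    #include <cassert>
    #include <stdint.h>

    const intptr_t kFailureTagSketch = 3;      // 0b11, assumed
    const intptr_t kFailureTagMaskSketch = 3;  // low two bits

    // Adding one turns low bits 11 into 00 (and nothing else into 00), so a
    // single test of the incremented value detects the failure tag.
    bool HasFailureTag(intptr_t returned_value) {
      return ((returned_value + 1) & kFailureTagMaskSketch) == 0;
    }

    int main() {
      assert(((kFailureTagSketch + 1) & kFailureTagMaskSketch) == 0);  // the assert itself
      intptr_t failure_value = (0x10 << 2) | kFailureTagSketch;
      intptr_t smi_value = 7 << 2;            // low bits 00
      intptr_t heap_object = 0x1000 | 1;      // low bits 01
      assert(HasFailureTag(failure_value));
      assert(!HasFailureTag(smi_value));
      assert(!HasFailureTag(heap_object));
      return 0;
    }
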
10938 // Exit the JavaScript to C++ exit frame. | 10938 // Exit the JavaScript to C++ exit frame. |
10939 __ LeaveExitFrame(mode_, result_size_); | 10939 __ LeaveExitFrame(mode_, result_size_); |
10940 __ ret(0); | 10940 __ ret(0); |
10941 | 10941 |
10942 // Handling of failure. | 10942 // Handling of failure. |
10943 __ bind(&failure_returned); | 10943 __ bind(&failure_returned); |
10944 | 10944 |
10945 Label retry; | 10945 Label retry; |
10946 // If the returned exception is RETRY_AFTER_GC continue at retry label | 10946 // If the returned exception is RETRY_AFTER_GC continue at retry label |
10947 ASSERT(Failure::RETRY_AFTER_GC == 0); | 10947 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); |
10948 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); | 10948 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); |
10949 __ j(zero, &retry); | 10949 __ j(zero, &retry); |
10950 | 10950 |
10951 // Special handling of out of memory exceptions. | 10951 // Special handling of out of memory exceptions. |
10952 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE); | 10952 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE); |
10953 __ cmpq(rax, kScratchRegister); | 10953 __ cmpq(rax, kScratchRegister); |
10954 __ j(equal, throw_out_of_memory_exception); | 10954 __ j(equal, throw_out_of_memory_exception); |
10955 | 10955 |
10956 // Retrieve the pending exception and clear the variable. | 10956 // Retrieve the pending exception and clear the variable. |
10957 ExternalReference pending_exception_address(Top::k_pending_exception_address); | 10957 ExternalReference pending_exception_address(Top::k_pending_exception_address); |
(...skipping 49 matching lines...)
11007 // Set pending exception and rax to out of memory exception. | 11007 // Set pending exception and rax to out of memory exception. |
11008 ExternalReference pending_exception(Top::k_pending_exception_address); | 11008 ExternalReference pending_exception(Top::k_pending_exception_address); |
11009 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); | 11009 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); |
11010 __ store_rax(pending_exception); | 11010 __ store_rax(pending_exception); |
11011 } | 11011 } |
11012 | 11012 |
11013 // Clear the context pointer. | 11013 // Clear the context pointer. |
11014 __ xor_(rsi, rsi); | 11014 __ xor_(rsi, rsi); |
11015 | 11015 |
11016 // Restore registers from handler. | 11016 // Restore registers from handler. |
11017 ASSERT_EQ(StackHandlerConstants::kNextOffset + kPointerSize, | 11017 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize == |
11018 StackHandlerConstants::kFPOffset); | 11018 StackHandlerConstants::kFPOffset); |
11019 __ pop(rbp); // FP | 11019 __ pop(rbp); // FP |
11020 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize, | 11020 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == |
11021 StackHandlerConstants::kStateOffset); | 11021 StackHandlerConstants::kStateOffset); |
11022 __ pop(rdx); // State | 11022 __ pop(rdx); // State |
11023 | 11023 |
11024 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize, | 11024 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == |
11025 StackHandlerConstants::kPCOffset); | 11025 StackHandlerConstants::kPCOffset); |
11026 __ ret(0); | 11026 __ ret(0); |
11027 } | 11027 } |
11028 | 11028 |
11029 | 11029 |
11030 void CEntryStub::Generate(MacroAssembler* masm) { | 11030 void CEntryStub::Generate(MacroAssembler* masm) { |
11031 // rax: number of arguments including receiver | 11031 // rax: number of arguments including receiver |
11032 // rbx: pointer to C function (C callee-saved) | 11032 // rbx: pointer to C function (C callee-saved) |
11033 // rbp: frame pointer of calling JS frame (restored after C call) | 11033 // rbp: frame pointer of calling JS frame (restored after C call) |
11034 // rsp: stack pointer (restored after C call) | 11034 // rsp: stack pointer (restored after C call) |
(...skipping 254 matching lines...)
11289 // The code at is_not_instance assumes that kScratchRegister contains a | 11289 // The code at is_not_instance assumes that kScratchRegister contains a |
11290 // non-zero GCable value (the null object in this case). | 11290 // non-zero GCable value (the null object in this case). |
11291 __ j(equal, &is_not_instance); | 11291 __ j(equal, &is_not_instance); |
11292 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); | 11292 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); |
11293 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); | 11293 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); |
11294 __ jmp(&loop); | 11294 __ jmp(&loop); |
11295 | 11295 |
11296 __ bind(&is_instance); | 11296 __ bind(&is_instance); |
11297 __ xorl(rax, rax); | 11297 __ xorl(rax, rax); |
11298 // Store bitwise zero in the cache. This is a Smi in GC terms. | 11298 // Store bitwise zero in the cache. This is a Smi in GC terms. |
11299 ASSERT_EQ(0, kSmiTag); | 11299 STATIC_ASSERT(kSmiTag == 0); |
11300 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 11300 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
11301 __ ret(2 * kPointerSize); | 11301 __ ret(2 * kPointerSize); |
11302 | 11302 |
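
Storing the all-zero word is safe for the GC because, with kSmiTag == 0, bitwise zero is already the tagged form of the integer 0 rather than a pointer. A minimal check of that claim (tag constants assumed as before):

    #include <cassert>
    #include <stdint.h>

    const intptr_t kSmiTagSketch = 0;
    const intptr_t kSmiTagMaskSketch = 1;

    int main() {
      intptr_t bitwise_zero = 0;                   // what xorl rax, rax produces
      assert((bitwise_zero & kSmiTagMaskSketch) == kSmiTagSketch);  // reads as a smi
      return 0;
    }
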
11303 __ bind(&is_not_instance); | 11303 __ bind(&is_not_instance); |
11304 // We have to store a non-zero value in the cache. | 11304 // We have to store a non-zero value in the cache. |
11305 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 11305 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
11306 __ ret(2 * kPointerSize); | 11306 __ ret(2 * kPointerSize); |
11307 | 11307 |
11308 // Slow-case: Go through the JavaScript implementation. | 11308 // Slow-case: Go through the JavaScript implementation. |
11309 __ bind(&slow); | 11309 __ bind(&slow); |
(...skipping 84 matching lines...)
11394 | 11394 |
11395 // Put smi-tagged index into scratch register. | 11395 // Put smi-tagged index into scratch register. |
11396 __ movq(scratch_, index_); | 11396 __ movq(scratch_, index_); |
11397 __ bind(&got_smi_index_); | 11397 __ bind(&got_smi_index_); |
11398 | 11398 |
11399 // Check for index out of range. | 11399 // Check for index out of range. |
11400 __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset)); | 11400 __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset)); |
11401 __ j(above_equal, index_out_of_range_); | 11401 __ j(above_equal, index_out_of_range_); |
11402 | 11402 |
11403 // We need special handling for non-flat strings. | 11403 // We need special handling for non-flat strings. |
11404 ASSERT(kSeqStringTag == 0); | 11404 STATIC_ASSERT(kSeqStringTag == 0); |
11405 __ testb(result_, Immediate(kStringRepresentationMask)); | 11405 __ testb(result_, Immediate(kStringRepresentationMask)); |
11406 __ j(zero, &flat_string); | 11406 __ j(zero, &flat_string); |
11407 | 11407 |
11408 // Handle non-flat strings. | 11408 // Handle non-flat strings. |
11409 __ testb(result_, Immediate(kIsConsStringMask)); | 11409 __ testb(result_, Immediate(kIsConsStringMask)); |
11410 __ j(zero, &call_runtime_); | 11410 __ j(zero, &call_runtime_); |
11411 | 11411 |
11412 // ConsString. | 11412 // ConsString. |
11413 // Check whether the right hand side is the empty string (i.e. if | 11413 // Check whether the right hand side is the empty string (i.e. if |
11414 // this is really a flat string in a cons string). If that is not | 11414 // this is really a flat string in a cons string). If that is not |
11415 // the case we would rather go to the runtime system now to flatten | 11415 // the case we would rather go to the runtime system now to flatten |
11416 // the string. | 11416 // the string. |
11417 __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset), | 11417 __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset), |
11418 Heap::kEmptyStringRootIndex); | 11418 Heap::kEmptyStringRootIndex); |
11419 __ j(not_equal, &call_runtime_); | 11419 __ j(not_equal, &call_runtime_); |
11420 // Get the first of the two strings and load its instance type. | 11420 // Get the first of the two strings and load its instance type. |
11421 __ movq(object_, FieldOperand(object_, ConsString::kFirstOffset)); | 11421 __ movq(object_, FieldOperand(object_, ConsString::kFirstOffset)); |
11422 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset)); | 11422 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset)); |
11423 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); | 11423 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); |
11424 // If the first cons component is also non-flat, then go to runtime. | 11424 // If the first cons component is also non-flat, then go to runtime. |
11425 ASSERT(kSeqStringTag == 0); | 11425 STATIC_ASSERT(kSeqStringTag == 0); |
11426 __ testb(result_, Immediate(kStringRepresentationMask)); | 11426 __ testb(result_, Immediate(kStringRepresentationMask)); |
11427 __ j(not_zero, &call_runtime_); | 11427 __ j(not_zero, &call_runtime_); |
11428 | 11428 |
11429 // Check for 1-byte or 2-byte string. | 11429 // Check for 1-byte or 2-byte string. |
11430 __ bind(&flat_string); | 11430 __ bind(&flat_string); |
11431 ASSERT(kAsciiStringTag != 0); | 11431 STATIC_ASSERT(kAsciiStringTag != 0); |
11432 __ testb(result_, Immediate(kStringEncodingMask)); | 11432 __ testb(result_, Immediate(kStringEncodingMask)); |
11433 __ j(not_zero, &ascii_string); | 11433 __ j(not_zero, &ascii_string); |
11434 | 11434 |
11435 // 2-byte string. | 11435 // 2-byte string. |
11436 // Load the 2-byte character code into the result register. | 11436 // Load the 2-byte character code into the result register. |
11437 __ SmiToInteger32(scratch_, scratch_); | 11437 __ SmiToInteger32(scratch_, scratch_); |
11438 __ movzxwl(result_, FieldOperand(object_, | 11438 __ movzxwl(result_, FieldOperand(object_, |
11439 scratch_, times_2, | 11439 scratch_, times_2, |
11440 SeqTwoByteString::kHeaderSize)); | 11440 SeqTwoByteString::kHeaderSize)); |
11441 __ jmp(&got_char_code); | 11441 __ jmp(&got_char_code); |
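
Once the string is known to be flat, the encoding bit picks between a byte load and a two-byte load at the untagged index; the ascii tag being non-zero is what the STATIC_ASSERT guarantees. A schematic version with stand-in buffers and flag values:

    #include <cassert>
    #include <stdint.h>

    const int kTwoByteEncodingSketch = 0;   // mirrors kTwoByteStringTag == 0
    const int kAsciiEncodingSketch = 1;     // mirrors kAsciiStringTag != 0

    // A byte load for ascii strings, a two-byte load (movzxwl with a times_2
    // index in the stub) otherwise.
    uint32_t CharCodeAt(int encoding, const void* chars, int untagged_index) {
      if (encoding == kAsciiEncodingSketch) {
        return static_cast<const uint8_t*>(chars)[untagged_index];
      }
      return static_cast<const uint16_t*>(chars)[untagged_index];
    }

    int main() {
      const uint8_t ascii_chars[] = { 'a', 'b', 'c' };
      const uint16_t two_byte_chars[] = { 0x3042, 0x3044 };
      assert(CharCodeAt(kAsciiEncodingSketch, ascii_chars, 2) == 'c');
      assert(CharCodeAt(kTwoByteEncodingSketch, two_byte_chars, 1) == 0x3044);
      return 0;
    }
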
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
11615 // by the code above. | 11615 // by the code above. |
11616 if (!string_check_) { | 11616 if (!string_check_) { |
11617 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset)); | 11617 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset)); |
11618 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); | 11618 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); |
11619 } | 11619 } |
11620 // Get the instance types of the two strings as they will be needed soon. | 11620 // Get the instance types of the two strings as they will be needed soon. |
11621 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); | 11621 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); |
11622 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); | 11622 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); |
11623 | 11623 |
11624 // Look at the length of the result of adding the two strings. | 11624 // Look at the length of the result of adding the two strings. |
11625 ASSERT(String::kMaxLength <= Smi::kMaxValue / 2); | 11625 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2); |
11626 __ SmiAdd(rbx, rbx, rcx, NULL); | 11626 __ SmiAdd(rbx, rbx, rcx, NULL); |
11627 // Use the runtime system when adding two one character strings, as it | 11627 // Use the runtime system when adding two one character strings, as it |
11628 // contains optimizations for this specific case using the symbol table. | 11628 // contains optimizations for this specific case using the symbol table. |
11629 __ SmiCompare(rbx, Smi::FromInt(2)); | 11629 __ SmiCompare(rbx, Smi::FromInt(2)); |
11630 __ j(not_equal, &longer_than_two); | 11630 __ j(not_equal, &longer_than_two); |
11631 | 11631 |
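
The NULL overflow label passed to SmiAdd is justified by the STATIC_ASSERT a few lines up: each operand length is at most half the smi maximum, so the sum always stays representable. A tiny numeric check with illustrative bounds (the real Smi::kMaxValue differs on x64):

    #include <cassert>

    int main() {
      const long long kSmiMaxValueSketch = 1073741823LL;        // 2^30 - 1, made up here
      const long long kMaxLengthSketch = kSmiMaxValueSketch / 2;
      long long worst_case_sum = kMaxLengthSketch + kMaxLengthSketch;
      assert(worst_case_sum <= kSmiMaxValueSketch);             // still a valid smi
      return 0;
    }
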
11632 // Check that both strings are non-external ascii strings. | 11632 // Check that both strings are non-external ascii strings. |
11633 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx, | 11633 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx, |
11634 &string_add_runtime); | 11634 &string_add_runtime); |
11635 | 11635 |
(...skipping 11 matching lines...)
11647 | 11647 |
11648 __ bind(&make_two_character_string); | 11648 __ bind(&make_two_character_string); |
11649 __ Set(rbx, 2); | 11649 __ Set(rbx, 2); |
11650 __ jmp(&make_flat_ascii_string); | 11650 __ jmp(&make_flat_ascii_string); |
11651 | 11651 |
11652 __ bind(&longer_than_two); | 11652 __ bind(&longer_than_two); |
11653 // Check if resulting string will be flat. | 11653 // Check if resulting string will be flat. |
11654 __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength)); | 11654 __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength)); |
11655 __ j(below, &string_add_flat_result); | 11655 __ j(below, &string_add_flat_result); |
11656 // Handle exceptionally long strings in the runtime system. | 11656 // Handle exceptionally long strings in the runtime system. |
11657 ASSERT((String::kMaxLength & 0x80000000) == 0); | 11657 STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0); |
11658 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength)); | 11658 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength)); |
11659 __ j(above, &string_add_runtime); | 11659 __ j(above, &string_add_runtime); |
11660 | 11660 |
11661 // If result is not supposed to be flat, allocate a cons string object. If | 11661 // If result is not supposed to be flat, allocate a cons string object. If |
11662 // both strings are ascii the result is an ascii cons string. | 11662 // both strings are ascii the result is an ascii cons string. |
11663 // rax: first string | 11663 // rax: first string |
11664 // rbx: length of resulting flat string | 11664 // rbx: length of resulting flat string |
11665 // rdx: second string | 11665 // rdx: second string |
11666 // r8: instance type of first string | 11666 // r8: instance type of first string |
11667 // r9: instance type of second string | 11667 // r9: instance type of second string |
11668 Label non_ascii, allocated, ascii_data; | 11668 Label non_ascii, allocated, ascii_data; |
11669 __ movl(rcx, r8); | 11669 __ movl(rcx, r8); |
11670 __ and_(rcx, r9); | 11670 __ and_(rcx, r9); |
11671 ASSERT(kStringEncodingMask == kAsciiStringTag); | 11671 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag); |
11672 __ testl(rcx, Immediate(kAsciiStringTag)); | 11672 __ testl(rcx, Immediate(kAsciiStringTag)); |
11673 __ j(zero, &non_ascii); | 11673 __ j(zero, &non_ascii); |
11674 __ bind(&ascii_data); | 11674 __ bind(&ascii_data); |
11675 // Allocate an acsii cons string. | 11675 // Allocate an acsii cons string. |
11676 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime); | 11676 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime); |
11677 __ bind(&allocated); | 11677 __ bind(&allocated); |
11678 // Fill the fields of the cons string. | 11678 // Fill the fields of the cons string. |
11679 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); | 11679 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); |
11680 __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset), | 11680 __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset), |
11681 Immediate(String::kEmptyHashField)); | 11681 Immediate(String::kEmptyHashField)); |
11682 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); | 11682 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); |
11683 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); | 11683 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); |
11684 __ movq(rax, rcx); | 11684 __ movq(rax, rcx); |
11685 __ IncrementCounter(&Counters::string_add_native, 1); | 11685 __ IncrementCounter(&Counters::string_add_native, 1); |
11686 __ ret(2 * kPointerSize); | 11686 __ ret(2 * kPointerSize); |
11687 __ bind(&non_ascii); | 11687 __ bind(&non_ascii); |
11688 // At least one of the strings is two-byte. Check whether it happens | 11688 // At least one of the strings is two-byte. Check whether it happens |
11689 // to contain only ascii characters. | 11689 // to contain only ascii characters. |
11690 // rcx: first instance type AND second instance type. | 11690 // rcx: first instance type AND second instance type. |
11691 // r8: first instance type. | 11691 // r8: first instance type. |
11692 // r9: second instance type. | 11692 // r9: second instance type. |
11693 __ testb(rcx, Immediate(kAsciiDataHintMask)); | 11693 __ testb(rcx, Immediate(kAsciiDataHintMask)); |
11694 __ j(not_zero, &ascii_data); | 11694 __ j(not_zero, &ascii_data); |
11695 __ xor_(r8, r9); | 11695 __ xor_(r8, r9); |
11696 ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0); | 11696 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0); |
11697 __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag)); | 11697 __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag)); |
11698 __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag)); | 11698 __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag)); |
11699 __ j(equal, &ascii_data); | 11699 __ j(equal, &ascii_data); |
11700 // Allocate a two byte cons string. | 11700 // Allocate a two byte cons string. |
11701 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime); | 11701 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime); |
11702 __ jmp(&allocated); | 11702 __ jmp(&allocated); |
11703 | 11703 |
11704 // Handle creating a flat result. First check that both strings are not | 11704 // Handle creating a flat result. First check that both strings are not |
11705 // external strings. | 11705 // external strings. |
11706 // rax: first string | 11706 // rax: first string |
(...skipping 11 matching lines...)
11718 __ and_(rcx, Immediate(kStringRepresentationMask)); | 11718 __ and_(rcx, Immediate(kStringRepresentationMask)); |
11719 __ cmpl(rcx, Immediate(kExternalStringTag)); | 11719 __ cmpl(rcx, Immediate(kExternalStringTag)); |
11720 __ j(equal, &string_add_runtime); | 11720 __ j(equal, &string_add_runtime); |
11721 // Now check if both strings are ascii strings. | 11721 // Now check if both strings are ascii strings. |
11722 // rax: first string | 11722 // rax: first string |
11723 // rbx: length of resulting flat string | 11723 // rbx: length of resulting flat string |
11724 // rdx: second string | 11724 // rdx: second string |
11725 // r8: instance type of first string | 11725 // r8: instance type of first string |
11726 // r9: instance type of second string | 11726 // r9: instance type of second string |
11727 Label non_ascii_string_add_flat_result; | 11727 Label non_ascii_string_add_flat_result; |
11728 ASSERT(kStringEncodingMask == kAsciiStringTag); | 11728 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag); |
11729 __ testl(r8, Immediate(kAsciiStringTag)); | 11729 __ testl(r8, Immediate(kAsciiStringTag)); |
11730 __ j(zero, &non_ascii_string_add_flat_result); | 11730 __ j(zero, &non_ascii_string_add_flat_result); |
11731 __ testl(r9, Immediate(kAsciiStringTag)); | 11731 __ testl(r9, Immediate(kAsciiStringTag)); |
11732 __ j(zero, &string_add_runtime); | 11732 __ j(zero, &string_add_runtime); |
11733 | 11733 |
11734 __ bind(&make_flat_ascii_string); | 11734 __ bind(&make_flat_ascii_string); |
11735 // Both strings are ascii strings. As they are short they are both flat. | 11735 // Both strings are ascii strings. As they are short they are both flat. |
11736 __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime); | 11736 __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime); |
11737 // rcx: result string | 11737 // rcx: result string |
11738 __ movq(rbx, rcx); | 11738 __ movq(rbx, rcx); |
(...skipping 101 matching lines...)
11840 ASSERT(src.is(rsi)); // rep movs source | 11840 ASSERT(src.is(rsi)); // rep movs source |
11841 ASSERT(count.is(rcx)); // rep movs count | 11841 ASSERT(count.is(rcx)); // rep movs count |
11842 | 11842 |
11843 // Nothing to do for zero characters. | 11843 // Nothing to do for zero characters. |
11844 Label done; | 11844 Label done; |
11845 __ testl(count, count); | 11845 __ testl(count, count); |
11846 __ j(zero, &done); | 11846 __ j(zero, &done); |
11847 | 11847 |
11848 // Make count the number of bytes to copy. | 11848 // Make count the number of bytes to copy. |
11849 if (!ascii) { | 11849 if (!ascii) { |
11850 ASSERT_EQ(2, static_cast<int>(sizeof(uc16))); // NOLINT | 11850 STATIC_ASSERT(2 == sizeof(uc16)); |
11851 __ addl(count, count); | 11851 __ addl(count, count); |
11852 } | 11852 } |
11853 | 11853 |
11854 // Don't enter the rep movs if there are less than 4 bytes to copy. | 11854 // Don't enter the rep movs if there are less than 4 bytes to copy. |
11855 Label last_bytes; | 11855 Label last_bytes; |
11856 __ testl(count, Immediate(~7)); | 11856 __ testl(count, Immediate(~7)); |
11857 __ j(zero, &last_bytes); | 11857 __ j(zero, &last_bytes); |
11858 | 11858 |
11859 // Copy from edi to esi using rep movs instruction. | 11859 // Copy from edi to esi using rep movs instruction. |
11860 __ movl(kScratchRegister, count); | 11860 __ movl(kScratchRegister, count); |
(...skipping 86 matching lines...)
11947 for (int i = 0; i < kProbes; i++) { | 11947 for (int i = 0; i < kProbes; i++) { |
11948 // Calculate entry in symbol table. | 11948 // Calculate entry in symbol table. |
11949 __ movl(scratch, hash); | 11949 __ movl(scratch, hash); |
11950 if (i > 0) { | 11950 if (i > 0) { |
11951 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i))); | 11951 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i))); |
11952 } | 11952 } |
11953 __ andl(scratch, mask); | 11953 __ andl(scratch, mask); |
11954 | 11954 |
11955 // Load the entry from the symble table. | 11955 // Load the entry from the symble table. |
11956 Register candidate = scratch; // Scratch register contains candidate. | 11956 Register candidate = scratch; // Scratch register contains candidate. |
11957 ASSERT_EQ(1, SymbolTable::kEntrySize); | 11957 STATIC_ASSERT(SymbolTable::kEntrySize == 1); |
11958 __ movq(candidate, | 11958 __ movq(candidate, |
11959 FieldOperand(symbol_table, | 11959 FieldOperand(symbol_table, |
11960 scratch, | 11960 scratch, |
11961 times_pointer_size, | 11961 times_pointer_size, |
11962 SymbolTable::kElementsStartOffset)); | 11962 SymbolTable::kElementsStartOffset)); |
11963 | 11963 |
11964 // If entry is undefined no string with this hash can be found. | 11964 // If entry is undefined no string with this hash can be found. |
11965 __ cmpq(candidate, undefined); | 11965 __ cmpq(candidate, undefined); |
11966 __ j(equal, not_found); | 11966 __ j(equal, not_found); |
11967 | 11967 |
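
The probe loop adds a per-round offset to the hash, masks it to the power-of-two table size, and gives up as soon as it sees undefined. A schematic open-addressing version (the probe offsets here are a stand-in, not SymbolTable::GetProbeOffset):

    #include <cassert>
    #include <string>
    #include <vector>

    static const std::string kUndefinedSentinel = "";   // models the undefined root

    int FindSymbol(const std::vector<std::string>& table,
                   unsigned hash,
                   const std::string& wanted) {
      const unsigned mask = static_cast<unsigned>(table.size()) - 1;  // size is 2^n
      for (unsigned i = 0; i < 4; i++) {                       // a bounded kProbes
        unsigned index = (hash + i * (i + 1) / 2) & mask;      // assumed probe offsets
        const std::string& candidate = table[index];
        if (candidate == kUndefinedSentinel) return -1;        // cannot be present
        if (candidate == wanted) return static_cast<int>(index);
      }
      return -1;   // give up; the stub falls back to the runtime here
    }

    int main() {
      std::vector<std::string> table(8);   // 2^3 slots, all "undefined"
      table[3] = "foo";
      assert(FindSymbol(table, 3u, "foo") == 3);
      assert(FindSymbol(table, 5u, "bar") == -1);
      return 0;
    }
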
(...skipping 94 matching lines...)
12062 // rsp[16]: from | 12062 // rsp[16]: from |
12063 // rsp[24]: string | 12063 // rsp[24]: string |
12064 | 12064 |
12065 const int kToOffset = 1 * kPointerSize; | 12065 const int kToOffset = 1 * kPointerSize; |
12066 const int kFromOffset = kToOffset + kPointerSize; | 12066 const int kFromOffset = kToOffset + kPointerSize; |
12067 const int kStringOffset = kFromOffset + kPointerSize; | 12067 const int kStringOffset = kFromOffset + kPointerSize; |
12068 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset; | 12068 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset; |
12069 | 12069 |
12070 // Make sure first argument is a string. | 12070 // Make sure first argument is a string. |
12071 __ movq(rax, Operand(rsp, kStringOffset)); | 12071 __ movq(rax, Operand(rsp, kStringOffset)); |
12072 ASSERT_EQ(0, kSmiTag); | 12072 STATIC_ASSERT(kSmiTag == 0); |
12073 __ testl(rax, Immediate(kSmiTagMask)); | 12073 __ testl(rax, Immediate(kSmiTagMask)); |
12074 __ j(zero, &runtime); | 12074 __ j(zero, &runtime); |
12075 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); | 12075 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); |
12076 __ j(NegateCondition(is_string), &runtime); | 12076 __ j(NegateCondition(is_string), &runtime); |
12077 | 12077 |
12078 // rax: string | 12078 // rax: string |
12079 // rbx: instance type | 12079 // rbx: instance type |
12080 // Calculate length of sub string using the smi values. | 12080 // Calculate length of sub string using the smi values. |
12081 Label result_longer_than_two; | 12081 Label result_longer_than_two; |
12082 __ movq(rcx, Operand(rsp, kToOffset)); | 12082 __ movq(rcx, Operand(rsp, kToOffset)); |
(...skipping 119 matching lines...)
12202 | 12202 |
12203 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, | 12203 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, |
12204 Register left, | 12204 Register left, |
12205 Register right, | 12205 Register right, |
12206 Register scratch1, | 12206 Register scratch1, |
12207 Register scratch2, | 12207 Register scratch2, |
12208 Register scratch3, | 12208 Register scratch3, |
12209 Register scratch4) { | 12209 Register scratch4) { |
12210 // Ensure that you can always subtract a string length from a non-negative | 12210 // Ensure that you can always subtract a string length from a non-negative |
12211 // number (e.g. another length). | 12211 // number (e.g. another length). |
12212 ASSERT(String::kMaxLength < 0x7fffffff); | 12212 STATIC_ASSERT(String::kMaxLength < 0x7fffffff); |
12213 | 12213 |
12214 // Find minimum length and length difference. | 12214 // Find minimum length and length difference. |
12215 __ movq(scratch1, FieldOperand(left, String::kLengthOffset)); | 12215 __ movq(scratch1, FieldOperand(left, String::kLengthOffset)); |
12216 __ movq(scratch4, scratch1); | 12216 __ movq(scratch4, scratch1); |
12217 __ SmiSub(scratch4, | 12217 __ SmiSub(scratch4, |
12218 scratch4, | 12218 scratch4, |
12219 FieldOperand(right, String::kLengthOffset), | 12219 FieldOperand(right, String::kLengthOffset), |
12220 NULL); | 12220 NULL); |
12221 // Register scratch4 now holds left.length - right.length. | 12221 // Register scratch4 now holds left.length - right.length. |
12222 const Register length_difference = scratch4; | 12222 const Register length_difference = scratch4; |
(...skipping 202 matching lines...)
12425 #undef __ | 12425 #undef __ |
12426 | 12426 |
12427 void RecordWriteStub::Generate(MacroAssembler* masm) { | 12427 void RecordWriteStub::Generate(MacroAssembler* masm) { |
12428 masm->RecordWriteHelper(object_, addr_, scratch_); | 12428 masm->RecordWriteHelper(object_, addr_, scratch_); |
12429 masm->ret(0); | 12429 masm->ret(0); |
12430 } | 12430 } |
12431 | 12431 |
12432 } } // namespace v8::internal | 12432 } } // namespace v8::internal |
12433 | 12433 |
12434 #endif // V8_TARGET_ARCH_X64 | 12434 #endif // V8_TARGET_ARCH_X64 |