OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 768 matching lines...)
779 | 779 |
780 | 780 |
781 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { | 781 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { |
782 // The variable in the declaration always resides in the current function | 782 // The variable in the declaration always resides in the current function |
783 // context. | 783 // context. |
784 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | 784 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
785 if (generate_debug_code_) { | 785 if (generate_debug_code_) { |
786 // Check that we're not inside a with or catch context. | 786 // Check that we're not inside a with or catch context. |
787 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); | 787 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); |
788 __ LoadRoot(t0, Heap::kWithContextMapRootIndex); | 788 __ LoadRoot(t0, Heap::kWithContextMapRootIndex); |
789 __ Check(ne, "Declaration in with context.", | 789 __ Check(ne, kDeclarationInWithContext, |
790 a1, Operand(t0)); | 790 a1, Operand(t0)); |
791 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex); | 791 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex); |
792 __ Check(ne, "Declaration in catch context.", | 792 __ Check(ne, kDeclarationInCatchContext, |
793 a1, Operand(t0)); | 793 a1, Operand(t0)); |
794 } | 794 } |
795 } | 795 } |
796 | 796 |
797 | 797 |
798 void FullCodeGenerator::VisitVariableDeclaration( | 798 void FullCodeGenerator::VisitVariableDeclaration( |
799 VariableDeclaration* declaration) { | 799 VariableDeclaration* declaration) { |
800 // If it was not possible to allocate the variable at compile time, we | 800 // If it was not possible to allocate the variable at compile time, we |
801 // need to "declare" it at runtime to make sure it actually exists in the | 801 // need to "declare" it at runtime to make sure it actually exists in the |
802 // local context. | 802 // local context. |
(...skipping 1719 matching lines...)
2522 | 2522 |
2523 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { | 2523 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { |
2524 // Assignment to var or initializing assignment to let/const | 2524 // Assignment to var or initializing assignment to let/const |
2525 // in harmony mode. | 2525 // in harmony mode. |
2526 if (var->IsStackAllocated() || var->IsContextSlot()) { | 2526 if (var->IsStackAllocated() || var->IsContextSlot()) { |
2527 MemOperand location = VarOperand(var, a1); | 2527 MemOperand location = VarOperand(var, a1); |
2528 if (generate_debug_code_ && op == Token::INIT_LET) { | 2528 if (generate_debug_code_ && op == Token::INIT_LET) { |
2529 // Check for an uninitialized let binding. | 2529 // Check for an uninitialized let binding. |
2530 __ lw(a2, location); | 2530 __ lw(a2, location); |
2531 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 2531 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
2532 __ Check(eq, "Let binding re-initialization.", a2, Operand(t0)); | 2532 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0)); |
2533 } | 2533 } |
2534 // Perform the assignment. | 2534 // Perform the assignment. |
2535 __ sw(v0, location); | 2535 __ sw(v0, location); |
2536 if (var->IsContextSlot()) { | 2536 if (var->IsContextSlot()) { |
2537 __ mov(a3, v0); | 2537 __ mov(a3, v0); |
2538 int offset = Context::SlotOffset(var->index()); | 2538 int offset = Context::SlotOffset(var->index()); |
2539 __ RecordWriteContextSlot( | 2539 __ RecordWriteContextSlot( |
2540 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); | 2540 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); |
2541 } | 2541 } |
2542 } else { | 2542 } else { |
(...skipping 942 matching lines...)
3485 __ bind(&done); | 3485 __ bind(&done); |
3486 context()->Plug(v0); | 3486 context()->Plug(v0); |
3487 } | 3487 } |
3488 | 3488 |
3489 | 3489 |
3490 void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string, | 3490 void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string, |
3491 Register index, | 3491 Register index, |
3492 Register value, | 3492 Register value, |
3493 uint32_t encoding_mask) { | 3493 uint32_t encoding_mask) { |
3494 __ And(at, index, Operand(kSmiTagMask)); | 3494 __ And(at, index, Operand(kSmiTagMask)); |
3495 __ Check(eq, "Non-smi index", at, Operand(zero_reg)); | 3495 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg)); |
3496 __ And(at, value, Operand(kSmiTagMask)); | 3496 __ And(at, value, Operand(kSmiTagMask)); |
3497 __ Check(eq, "Non-smi value", at, Operand(zero_reg)); | 3497 __ Check(eq, kNonSmiValue, at, Operand(zero_reg)); |
3498 | 3498 |
3499 __ lw(at, FieldMemOperand(string, String::kLengthOffset)); | 3499 __ lw(at, FieldMemOperand(string, String::kLengthOffset)); |
3500 __ Check(lt, "Index is too large", index, Operand(at)); | 3500 __ Check(lt, kIndexIsTooLarge, index, Operand(at)); |
3501 | 3501 |
3502 __ Check(ge, "Index is negative", index, Operand(zero_reg)); | 3502 __ Check(ge, kIndexIsNegative, index, Operand(zero_reg)); |
3503 | 3503 |
3504 __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset)); | 3504 __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset)); |
3505 __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset)); | 3505 __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset)); |
3506 | 3506 |
3507 __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask)); | 3507 __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask)); |
3508 __ Subu(at, at, Operand(encoding_mask)); | 3508 __ Subu(at, at, Operand(encoding_mask)); |
3509 __ Check(eq, "Unexpected string type", at, Operand(zero_reg)); | 3509 __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg)); |
3510 } | 3510 } |
3511 | 3511 |
3512 | 3512 |
3513 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { | 3513 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { |
3514 ZoneList<Expression*>* args = expr->arguments(); | 3514 ZoneList<Expression*>* args = expr->arguments(); |
3515 ASSERT_EQ(3, args->length()); | 3515 ASSERT_EQ(3, args->length()); |
3516 | 3516 |
3517 Register string = v0; | 3517 Register string = v0; |
3518 Register index = a1; | 3518 Register index = a1; |
3519 Register value = a2; | 3519 Register value = a2; |
(...skipping 354 matching lines...)
3874 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { | 3874 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { |
3875 ZoneList<Expression*>* args = expr->arguments(); | 3875 ZoneList<Expression*>* args = expr->arguments(); |
3876 ASSERT_EQ(2, args->length()); | 3876 ASSERT_EQ(2, args->length()); |
3877 | 3877 |
3878 ASSERT_NE(NULL, args->at(0)->AsLiteral()); | 3878 ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
3879 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); | 3879 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); |
3880 | 3880 |
3881 Handle<FixedArray> jsfunction_result_caches( | 3881 Handle<FixedArray> jsfunction_result_caches( |
3882 isolate()->native_context()->jsfunction_result_caches()); | 3882 isolate()->native_context()->jsfunction_result_caches()); |
3883 if (jsfunction_result_caches->length() <= cache_id) { | 3883 if (jsfunction_result_caches->length() <= cache_id) { |
3884 __ Abort("Attempt to use undefined cache."); | 3884 __ Abort(kAttemptToUseUndefinedCache); |
3885 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | 3885 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
3886 context()->Plug(v0); | 3886 context()->Plug(v0); |
3887 return; | 3887 return; |
3888 } | 3888 } |
3889 | 3889 |
3890 VisitForAccumulatorValue(args->at(1)); | 3890 VisitForAccumulatorValue(args->at(1)); |
3891 | 3891 |
3892 Register key = v0; | 3892 Register key = v0; |
3893 Register cache = a1; | 3893 Register cache = a1; |
3894 __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 3894 __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
(...skipping 161 matching lines...)
4056 __ Addu(elements_end, element, elements_end); | 4056 __ Addu(elements_end, element, elements_end); |
4057 // Loop condition: while (element < elements_end). | 4057 // Loop condition: while (element < elements_end). |
4058 // Live values in registers: | 4058 // Live values in registers: |
4059 // elements: Fixed array of strings. | 4059 // elements: Fixed array of strings. |
4060 // array_length: Length of the fixed array of strings (not smi) | 4060 // array_length: Length of the fixed array of strings (not smi) |
4061 // separator: Separator string | 4061 // separator: Separator string |
4062 // string_length: Accumulated sum of string lengths (smi). | 4062 // string_length: Accumulated sum of string lengths (smi). |
4063 // element: Current array element. | 4063 // element: Current array element. |
4064 // elements_end: Array end. | 4064 // elements_end: Array end. |
4065 if (generate_debug_code_) { | 4065 if (generate_debug_code_) { |
4066 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin", | 4066 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin, |
4067 array_length, Operand(zero_reg)); | 4067 array_length, Operand(zero_reg)); |
4068 } | 4068 } |
4069 __ bind(&loop); | 4069 __ bind(&loop); |
4070 __ lw(string, MemOperand(element)); | 4070 __ lw(string, MemOperand(element)); |
4071 __ Addu(element, element, kPointerSize); | 4071 __ Addu(element, element, kPointerSize); |
4072 __ JumpIfSmi(string, &bailout); | 4072 __ JumpIfSmi(string, &bailout); |
4073 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); | 4073 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); |
4074 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 4074 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
4075 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | 4075 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
4076 __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); | 4076 __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); |
(...skipping 870 matching lines...)
4947 *context_length = 0; | 4947 *context_length = 0; |
4948 return previous_; | 4948 return previous_; |
4949 } | 4949 } |
4950 | 4950 |
4951 | 4951 |
4952 #undef __ | 4952 #undef __ |
4953 | 4953 |
4954 } } // namespace v8::internal | 4954 } } // namespace v8::internal |
4955 | 4955 |
4956 #endif // V8_TARGET_ARCH_MIPS | 4956 #endif // V8_TARGET_ARCH_MIPS |