| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 77 } | 77 } |
| 78 | 78 |
| 79 // When initially emitting this ensure that a jump is always generated to skip | 79 // When initially emitting this ensure that a jump is always generated to skip |
| 80 // the inlined smi code. | 80 // the inlined smi code. |
| 81 void EmitJumpIfNotSmi(Register reg, Label* target) { | 81 void EmitJumpIfNotSmi(Register reg, Label* target) { |
| 82 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 82 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 83 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 83 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 84 __ bind(&patch_site_); | 84 __ bind(&patch_site_); |
| 85 __ andi(at, reg, 0); | 85 __ andi(at, reg, 0); |
| 86 // Always taken before patched. | 86 // Always taken before patched. |
| 87 __ Branch(target, eq, at, Operand(zero_reg)); | 87 __ BranchShort(target, eq, at, Operand(zero_reg)); |
| 88 } | 88 } |
| 89 | 89 |
| 90 // When initially emitting this ensure that a jump is never generated to skip | 90 // When initially emitting this ensure that a jump is never generated to skip |
| 91 // the inlined smi code. | 91 // the inlined smi code. |
| 92 void EmitJumpIfSmi(Register reg, Label* target) { | 92 void EmitJumpIfSmi(Register reg, Label* target) { |
| 93 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 93 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 94 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 94 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 95 __ bind(&patch_site_); | 95 __ bind(&patch_site_); |
| 96 __ andi(at, reg, 0); | 96 __ andi(at, reg, 0); |
| 97 // Never taken before patched. | 97 // Never taken before patched. |
| 98 __ Branch(target, ne, at, Operand(zero_reg)); | 98 __ BranchShort(target, ne, at, Operand(zero_reg)); |
| 99 } | 99 } |
| 100 | 100 |
| 101 void EmitPatchInfo() { | 101 void EmitPatchInfo() { |
| 102 if (patch_site_.is_bound()) { | 102 if (patch_site_.is_bound()) { |
| 103 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 103 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); |
| 104 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); | 104 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); |
| 105 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask); | 105 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask); |
| 106 #ifdef DEBUG | 106 #ifdef DEBUG |
| 107 info_emitted_ = true; | 107 info_emitted_ = true; |
| 108 #endif | 108 #endif |
| (...skipping 1043 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1152 __ bind(&use_cache); | 1152 __ bind(&use_cache); |
| 1153 | 1153 |
| 1154 __ EnumLength(a1, v0); | 1154 __ EnumLength(a1, v0); |
| 1155 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0))); | 1155 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0))); |
| 1156 | 1156 |
| 1157 __ LoadInstanceDescriptors(v0, a2); | 1157 __ LoadInstanceDescriptors(v0, a2); |
| 1158 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset)); | 1158 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset)); |
| 1159 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1159 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 1160 | 1160 |
| 1161 // Set up the four remaining stack slots. | 1161 // Set up the four remaining stack slots. |
| 1162 __ push(v0); // Map. | |
| 1163 __ li(a0, Operand(Smi::FromInt(0))); | 1162 __ li(a0, Operand(Smi::FromInt(0))); |
| 1164 // Push enumeration cache, enumeration cache length (as smi) and zero. | 1163 // Push map, enumeration cache, enumeration cache length (as smi) and zero. |
| 1165 __ Push(a2, a1, a0); | 1164 __ Push(v0, a2, a1, a0); |
| 1166 __ jmp(&loop); | 1165 __ jmp(&loop); |
| 1167 | 1166 |
| 1168 __ bind(&no_descriptors); | 1167 __ bind(&no_descriptors); |
| 1169 __ Drop(1); | 1168 __ Drop(1); |
| 1170 __ jmp(&exit); | 1169 __ jmp(&exit); |
| 1171 | 1170 |
| 1172 // We got a fixed array in register v0. Iterate through that. | 1171 // We got a fixed array in register v0. Iterate through that. |
| 1173 Label non_proxy; | 1172 Label non_proxy; |
| 1174 __ bind(&fixed_array); | 1173 __ bind(&fixed_array); |
| 1175 | 1174 |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1220 __ Branch(&update_each, eq, t0, Operand(a2)); | 1219 __ Branch(&update_each, eq, t0, Operand(a2)); |
| 1221 | 1220 |
| 1222 // For proxies, no filtering is done. | 1221 // For proxies, no filtering is done. |
| 1223 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. | 1222 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. |
| 1224 ASSERT_EQ(Smi::FromInt(0), 0); | 1223 ASSERT_EQ(Smi::FromInt(0), 0); |
| 1225 __ Branch(&update_each, eq, a2, Operand(zero_reg)); | 1224 __ Branch(&update_each, eq, a2, Operand(zero_reg)); |
| 1226 | 1225 |
| 1227 // Convert the entry to a string or (smi) 0 if it isn't a property | 1226 // Convert the entry to a string or (smi) 0 if it isn't a property |
| 1228 // any more. If the property has been removed while iterating, we | 1227 // any more. If the property has been removed while iterating, we |
| 1229 // just skip it. | 1228 // just skip it. |
| 1230 __ push(a1); // Enumerable. | 1229 __ Push(a1, a3); // Enumerable and current entry. |
| 1231 __ push(a3); // Current entry. | |
| 1232 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); | 1230 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); |
| 1233 __ mov(a3, result_register()); | 1231 __ mov(a3, result_register()); |
| 1234 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg)); | 1232 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg)); |
| 1235 | 1233 |
| 1236 // Update the 'each' property or variable from the possibly filtered | 1234 // Update the 'each' property or variable from the possibly filtered |
| 1237 // entry in register a3. | 1235 // entry in register a3. |
| 1238 __ bind(&update_each); | 1236 __ bind(&update_each); |
| 1239 __ mov(result_register(), a3); | 1237 __ mov(result_register(), a3); |
| 1240 // Perform the assignment as if via '='. | 1238 // Perform the assignment as if via '='. |
| 1241 { EffectContext context(this); | 1239 { EffectContext context(this); |
| (...skipping 817 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2059 Label l_catch, l_try, l_suspend, l_continuation, l_resume; | 2057 Label l_catch, l_try, l_suspend, l_continuation, l_resume; |
| 2060 Label l_next, l_call, l_loop; | 2058 Label l_next, l_call, l_loop; |
| 2061 // Initial send value is undefined. | 2059 // Initial send value is undefined. |
| 2062 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | 2060 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
| 2063 __ Branch(&l_next); | 2061 __ Branch(&l_next); |
| 2064 | 2062 |
| 2065 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } | 2063 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } |
| 2066 __ bind(&l_catch); | 2064 __ bind(&l_catch); |
| 2067 __ mov(a0, v0); | 2065 __ mov(a0, v0); |
| 2068 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); | 2066 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); |
| 2069 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw" | 2067 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw" |
| 2070 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter | 2068 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter |
| 2071 __ Push(a3, a0); // iter, exception | 2069 __ Push(a2, a3, a0); // "throw", iter, exception |
| 2072 __ jmp(&l_call); | 2070 __ jmp(&l_call); |
| 2073 | 2071 |
| 2074 // try { received = %yield result } | 2072 // try { received = %yield result } |
| 2075 // Shuffle the received result above a try handler and yield it without | 2073 // Shuffle the received result above a try handler and yield it without |
| 2076 // re-boxing. | 2074 // re-boxing. |
| 2077 __ bind(&l_try); | 2075 __ bind(&l_try); |
| 2078 __ pop(a0); // result | 2076 __ pop(a0); // result |
| 2079 __ PushTryHandler(StackHandler::CATCH, expr->index()); | 2077 __ PushTryHandler(StackHandler::CATCH, expr->index()); |
| 2080 const int handler_size = StackHandlerConstants::kSize; | 2078 const int handler_size = StackHandlerConstants::kSize; |
| 2081 __ push(a0); // result | 2079 __ push(a0); // result |
| 2082 __ jmp(&l_suspend); | 2080 __ jmp(&l_suspend); |
| 2083 __ bind(&l_continuation); | 2081 __ bind(&l_continuation); |
| 2084 __ mov(a0, v0); | 2082 __ mov(a0, v0); |
| 2085 __ jmp(&l_resume); | 2083 __ jmp(&l_resume); |
| 2086 __ bind(&l_suspend); | 2084 __ bind(&l_suspend); |
| 2087 const int generator_object_depth = kPointerSize + handler_size; | 2085 const int generator_object_depth = kPointerSize + handler_size; |
| 2088 __ lw(a0, MemOperand(sp, generator_object_depth)); | 2086 __ lw(a0, MemOperand(sp, generator_object_depth)); |
| 2089 __ push(a0); // g | 2087 __ push(a0); // g |
| 2090 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); | 2088 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); |
| 2091 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); | 2089 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); |
| 2092 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); | 2090 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); |
| 2093 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); | 2091 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); |
| 2094 __ mov(a1, cp); | 2092 __ mov(a1, cp); |
| 2095 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, | 2093 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, |
| 2096 kRAHasBeenSaved, kDontSaveFPRegs); | 2094 kRAHasBeenSaved, kDontSaveFPRegs); |
| 2097 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 2095 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
| 2098 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2096 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2099 __ pop(v0); // result | 2097 __ pop(v0); // result |
| 2100 EmitReturnSequence(); | 2098 EmitReturnSequence(); |
| 2101 __ mov(a0, v0); | 2099 __ mov(a0, v0); |
| 2102 __ bind(&l_resume); // received in a0 | 2100 __ bind(&l_resume); // received in a0 |
| 2103 __ PopTryHandler(); | 2101 __ PopTryHandler(); |
| 2104 | 2102 |
| 2105 // receiver = iter; f = 'next'; arg = received; | 2103 // receiver = iter; f = 'next'; arg = received; |
| 2106 __ bind(&l_next); | 2104 __ bind(&l_next); |
| 2107 __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next" | 2105 __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next" |
| 2108 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter | 2106 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter |
| 2109 __ Push(a3, a0); // iter, received | 2107 __ Push(a2, a3, a0); // "next", iter, received |
| 2110 | 2108 |
| 2111 // result = receiver[f](arg); | 2109 // result = receiver[f](arg); |
| 2112 __ bind(&l_call); | 2110 __ bind(&l_call); |
| 2113 Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1); | 2111 __ lw(a1, MemOperand(sp, kPointerSize)); |
| 2114 CallIC(ic); | 2112 __ lw(a0, MemOperand(sp, 2 * kPointerSize)); |
| 2113 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 2114 CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None()); |
| 2115 __ mov(a0, v0); |
| 2116 __ mov(a1, a0); |
| 2117 __ sw(a1, MemOperand(sp, 2 * kPointerSize)); |
| 2118 CallFunctionStub stub(1, CALL_AS_METHOD); |
| 2119 __ CallStub(&stub); |
| 2120 |
| 2115 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2121 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2122 __ Drop(1); // The function is still on the stack; drop it. |
| 2116 | 2123 |
| 2117 // if (!result.done) goto l_try; | 2124 // if (!result.done) goto l_try; |
| 2118 __ bind(&l_loop); | 2125 __ bind(&l_loop); |
| 2119 __ mov(a0, v0); | 2126 __ mov(a0, v0); |
| 2120 __ push(a0); // save result | 2127 __ push(a0); // save result |
| 2121 __ LoadRoot(a2, Heap::kdone_stringRootIndex); // "done" | 2128 __ LoadRoot(a2, Heap::kdone_stringRootIndex); // "done" |
| 2122 CallLoadIC(NOT_CONTEXTUAL); // result.done in v0 | 2129 CallLoadIC(NOT_CONTEXTUAL); // result.done in v0 |
| 2123 __ mov(a0, v0); | 2130 __ mov(a0, v0); |
| 2124 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); | 2131 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); |
| 2125 CallIC(bool_ic); | 2132 CallIC(bool_ic); |
| (...skipping 207 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2333 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL, | 2340 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL, |
| 2334 expr->BinaryOperationFeedbackId()); | 2341 expr->BinaryOperationFeedbackId()); |
| 2335 patch_site.EmitPatchInfo(); | 2342 patch_site.EmitPatchInfo(); |
| 2336 __ jmp(&done); | 2343 __ jmp(&done); |
| 2337 | 2344 |
| 2338 __ bind(&smi_case); | 2345 __ bind(&smi_case); |
| 2339 // Smi case. This code works the same way as the smi-smi case in the type | 2346 // Smi case. This code works the same way as the smi-smi case in the type |
| 2340 // recording binary operation stub, see | 2347 // recording binary operation stub, see |
| 2341 switch (op) { | 2348 switch (op) { |
| 2342 case Token::SAR: | 2349 case Token::SAR: |
| 2343 __ Branch(&stub_call); | |
| 2344 __ GetLeastBitsFromSmi(scratch1, right, 5); | 2350 __ GetLeastBitsFromSmi(scratch1, right, 5); |
| 2345 __ srav(right, left, scratch1); | 2351 __ srav(right, left, scratch1); |
| 2346 __ And(v0, right, Operand(~kSmiTagMask)); | 2352 __ And(v0, right, Operand(~kSmiTagMask)); |
| 2347 break; | 2353 break; |
| 2348 case Token::SHL: { | 2354 case Token::SHL: { |
| 2349 __ Branch(&stub_call); | |
| 2350 __ SmiUntag(scratch1, left); | 2355 __ SmiUntag(scratch1, left); |
| 2351 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2356 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 2352 __ sllv(scratch1, scratch1, scratch2); | 2357 __ sllv(scratch1, scratch1, scratch2); |
| 2353 __ Addu(scratch2, scratch1, Operand(0x40000000)); | 2358 __ Addu(scratch2, scratch1, Operand(0x40000000)); |
| 2354 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); | 2359 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); |
| 2355 __ SmiTag(v0, scratch1); | 2360 __ SmiTag(v0, scratch1); |
| 2356 break; | 2361 break; |
| 2357 } | 2362 } |
| 2358 case Token::SHR: { | 2363 case Token::SHR: { |
| 2359 __ Branch(&stub_call); | |
| 2360 __ SmiUntag(scratch1, left); | 2364 __ SmiUntag(scratch1, left); |
| 2361 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2365 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 2362 __ srlv(scratch1, scratch1, scratch2); | 2366 __ srlv(scratch1, scratch1, scratch2); |
| 2363 __ And(scratch2, scratch1, 0xc0000000); | 2367 __ And(scratch2, scratch1, 0xc0000000); |
| 2364 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg)); | 2368 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg)); |
| 2365 __ SmiTag(v0, scratch1); | 2369 __ SmiTag(v0, scratch1); |
| 2366 break; | 2370 break; |
| 2367 } | 2371 } |
| 2368 case Token::ADD: | 2372 case Token::ADD: |
| 2369 __ AdduAndCheckForOverflow(v0, left, right, scratch1); | 2373 __ AdduAndCheckForOverflow(v0, left, right, scratch1); |
| (...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2490 __ Branch(&skip, ne, a1, Operand(t0)); | 2494 __ Branch(&skip, ne, a1, Operand(t0)); |
| 2491 __ sw(result_register(), StackOperand(var)); | 2495 __ sw(result_register(), StackOperand(var)); |
| 2492 __ bind(&skip); | 2496 __ bind(&skip); |
| 2493 } else { | 2497 } else { |
| 2494 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); | 2498 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); |
| 2495 // Like var declarations, const declarations are hoisted to function | 2499 // Like var declarations, const declarations are hoisted to function |
| 2496 // scope. However, unlike var initializers, const initializers are | 2500 // scope. However, unlike var initializers, const initializers are |
| 2497 // able to drill a hole to that function context, even from inside a | 2501 // able to drill a hole to that function context, even from inside a |
| 2498 // 'with' context. We thus bypass the normal static scope lookup for | 2502 // 'with' context. We thus bypass the normal static scope lookup for |
| 2499 // var->IsContextSlot(). | 2503 // var->IsContextSlot(). |
| 2500 __ push(v0); | |
| 2501 __ li(a0, Operand(var->name())); | 2504 __ li(a0, Operand(var->name())); |
| 2502 __ Push(cp, a0); // Context and name. | 2505 __ Push(v0, cp, a0); // Value, context and name. |
| 2503 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | 2506 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 2504 } | 2507 } |
| 2505 | 2508 |
| 2506 } else if (var->mode() == LET && op != Token::INIT_LET) { | 2509 } else if (var->mode() == LET && op != Token::INIT_LET) { |
| 2507 // Non-initializing assignment to let variable needs a write barrier. | 2510 // Non-initializing assignment to let variable needs a write barrier. |
| 2508 if (var->IsLookupSlot()) { | 2511 if (var->IsLookupSlot()) { |
| 2509 __ push(v0); // Value. | |
| 2510 __ li(a1, Operand(var->name())); | 2512 __ li(a1, Operand(var->name())); |
| 2511 __ li(a0, Operand(Smi::FromInt(language_mode()))); | 2513 __ li(a0, Operand(Smi::FromInt(language_mode()))); |
| 2512 __ Push(cp, a1, a0); // Context, name, strict mode. | 2514 __ Push(v0, cp, a1, a0); // Value, context, name, strict mode. |
| 2513 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2515 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
| 2514 } else { | 2516 } else { |
| 2515 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 2517 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 2516 Label assign; | 2518 Label assign; |
| 2517 MemOperand location = VarOperand(var, a1); | 2519 MemOperand location = VarOperand(var, a1); |
| 2518 __ lw(a3, location); | 2520 __ lw(a3, location); |
| 2519 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 2521 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 2520 __ Branch(&assign, ne, a3, Operand(t0)); | 2522 __ Branch(&assign, ne, a3, Operand(t0)); |
| 2521 __ li(a3, Operand(var->name())); | 2523 __ li(a3, Operand(var->name())); |
| 2522 __ push(a3); | 2524 __ push(a3); |
| (...skipping 24 matching lines...) Expand all Loading... |
| 2547 // Perform the assignment. | 2549 // Perform the assignment. |
| 2548 __ sw(v0, location); | 2550 __ sw(v0, location); |
| 2549 if (var->IsContextSlot()) { | 2551 if (var->IsContextSlot()) { |
| 2550 __ mov(a3, v0); | 2552 __ mov(a3, v0); |
| 2551 int offset = Context::SlotOffset(var->index()); | 2553 int offset = Context::SlotOffset(var->index()); |
| 2552 __ RecordWriteContextSlot( | 2554 __ RecordWriteContextSlot( |
| 2553 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); | 2555 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); |
| 2554 } | 2556 } |
| 2555 } else { | 2557 } else { |
| 2556 ASSERT(var->IsLookupSlot()); | 2558 ASSERT(var->IsLookupSlot()); |
| 2557 __ push(v0); // Value. | |
| 2558 __ li(a1, Operand(var->name())); | 2559 __ li(a1, Operand(var->name())); |
| 2559 __ li(a0, Operand(Smi::FromInt(language_mode()))); | 2560 __ li(a0, Operand(Smi::FromInt(language_mode()))); |
| 2560 __ Push(cp, a1, a0); // Context, name, strict mode. | 2561 __ Push(v0, cp, a1, a0); // Value, context, name, strict mode. |
| 2561 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2562 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
| 2562 } | 2563 } |
| 2563 } | 2564 } |
| 2564 // Non-initializing assignments to consts are ignored. | 2565 // Non-initializing assignments to consts are ignored. |
| 2565 } | 2566 } |
| 2566 | 2567 |
| 2567 | 2568 |
| 2568 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 2569 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
| 2569 // Assignment to a property, using a named store IC. | 2570 // Assignment to a property, using a named store IC. |
| 2570 Property* prop = expr->target()->AsProperty(); | 2571 Property* prop = expr->target()->AsProperty(); |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2628 | 2629 |
| 2629 void FullCodeGenerator::CallIC(Handle<Code> code, | 2630 void FullCodeGenerator::CallIC(Handle<Code> code, |
| 2630 ContextualMode mode, | 2631 ContextualMode mode, |
| 2631 TypeFeedbackId id) { | 2632 TypeFeedbackId id) { |
| 2632 ic_total_count_++; | 2633 ic_total_count_++; |
| 2633 ASSERT(mode != CONTEXTUAL || id.IsNone()); | 2634 ASSERT(mode != CONTEXTUAL || id.IsNone()); |
| 2634 __ Call(code, RelocInfo::CODE_TARGET, id); | 2635 __ Call(code, RelocInfo::CODE_TARGET, id); |
| 2635 } | 2636 } |
| 2636 | 2637 |
| 2637 | 2638 |
| 2638 void FullCodeGenerator::EmitCallWithIC(Call* expr, | 2639 // Code common for calls using the IC. |
| 2639 Handle<Object> name, | 2640 void FullCodeGenerator::EmitCallWithIC(Call* expr) { |
| 2640 ContextualMode mode) { | 2641 Expression* callee = expr->expression(); |
| 2641 // Code common for calls using the IC. | |
| 2642 ZoneList<Expression*>* args = expr->arguments(); | 2642 ZoneList<Expression*>* args = expr->arguments(); |
| 2643 int arg_count = args->length(); | 2643 int arg_count = args->length(); |
| 2644 { PreservePositionScope scope(masm()->positions_recorder()); | 2644 |
| 2645 for (int i = 0; i < arg_count; i++) { | 2645 CallFunctionFlags flags; |
| 2646 VisitForStackValue(args->at(i)); | 2646 // Get the target function. |
| 2647 if (callee->IsVariableProxy()) { |
| 2648 { StackValueContext context(this); |
| 2649 EmitVariableLoad(callee->AsVariableProxy()); |
| 2650 PrepareForBailout(callee, NO_REGISTERS); |
| 2647 } | 2651 } |
| 2648 __ li(a2, Operand(name)); | 2652 // Push undefined as receiver. This is patched in the method prologue if it |
| 2653 // is a classic mode method. |
| 2654 __ Push(isolate()->factory()->undefined_value()); |
| 2655 flags = NO_CALL_FUNCTION_FLAGS; |
| 2656 } else { |
| 2657 // Load the function from the receiver. |
| 2658 ASSERT(callee->IsProperty()); |
| 2659 __ lw(v0, MemOperand(sp, 0)); |
| 2660 EmitNamedPropertyLoad(callee->AsProperty()); |
| 2661 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
| 2662 // Push the target function under the receiver. |
| 2663 __ lw(at, MemOperand(sp, 0)); |
| 2664 __ push(at); |
| 2665 __ sw(v0, MemOperand(sp, kPointerSize)); |
| 2666 flags = CALL_AS_METHOD; |
| 2649 } | 2667 } |
| 2650 // Record source position for debugger. | |
| 2651 SetSourcePosition(expr->position()); | |
| 2652 // Call the IC initialization code. | |
| 2653 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(arg_count); | |
| 2654 TypeFeedbackId ast_id = mode == CONTEXTUAL | |
| 2655 ? TypeFeedbackId::None() | |
| 2656 : expr->CallFeedbackId(); | |
| 2657 CallIC(ic, mode, ast_id); | |
| 2658 RecordJSReturnSite(expr); | |
| 2659 // Restore context register. | |
| 2660 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 2661 context()->Plug(v0); | |
| 2662 } | |
| 2663 | 2668 |
| 2664 | 2669 // Load the arguments. |
| 2665 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, | |
| 2666 Expression* key) { | |
| 2667 // Load the key. | |
| 2668 VisitForAccumulatorValue(key); | |
| 2669 | |
| 2670 // Swap the name of the function and the receiver on the stack to follow | |
| 2671 // the calling convention for call ICs. | |
| 2672 __ pop(a1); | |
| 2673 __ push(v0); | |
| 2674 __ push(a1); | |
| 2675 | |
| 2676 // Code common for calls using the IC. | |
| 2677 ZoneList<Expression*>* args = expr->arguments(); | |
| 2678 int arg_count = args->length(); | |
| 2679 { PreservePositionScope scope(masm()->positions_recorder()); | 2670 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2680 for (int i = 0; i < arg_count; i++) { | 2671 for (int i = 0; i < arg_count; i++) { |
| 2681 VisitForStackValue(args->at(i)); | 2672 VisitForStackValue(args->at(i)); |
| 2682 } | 2673 } |
| 2683 } | 2674 } |
| 2684 // Record source position for debugger. | 2675 // Record source position for debugger. |
| 2685 SetSourcePosition(expr->position()); | 2676 SetSourcePosition(expr->position()); |
| 2686 // Call the IC initialization code. | 2677 CallFunctionStub stub(arg_count, flags); |
| 2687 Handle<Code> ic = | 2678 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2688 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count); | 2679 __ CallStub(&stub); |
| 2689 __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key. | 2680 |
| 2690 CallIC(ic, NOT_CONTEXTUAL, expr->CallFeedbackId()); | 2681 RecordJSReturnSite(expr); |
| 2682 |
| 2683 // Restore context register. |
| 2684 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2685 |
| 2686 context()->DropAndPlug(1, v0); |
| 2687 } |
| 2688 |
| 2689 |
| 2690 // Code common for calls using the IC. |
| 2691 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, |
| 2692 Expression* key) { |
| 2693 // Load the key. |
| 2694 VisitForAccumulatorValue(key); |
| 2695 |
| 2696 Expression* callee = expr->expression(); |
| 2697 ZoneList<Expression*>* args = expr->arguments(); |
| 2698 int arg_count = args->length(); |
| 2699 |
| 2700 // Load the function from the receiver. |
| 2701 ASSERT(callee->IsProperty()); |
| 2702 __ lw(a1, MemOperand(sp, 0)); |
| 2703 EmitKeyedPropertyLoad(callee->AsProperty()); |
| 2704 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
| 2705 |
| 2706 // Push the target function under the receiver. |
| 2707 __ lw(at, MemOperand(sp, 0)); |
| 2708 __ push(at); |
| 2709 __ sw(v0, MemOperand(sp, kPointerSize)); |
| 2710 |
| 2711 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2712 for (int i = 0; i < arg_count; i++) { |
| 2713 VisitForStackValue(args->at(i)); |
| 2714 } |
| 2715 } |
| 2716 |
| 2717 // Record source position for debugger. |
| 2718 SetSourcePosition(expr->position()); |
| 2719 CallFunctionStub stub(arg_count, CALL_AS_METHOD); |
| 2720 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2721 __ CallStub(&stub); |
| 2722 |
| 2691 RecordJSReturnSite(expr); | 2723 RecordJSReturnSite(expr); |
| 2692 // Restore context register. | 2724 // Restore context register. |
| 2693 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2725 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2694 context()->DropAndPlug(1, v0); // Drop the key still on the stack. | 2726 |
| 2727 context()->DropAndPlug(1, v0); |
| 2695 } | 2728 } |
| 2696 | 2729 |
| 2697 | 2730 |
| 2698 void FullCodeGenerator::EmitCallWithStub(Call* expr) { | 2731 void FullCodeGenerator::EmitCallWithStub(Call* expr) { |
| 2699 // Code common for calls using the call stub. | 2732 // Code common for calls using the call stub. |
| 2700 ZoneList<Expression*>* args = expr->arguments(); | 2733 ZoneList<Expression*>* args = expr->arguments(); |
| 2701 int arg_count = args->length(); | 2734 int arg_count = args->length(); |
| 2702 { PreservePositionScope scope(masm()->positions_recorder()); | 2735 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2703 for (int i = 0; i < arg_count; i++) { | 2736 for (int i = 0; i < arg_count; i++) { |
| 2704 VisitForStackValue(args->at(i)); | 2737 VisitForStackValue(args->at(i)); |
| (...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2791 // Record source position for debugger. | 2824 // Record source position for debugger. |
| 2792 SetSourcePosition(expr->position()); | 2825 SetSourcePosition(expr->position()); |
| 2793 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); | 2826 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); |
| 2794 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2827 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2795 __ CallStub(&stub); | 2828 __ CallStub(&stub); |
| 2796 RecordJSReturnSite(expr); | 2829 RecordJSReturnSite(expr); |
| 2797 // Restore context register. | 2830 // Restore context register. |
| 2798 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2831 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2799 context()->DropAndPlug(1, v0); | 2832 context()->DropAndPlug(1, v0); |
| 2800 } else if (call_type == Call::GLOBAL_CALL) { | 2833 } else if (call_type == Call::GLOBAL_CALL) { |
| 2801 // Push global object as receiver for the call IC. | 2834 EmitCallWithIC(expr); |
| 2802 __ lw(a0, GlobalObjectOperand()); | |
| 2803 __ push(a0); | |
| 2804 VariableProxy* proxy = callee->AsVariableProxy(); | |
| 2805 EmitCallWithIC(expr, proxy->name(), CONTEXTUAL); | |
| 2806 } else if (call_type == Call::LOOKUP_SLOT_CALL) { | 2835 } else if (call_type == Call::LOOKUP_SLOT_CALL) { |
| 2807 // Call to a lookup slot (dynamically introduced variable). | 2836 // Call to a lookup slot (dynamically introduced variable). |
| 2808 VariableProxy* proxy = callee->AsVariableProxy(); | 2837 VariableProxy* proxy = callee->AsVariableProxy(); |
| 2809 Label slow, done; | 2838 Label slow, done; |
| 2810 | 2839 |
| 2811 { PreservePositionScope scope(masm()->positions_recorder()); | 2840 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2812 // Generate code for loading from variables potentially shadowed | 2841 // Generate code for loading from variables potentially shadowed |
| 2813 // by eval-introduced variables. | 2842 // by eval-introduced variables. |
| 2814 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); | 2843 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); |
| 2815 } | 2844 } |
| (...skipping 25 matching lines...) Expand all Loading... |
| 2841 | 2870 |
| 2842 // The receiver is either the global receiver or an object found | 2871 // The receiver is either the global receiver or an object found |
| 2843 // by LoadContextSlot. | 2872 // by LoadContextSlot. |
| 2844 EmitCallWithStub(expr); | 2873 EmitCallWithStub(expr); |
| 2845 } else if (call_type == Call::PROPERTY_CALL) { | 2874 } else if (call_type == Call::PROPERTY_CALL) { |
| 2846 Property* property = callee->AsProperty(); | 2875 Property* property = callee->AsProperty(); |
| 2847 { PreservePositionScope scope(masm()->positions_recorder()); | 2876 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2848 VisitForStackValue(property->obj()); | 2877 VisitForStackValue(property->obj()); |
| 2849 } | 2878 } |
| 2850 if (property->key()->IsPropertyName()) { | 2879 if (property->key()->IsPropertyName()) { |
| 2851 EmitCallWithIC(expr, | 2880 EmitCallWithIC(expr); |
| 2852 property->key()->AsLiteral()->value(), | |
| 2853 NOT_CONTEXTUAL); | |
| 2854 } else { | 2881 } else { |
| 2855 EmitKeyedCallWithIC(expr, property->key()); | 2882 EmitKeyedCallWithIC(expr, property->key()); |
| 2856 } | 2883 } |
| 2857 } else { | 2884 } else { |
| 2858 ASSERT(call_type == Call::OTHER_CALL); | 2885 ASSERT(call_type == Call::OTHER_CALL); |
| 2859 // Call to an arbitrary expression not handled specially above. | 2886 // Call to an arbitrary expression not handled specially above. |
| 2860 { PreservePositionScope scope(masm()->positions_recorder()); | 2887 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2861 VisitForStackValue(callee); | 2888 VisitForStackValue(callee); |
| 2862 } | 2889 } |
| 2863 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | 2890 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); |
| (...skipping 154 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3018 Label materialize_true, materialize_false; | 3045 Label materialize_true, materialize_false; |
| 3019 Label* if_true = NULL; | 3046 Label* if_true = NULL; |
| 3020 Label* if_false = NULL; | 3047 Label* if_false = NULL; |
| 3021 Label* fall_through = NULL; | 3048 Label* fall_through = NULL; |
| 3022 context()->PrepareTest(&materialize_true, &materialize_false, | 3049 context()->PrepareTest(&materialize_true, &materialize_false, |
| 3023 &if_true, &if_false, &fall_through); | 3050 &if_true, &if_false, &fall_through); |
| 3024 | 3051 |
| 3025 __ JumpIfSmi(v0, if_false); | 3052 __ JumpIfSmi(v0, if_false); |
| 3026 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 3053 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 3027 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); | 3054 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); |
| 3055 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3028 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); | 3056 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); |
| 3029 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3030 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); | 3057 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); |
| 3031 | 3058 |
| 3032 context()->Plug(if_true, if_false); | 3059 context()->Plug(if_true, if_false); |
| 3033 } | 3060 } |
| 3034 | 3061 |
| 3035 | 3062 |
| 3036 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( | 3063 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( |
| 3037 CallRuntime* expr) { | 3064 CallRuntime* expr) { |
| 3038 ZoneList<Expression*>* args = expr->arguments(); | 3065 ZoneList<Expression*>* args = expr->arguments(); |
| 3039 ASSERT(args->length() == 1); | 3066 ASSERT(args->length() == 1); |
| (...skipping 1113 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4153 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { | 4180 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { |
| 4154 Handle<String> name = expr->name(); | 4181 Handle<String> name = expr->name(); |
| 4155 if (name->length() > 0 && name->Get(0) == '_') { | 4182 if (name->length() > 0 && name->Get(0) == '_') { |
| 4156 Comment cmnt(masm_, "[ InlineRuntimeCall"); | 4183 Comment cmnt(masm_, "[ InlineRuntimeCall"); |
| 4157 EmitInlineRuntimeCall(expr); | 4184 EmitInlineRuntimeCall(expr); |
| 4158 return; | 4185 return; |
| 4159 } | 4186 } |
| 4160 | 4187 |
| 4161 Comment cmnt(masm_, "[ CallRuntime"); | 4188 Comment cmnt(masm_, "[ CallRuntime"); |
| 4162 ZoneList<Expression*>* args = expr->arguments(); | 4189 ZoneList<Expression*>* args = expr->arguments(); |
| 4190 int arg_count = args->length(); |
| 4163 | 4191 |
| 4164 if (expr->is_jsruntime()) { | 4192 if (expr->is_jsruntime()) { |
| 4165 // Prepare for calling JS runtime function. | 4193 // Push the builtins object as the receiver. |
| 4166 __ lw(a0, GlobalObjectOperand()); | 4194 __ lw(a0, GlobalObjectOperand()); |
| 4167 __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset)); | 4195 __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset)); |
| 4168 __ push(a0); | 4196 __ push(a0); |
| 4169 } | 4197 // Load the function from the receiver. |
| 4198 __ li(a2, Operand(expr->name())); |
| 4199 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); |
| 4170 | 4200 |
| 4171 // Push the arguments ("left-to-right"). | 4201 // Push the target function under the receiver. |
| 4172 int arg_count = args->length(); | 4202 __ lw(at, MemOperand(sp, 0)); |
| 4173 for (int i = 0; i < arg_count; i++) { | 4203 __ push(at); |
| 4174 VisitForStackValue(args->at(i)); | 4204 __ sw(v0, MemOperand(sp, kPointerSize)); |
| 4175 } | |
| 4176 | 4205 |
| 4177 if (expr->is_jsruntime()) { | 4206 // Push the arguments ("left-to-right"). |
| 4178 // Call the JS runtime function. | 4207 int arg_count = args->length(); |
| 4179 __ li(a2, Operand(expr->name())); | 4208 for (int i = 0; i < arg_count; i++) { |
| 4180 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(arg_count); | 4209 VisitForStackValue(args->at(i)); |
| 4181 CallIC(ic, NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); | 4210 } |
| 4211 |
| 4212 // Record source position of the IC call. |
| 4213 SetSourcePosition(expr->position()); |
| 4214 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); |
| 4215 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 4216 __ CallStub(&stub); |
| 4217 |
| 4182 // Restore context register. | 4218 // Restore context register. |
| 4183 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4219 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 4220 |
| 4221 context()->DropAndPlug(1, v0); |
| 4184 } else { | 4222 } else { |
| 4223 // Push the arguments ("left-to-right"). |
| 4224 for (int i = 0; i < arg_count; i++) { |
| 4225 VisitForStackValue(args->at(i)); |
| 4226 } |
| 4227 |
| 4185 // Call the C runtime function. | 4228 // Call the C runtime function. |
| 4186 __ CallRuntime(expr->function(), arg_count); | 4229 __ CallRuntime(expr->function(), arg_count); |
| 4230 context()->Plug(v0); |
| 4187 } | 4231 } |
| 4188 context()->Plug(v0); | |
| 4189 } | 4232 } |
| 4190 | 4233 |
| 4191 | 4234 |
| 4192 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 4235 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
| 4193 switch (expr->op()) { | 4236 switch (expr->op()) { |
| 4194 case Token::DELETE: { | 4237 case Token::DELETE: { |
| 4195 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 4238 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
| 4196 Property* property = expr->expression()->AsProperty(); | 4239 Property* property = expr->expression()->AsProperty(); |
| 4197 VariableProxy* proxy = expr->expression()->AsVariableProxy(); | 4240 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
| 4198 | 4241 |
| (...skipping 728 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4927 Assembler::target_address_at(pc_immediate_load_address)) == | 4970 Assembler::target_address_at(pc_immediate_load_address)) == |
| 4928 reinterpret_cast<uint32_t>( | 4971 reinterpret_cast<uint32_t>( |
| 4929 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4972 isolate->builtins()->OsrAfterStackCheck()->entry())); |
| 4930 return OSR_AFTER_STACK_CHECK; | 4973 return OSR_AFTER_STACK_CHECK; |
| 4931 } | 4974 } |
| 4932 | 4975 |
| 4933 | 4976 |
| 4934 } } // namespace v8::internal | 4977 } } // namespace v8::internal |
| 4935 | 4978 |
| 4936 #endif // V8_TARGET_ARCH_MIPS | 4979 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |