OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 225 matching lines...)
236 if (!function_in_register_x1) { | 236 if (!function_in_register_x1) { |
237 // Load this again, if it's used by the local context below. | 237 // Load this again, if it's used by the local context below. |
238 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 238 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
239 } else { | 239 } else { |
240 __ Mov(x3, x1); | 240 __ Mov(x3, x1); |
241 } | 241 } |
242 // Receiver is just before the parameters on the caller's stack. | 242 // Receiver is just before the parameters on the caller's stack. |
243 int num_parameters = info->scope()->num_parameters(); | 243 int num_parameters = info->scope()->num_parameters(); |
244 int offset = num_parameters * kPointerSize; | 244 int offset = num_parameters * kPointerSize; |
245 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset); | 245 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset); |
246 __ Mov(x1, Operand(Smi::FromInt(num_parameters))); | 246 __ Mov(x1, Smi::FromInt(num_parameters)); |
247 __ Push(x3, x2, x1); | 247 __ Push(x3, x2, x1); |
248 | 248 |
249 // Arguments to ArgumentsAccessStub: | 249 // Arguments to ArgumentsAccessStub: |
250 // function, receiver address, parameter count. | 250 // function, receiver address, parameter count. |
251 // The stub will rewrite receiver and parameter count if the previous | 251 // The stub will rewrite receiver and parameter count if the previous |
252 // stack frame was an arguments adapter frame. | 252 // stack frame was an arguments adapter frame. |
253 ArgumentsAccessStub::Type type; | 253 ArgumentsAccessStub::Type type; |
254 if (strict_mode() == STRICT) { | 254 if (strict_mode() == STRICT) { |
255 type = ArgumentsAccessStub::NEW_STRICT; | 255 type = ArgumentsAccessStub::NEW_STRICT; |
256 } else if (function()->has_duplicate_parameters()) { | 256 } else if (function()->has_duplicate_parameters()) { |
(...skipping 58 matching lines...)
315 EmitReturnSequence(); | 315 EmitReturnSequence(); |
316 | 316 |
317 // Force emission of the pools, so they don't get emitted in the middle | 317 // Force emission of the pools, so they don't get emitted in the middle |
318 // of the back edge table. | 318 // of the back edge table. |
319 masm()->CheckVeneerPool(true, false); | 319 masm()->CheckVeneerPool(true, false); |
320 masm()->CheckConstPool(true, false); | 320 masm()->CheckConstPool(true, false); |
321 } | 321 } |
322 | 322 |
323 | 323 |
324 void FullCodeGenerator::ClearAccumulator() { | 324 void FullCodeGenerator::ClearAccumulator() { |
325 __ Mov(x0, Operand(Smi::FromInt(0))); | 325 __ Mov(x0, Smi::FromInt(0)); |
326 } | 326 } |
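
The mechanical change running through this patch is dropping the explicit Operand(...) wrapper around Smi immediates passed to Mov, Cmp, Add and friends (compare the removed TODO "Can these Operand constructors be implicit?" further down). A minimal, self-contained sketch of why the new spelling type-checks, assuming an implicit Operand constructor taking a Smi*; the types below are hypothetical stand-ins for illustration, not the real V8 arm64 definitions:

    #include <cstdint>

    // Fake smi: a small integer tagged into a pointer-sized word. Note that
    // FromInt(0) has bit pattern 0, which is why call sites can push xzr or
    // compare a register directly against Smi::FromInt(0).
    struct Smi {
      static Smi* FromInt(intptr_t i) {
        return reinterpret_cast<Smi*>(i << 1);  // tag by shifting; 0 stays 0
      }
    };

    // Fake operand: the non-explicit Smi* constructor is what makes the
    // implicit conversion at the call sites possible.
    struct Operand {
      intptr_t immediate;
      Operand(Smi* smi)                      // implicit on purpose
          : immediate(reinterpret_cast<intptr_t>(smi)) {}
      explicit Operand(intptr_t imm) : immediate(imm) {}
    };

    struct MacroAssembler {
      void Mov(int dst, const Operand& op) { /* emit a move of op.immediate */ }
    };

    int main() {
      MacroAssembler masm;
      masm.Mov(0, Operand(Smi::FromInt(42)));  // old spelling, explicit wrapper
      masm.Mov(0, Smi::FromInt(42));           // new spelling, implicit conversion
      return 0;
    }
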
327 | 327 |
328 | 328 |
329 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 329 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
330 __ Mov(x2, Operand(profiling_counter_)); | 330 __ Mov(x2, Operand(profiling_counter_)); |
331 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset)); | 331 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset)); |
332 __ Subs(x3, x3, Operand(Smi::FromInt(delta))); | 332 __ Subs(x3, x3, Smi::FromInt(delta)); |
333 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); | 333 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); |
334 } | 334 } |
335 | 335 |
336 | 336 |
337 void FullCodeGenerator::EmitProfilingCounterReset() { | 337 void FullCodeGenerator::EmitProfilingCounterReset() { |
338 int reset_value = FLAG_interrupt_budget; | 338 int reset_value = FLAG_interrupt_budget; |
339 if (isolate()->IsDebuggerActive()) { | 339 if (isolate()->IsDebuggerActive()) { |
340 // Detect debug break requests as soon as possible. | 340 // Detect debug break requests as soon as possible. |
341 reset_value = FLAG_interrupt_budget >> 4; | 341 reset_value = FLAG_interrupt_budget >> 4; |
342 } | 342 } |
343 __ Mov(x2, Operand(profiling_counter_)); | 343 __ Mov(x2, Operand(profiling_counter_)); |
344 __ Mov(x3, Operand(Smi::FromInt(reset_value))); | 344 __ Mov(x3, Smi::FromInt(reset_value)); |
345 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); | 345 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); |
346 } | 346 } |
347 | 347 |
348 | 348 |
349 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 349 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
350 Label* back_edge_target) { | 350 Label* back_edge_target) { |
351 ASSERT(jssp.Is(__ StackPointer())); | 351 ASSERT(jssp.Is(__ StackPointer())); |
352 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 352 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
353 // Block literal pools whilst emitting back edge code. | 353 // Block literal pools whilst emitting back edge code. |
354 Assembler::BlockPoolsScope block_const_pool(masm_); | 354 Assembler::BlockPoolsScope block_const_pool(masm_); |
(...skipping 469 matching lines...)
824 } | 824 } |
825 break; | 825 break; |
826 | 826 |
827 case Variable::LOOKUP: { | 827 case Variable::LOOKUP: { |
828 Comment cmnt(masm_, "[ VariableDeclaration"); | 828 Comment cmnt(masm_, "[ VariableDeclaration"); |
829 __ Mov(x2, Operand(variable->name())); | 829 __ Mov(x2, Operand(variable->name())); |
830 // Declaration nodes are always introduced in one of four modes. | 830 // Declaration nodes are always introduced in one of four modes. |
831 ASSERT(IsDeclaredVariableMode(mode)); | 831 ASSERT(IsDeclaredVariableMode(mode)); |
832 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY | 832 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY |
833 : NONE; | 833 : NONE; |
834 __ Mov(x1, Operand(Smi::FromInt(attr))); | 834 __ Mov(x1, Smi::FromInt(attr)); |
835 // Push initial value, if any. | 835 // Push initial value, if any. |
836 // Note: For variables we must not push an initial value (such as | 836 // Note: For variables we must not push an initial value (such as |
837 // 'undefined') because we may have a (legal) redeclaration and we | 837 // 'undefined') because we may have a (legal) redeclaration and we |
838 // must not destroy the current value. | 838 // must not destroy the current value. |
839 if (hole_init) { | 839 if (hole_init) { |
840 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex); | 840 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex); |
841 __ Push(cp, x2, x1, x0); | 841 __ Push(cp, x2, x1, x0); |
842 } else { | 842 } else { |
843 // Pushing 0 (xzr) indicates no initial value. | 843 // Pushing 0 (xzr) indicates no initial value. |
844 __ Push(cp, x2, x1, xzr); | 844 __ Push(cp, x2, x1, xzr); |
(...skipping 43 matching lines...)
888 kDontSaveFPRegs, | 888 kDontSaveFPRegs, |
889 EMIT_REMEMBERED_SET, | 889 EMIT_REMEMBERED_SET, |
890 OMIT_SMI_CHECK); | 890 OMIT_SMI_CHECK); |
891 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 891 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
892 break; | 892 break; |
893 } | 893 } |
894 | 894 |
895 case Variable::LOOKUP: { | 895 case Variable::LOOKUP: { |
896 Comment cmnt(masm_, "[ Function Declaration"); | 896 Comment cmnt(masm_, "[ Function Declaration"); |
897 __ Mov(x2, Operand(variable->name())); | 897 __ Mov(x2, Operand(variable->name())); |
898 __ Mov(x1, Operand(Smi::FromInt(NONE))); | 898 __ Mov(x1, Smi::FromInt(NONE)); |
899 __ Push(cp, x2, x1); | 899 __ Push(cp, x2, x1); |
900 // Push initial value for function declaration. | 900 // Push initial value for function declaration. |
901 VisitForStackValue(declaration->fun()); | 901 VisitForStackValue(declaration->fun()); |
902 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | 902 __ CallRuntime(Runtime::kDeclareContextSlot, 4); |
903 break; | 903 break; |
904 } | 904 } |
905 } | 905 } |
906 } | 906 } |
907 | 907 |
908 | 908 |
(...skipping 55 matching lines...)
964 // TODO(rossberg) | 964 // TODO(rossberg) |
965 } | 965 } |
966 | 966 |
967 | 967 |
968 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 968 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
969 // Call the runtime to declare the globals. | 969 // Call the runtime to declare the globals. |
970 __ Mov(x11, Operand(pairs)); | 970 __ Mov(x11, Operand(pairs)); |
971 Register flags = xzr; | 971 Register flags = xzr; |
972 if (Smi::FromInt(DeclareGlobalsFlags())) { | 972 if (Smi::FromInt(DeclareGlobalsFlags())) { |
973 flags = x10; | 973 flags = x10; |
974 __ Mov(flags, Operand(Smi::FromInt(DeclareGlobalsFlags()))); | 974 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags())); |
975 } | 975 } |
976 __ Push(cp, x11, flags); | 976 __ Push(cp, x11, flags); |
977 __ CallRuntime(Runtime::kDeclareGlobals, 3); | 977 __ CallRuntime(Runtime::kDeclareGlobals, 3); |
978 // Return value is ignored. | 978 // Return value is ignored. |
979 } | 979 } |
980 | 980 |
981 | 981 |
982 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { | 982 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { |
983 // Call the runtime to declare the modules. | 983 // Call the runtime to declare the modules. |
984 __ Push(descriptions); | 984 __ Push(descriptions); |
(...skipping 158 matching lines...)
1143 __ EnumLengthUntagged(x1, x0); | 1143 __ EnumLengthUntagged(x1, x0); |
1144 __ Cbz(x1, &no_descriptors); | 1144 __ Cbz(x1, &no_descriptors); |
1145 | 1145 |
1146 __ LoadInstanceDescriptors(x0, x2); | 1146 __ LoadInstanceDescriptors(x0, x2); |
1147 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset)); | 1147 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset)); |
1148 __ Ldr(x2, | 1148 __ Ldr(x2, |
1149 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1149 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
1150 | 1150 |
1151 // Set up the four remaining stack slots. | 1151 // Set up the four remaining stack slots. |
1152 __ Push(x0); // Map. | 1152 __ Push(x0); // Map. |
1153 __ Mov(x0, Operand(Smi::FromInt(0))); | 1153 __ Mov(x0, Smi::FromInt(0)); |
1154 // Push enumeration cache, enumeration cache length (as smi) and zero. | 1154 // Push enumeration cache, enumeration cache length (as smi) and zero. |
1155 __ SmiTag(x1); | 1155 __ SmiTag(x1); |
1156 __ Push(x2, x1, x0); | 1156 __ Push(x2, x1, x0); |
1157 __ B(&loop); | 1157 __ B(&loop); |
1158 | 1158 |
1159 __ Bind(&no_descriptors); | 1159 __ Bind(&no_descriptors); |
1160 __ Drop(1); | 1160 __ Drop(1); |
1161 __ B(&exit); | 1161 __ B(&exit); |
1162 | 1162 |
1163 // We got a fixed array in register x0. Iterate through that. | 1163 // We got a fixed array in register x0. Iterate through that. |
1164 __ Bind(&fixed_array); | 1164 __ Bind(&fixed_array); |
1165 | 1165 |
1166 Handle<Object> feedback = Handle<Object>( | 1166 Handle<Object> feedback = Handle<Object>( |
1167 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), | 1167 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), |
1168 isolate()); | 1168 isolate()); |
1169 StoreFeedbackVectorSlot(slot, feedback); | 1169 StoreFeedbackVectorSlot(slot, feedback); |
1170 __ LoadObject(x1, FeedbackVector()); | 1170 __ LoadObject(x1, FeedbackVector()); |
1171 __ Mov(x10, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); | 1171 __ Mov(x10, Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)); |
1172 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot))); | 1172 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot))); |
1173 | 1173 |
1174 __ Mov(x1, Operand(Smi::FromInt(1))); // Smi indicates slow check. | 1174 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check. |
1175 __ Peek(x10, 0); // Get enumerated object. | 1175 __ Peek(x10, 0); // Get enumerated object. |
1176 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 1176 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
1177 // TODO(all): similar check was done already. Can we avoid it here? | 1177 // TODO(all): similar check was done already. Can we avoid it here? |
1178 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE); | 1178 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE); |
1179 ASSERT(Smi::FromInt(0) == 0); | 1179 ASSERT(Smi::FromInt(0) == 0); |
1180 __ CzeroX(x1, le); // Zero indicates proxy. | 1180 __ CzeroX(x1, le); // Zero indicates proxy. |
1181 __ Push(x1, x0); // Smi and array | 1181 __ Push(x1, x0); // Smi and array |
1182 __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset)); | 1182 __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset)); |
1183 __ Push(x1, xzr); // Fixed array length (as smi) and initial index. | 1183 __ Push(x1, xzr); // Fixed array length (as smi) and initial index. |
1184 | 1184 |
(...skipping 45 matching lines...)
1230 } | 1230 } |
1231 | 1231 |
1232 // Generate code for the body of the loop. | 1232 // Generate code for the body of the loop. |
1233 Visit(stmt->body()); | 1233 Visit(stmt->body()); |
1234 | 1234 |
1235 // Generate code for going to the next element by incrementing | 1235 // Generate code for going to the next element by incrementing |
1236 // the index (smi) stored on top of the stack. | 1236 // the index (smi) stored on top of the stack. |
1237 __ Bind(loop_statement.continue_label()); | 1237 __ Bind(loop_statement.continue_label()); |
1238 // TODO(all): We could use a callee saved register to avoid popping. | 1238 // TODO(all): We could use a callee saved register to avoid popping. |
1239 __ Pop(x0); | 1239 __ Pop(x0); |
1240 __ Add(x0, x0, Operand(Smi::FromInt(1))); | 1240 __ Add(x0, x0, Smi::FromInt(1)); |
1241 __ Push(x0); | 1241 __ Push(x0); |
1242 | 1242 |
1243 EmitBackEdgeBookkeeping(stmt, &loop); | 1243 EmitBackEdgeBookkeeping(stmt, &loop); |
1244 __ B(&loop); | 1244 __ B(&loop); |
1245 | 1245 |
1246 // Remove the pointers stored on the stack. | 1246 // Remove the pointers stored on the stack. |
1247 __ Bind(loop_statement.break_label()); | 1247 __ Bind(loop_statement.break_label()); |
1248 __ Drop(5); | 1248 __ Drop(5); |
1249 | 1249 |
1250 // Exit and decrement the loop depth. | 1250 // Exit and decrement the loop depth. |
(...skipping 324 matching lines...)
1575 // x0 = RegExp literal clone | 1575 // x0 = RegExp literal clone |
1576 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1576 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1577 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset)); | 1577 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset)); |
1578 int literal_offset = | 1578 int literal_offset = |
1579 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; | 1579 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; |
1580 __ Ldr(x5, FieldMemOperand(x4, literal_offset)); | 1580 __ Ldr(x5, FieldMemOperand(x4, literal_offset)); |
1581 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized); | 1581 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized); |
1582 | 1582 |
1583 // Create regexp literal using runtime function. | 1583 // Create regexp literal using runtime function. |
1584 // Result will be in x0. | 1584 // Result will be in x0. |
1585 __ Mov(x3, Operand(Smi::FromInt(expr->literal_index()))); | 1585 __ Mov(x3, Smi::FromInt(expr->literal_index())); |
1586 __ Mov(x2, Operand(expr->pattern())); | 1586 __ Mov(x2, Operand(expr->pattern())); |
1587 __ Mov(x1, Operand(expr->flags())); | 1587 __ Mov(x1, Operand(expr->flags())); |
1588 __ Push(x4, x3, x2, x1); | 1588 __ Push(x4, x3, x2, x1); |
1589 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | 1589 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
1590 __ Mov(x5, x0); | 1590 __ Mov(x5, x0); |
1591 | 1591 |
1592 __ Bind(&materialized); | 1592 __ Bind(&materialized); |
1593 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 1593 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
1594 Label allocated, runtime_allocate; | 1594 Label allocated, runtime_allocate; |
1595 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT); | 1595 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT); |
1596 __ B(&allocated); | 1596 __ B(&allocated); |
1597 | 1597 |
1598 __ Bind(&runtime_allocate); | 1598 __ Bind(&runtime_allocate); |
1599 __ Mov(x10, Operand(Smi::FromInt(size))); | 1599 __ Mov(x10, Smi::FromInt(size)); |
1600 __ Push(x5, x10); | 1600 __ Push(x5, x10); |
1601 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 1601 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
1602 __ Pop(x5); | 1602 __ Pop(x5); |
1603 | 1603 |
1604 __ Bind(&allocated); | 1604 __ Bind(&allocated); |
1605 // After this, registers are used as follows: | 1605 // After this, registers are used as follows: |
1606 // x0: Newly allocated regexp. | 1606 // x0: Newly allocated regexp. |
1607 // x5: Materialized regexp. | 1607 // x5: Materialized regexp. |
1608 // x10, x11, x12: temps. | 1608 // x10, x11, x12: temps. |
1609 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize); | 1609 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize); |
(...skipping 11 matching lines...)
1621 } | 1621 } |
1622 | 1622 |
1623 | 1623 |
1624 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | 1624 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { |
1625 Comment cmnt(masm_, "[ ObjectLiteral"); | 1625 Comment cmnt(masm_, "[ ObjectLiteral"); |
1626 | 1626 |
1627 expr->BuildConstantProperties(isolate()); | 1627 expr->BuildConstantProperties(isolate()); |
1628 Handle<FixedArray> constant_properties = expr->constant_properties(); | 1628 Handle<FixedArray> constant_properties = expr->constant_properties(); |
1629 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1629 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1630 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset)); | 1630 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset)); |
1631 __ Mov(x2, Operand(Smi::FromInt(expr->literal_index()))); | 1631 __ Mov(x2, Smi::FromInt(expr->literal_index())); |
1632 __ Mov(x1, Operand(constant_properties)); | 1632 __ Mov(x1, Operand(constant_properties)); |
1633 int flags = expr->fast_elements() | 1633 int flags = expr->fast_elements() |
1634 ? ObjectLiteral::kFastElements | 1634 ? ObjectLiteral::kFastElements |
1635 : ObjectLiteral::kNoFlags; | 1635 : ObjectLiteral::kNoFlags; |
1636 flags |= expr->has_function() | 1636 flags |= expr->has_function() |
1637 ? ObjectLiteral::kHasFunction | 1637 ? ObjectLiteral::kHasFunction |
1638 : ObjectLiteral::kNoFlags; | 1638 : ObjectLiteral::kNoFlags; |
1639 __ Mov(x0, Operand(Smi::FromInt(flags))); | 1639 __ Mov(x0, Smi::FromInt(flags)); |
1640 int properties_count = constant_properties->length() / 2; | 1640 int properties_count = constant_properties->length() / 2; |
1641 const int max_cloned_properties = | 1641 const int max_cloned_properties = |
1642 FastCloneShallowObjectStub::kMaximumClonedProperties; | 1642 FastCloneShallowObjectStub::kMaximumClonedProperties; |
1643 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || | 1643 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || |
1644 flags != ObjectLiteral::kFastElements || | 1644 flags != ObjectLiteral::kFastElements || |
1645 properties_count > max_cloned_properties) { | 1645 properties_count > max_cloned_properties) { |
1646 __ Push(x3, x2, x1, x0); | 1646 __ Push(x3, x2, x1, x0); |
1647 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); | 1647 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); |
1648 } else { | 1648 } else { |
1649 FastCloneShallowObjectStub stub(properties_count); | 1649 FastCloneShallowObjectStub stub(properties_count); |
(...skipping 38 matching lines...)
1688 VisitForEffect(value); | 1688 VisitForEffect(value); |
1689 } | 1689 } |
1690 break; | 1690 break; |
1691 } | 1691 } |
1692 if (property->emit_store()) { | 1692 if (property->emit_store()) { |
1693 // Duplicate receiver on stack. | 1693 // Duplicate receiver on stack. |
1694 __ Peek(x0, 0); | 1694 __ Peek(x0, 0); |
1695 __ Push(x0); | 1695 __ Push(x0); |
1696 VisitForStackValue(key); | 1696 VisitForStackValue(key); |
1697 VisitForStackValue(value); | 1697 VisitForStackValue(value); |
1698 __ Mov(x0, Operand(Smi::FromInt(NONE))); // PropertyAttributes | 1698 __ Mov(x0, Smi::FromInt(NONE)); // PropertyAttributes |
1699 __ Push(x0); | 1699 __ Push(x0); |
1700 __ CallRuntime(Runtime::kSetProperty, 4); | 1700 __ CallRuntime(Runtime::kSetProperty, 4); |
1701 } else { | 1701 } else { |
1702 VisitForEffect(key); | 1702 VisitForEffect(key); |
1703 VisitForEffect(value); | 1703 VisitForEffect(value); |
1704 } | 1704 } |
1705 break; | 1705 break; |
1706 case ObjectLiteral::Property::PROTOTYPE: | 1706 case ObjectLiteral::Property::PROTOTYPE: |
1707 if (property->emit_store()) { | 1707 if (property->emit_store()) { |
1708 // Duplicate receiver on stack. | 1708 // Duplicate receiver on stack. |
(...skipping 17 matching lines...)
1726 // Emit code to define accessors, using only a single call to the runtime for | 1726 // Emit code to define accessors, using only a single call to the runtime for |
1727 // each pair of corresponding getters and setters. | 1727 // each pair of corresponding getters and setters. |
1728 for (AccessorTable::Iterator it = accessor_table.begin(); | 1728 for (AccessorTable::Iterator it = accessor_table.begin(); |
1729 it != accessor_table.end(); | 1729 it != accessor_table.end(); |
1730 ++it) { | 1730 ++it) { |
1731 __ Peek(x10, 0); // Duplicate receiver. | 1731 __ Peek(x10, 0); // Duplicate receiver. |
1732 __ Push(x10); | 1732 __ Push(x10); |
1733 VisitForStackValue(it->first); | 1733 VisitForStackValue(it->first); |
1734 EmitAccessor(it->second->getter); | 1734 EmitAccessor(it->second->getter); |
1735 EmitAccessor(it->second->setter); | 1735 EmitAccessor(it->second->setter); |
1736 __ Mov(x10, Operand(Smi::FromInt(NONE))); | 1736 __ Mov(x10, Smi::FromInt(NONE)); |
1737 __ Push(x10); | 1737 __ Push(x10); |
1738 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5); | 1738 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5); |
1739 } | 1739 } |
1740 | 1740 |
1741 if (expr->has_function()) { | 1741 if (expr->has_function()) { |
1742 ASSERT(result_saved); | 1742 ASSERT(result_saved); |
1743 __ Peek(x0, 0); | 1743 __ Peek(x0, 0); |
1744 __ Push(x0); | 1744 __ Push(x0); |
1745 __ CallRuntime(Runtime::kToFastProperties, 1); | 1745 __ CallRuntime(Runtime::kToFastProperties, 1); |
1746 } | 1746 } |
(...skipping 25 matching lines...)
1772 | 1772 |
1773 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | 1773 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; |
1774 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { | 1774 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { |
1775 // If the only customer of allocation sites is transitioning, then | 1775 // If the only customer of allocation sites is transitioning, then |
1776 // we can turn it off if we don't have anywhere else to transition to. | 1776 // we can turn it off if we don't have anywhere else to transition to. |
1777 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | 1777 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; |
1778 } | 1778 } |
1779 | 1779 |
1780 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1780 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1781 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset)); | 1781 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset)); |
1782 // TODO(jbramley): Can these Operand constructors be implicit? | 1782 __ Mov(x2, Smi::FromInt(expr->literal_index())); |
1783 __ Mov(x2, Operand(Smi::FromInt(expr->literal_index()))); | |
1784 __ Mov(x1, Operand(constant_elements)); | 1783 __ Mov(x1, Operand(constant_elements)); |
1785 if (has_fast_elements && constant_elements_values->map() == | 1784 if (has_fast_elements && constant_elements_values->map() == |
1786 isolate()->heap()->fixed_cow_array_map()) { | 1785 isolate()->heap()->fixed_cow_array_map()) { |
1787 FastCloneShallowArrayStub stub( | 1786 FastCloneShallowArrayStub stub( |
1788 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, | 1787 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, |
1789 allocation_site_mode, | 1788 allocation_site_mode, |
1790 length); | 1789 length); |
1791 __ CallStub(&stub); | 1790 __ CallStub(&stub); |
1792 __ IncrementCounter( | 1791 __ IncrementCounter( |
1793 isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11); | 1792 isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11); |
1794 } else if ((expr->depth() > 1) || Serializer::enabled() || | 1793 } else if ((expr->depth() > 1) || Serializer::enabled() || |
1795 length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 1794 length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
1796 __ Mov(x0, Operand(Smi::FromInt(flags))); | 1795 __ Mov(x0, Smi::FromInt(flags)); |
1797 __ Push(x3, x2, x1, x0); | 1796 __ Push(x3, x2, x1, x0); |
1798 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); | 1797 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); |
1799 } else { | 1798 } else { |
1800 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || | 1799 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || |
1801 FLAG_smi_only_arrays); | 1800 FLAG_smi_only_arrays); |
1802 FastCloneShallowArrayStub::Mode mode = | 1801 FastCloneShallowArrayStub::Mode mode = |
1803 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; | 1802 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; |
1804 | 1803 |
1805 if (has_fast_elements) { | 1804 if (has_fast_elements) { |
1806 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; | 1805 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; |
(...skipping 23 matching lines...)
1830 if (IsFastObjectElementsKind(constant_elements_kind)) { | 1829 if (IsFastObjectElementsKind(constant_elements_kind)) { |
1831 int offset = FixedArray::kHeaderSize + (i * kPointerSize); | 1830 int offset = FixedArray::kHeaderSize + (i * kPointerSize); |
1832 __ Peek(x6, kPointerSize); // Copy of array literal. | 1831 __ Peek(x6, kPointerSize); // Copy of array literal. |
1833 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset)); | 1832 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset)); |
1834 __ Str(result_register(), FieldMemOperand(x1, offset)); | 1833 __ Str(result_register(), FieldMemOperand(x1, offset)); |
1835 // Update the write barrier for the array store. | 1834 // Update the write barrier for the array store. |
1836 __ RecordWriteField(x1, offset, result_register(), x10, | 1835 __ RecordWriteField(x1, offset, result_register(), x10, |
1837 kLRHasBeenSaved, kDontSaveFPRegs, | 1836 kLRHasBeenSaved, kDontSaveFPRegs, |
1838 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); | 1837 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); |
1839 } else { | 1838 } else { |
1840 __ Mov(x3, Operand(Smi::FromInt(i))); | 1839 __ Mov(x3, Smi::FromInt(i)); |
1841 StoreArrayLiteralElementStub stub; | 1840 StoreArrayLiteralElementStub stub; |
1842 __ CallStub(&stub); | 1841 __ CallStub(&stub); |
1843 } | 1842 } |
1844 | 1843 |
1845 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); | 1844 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); |
1846 } | 1845 } |
1847 | 1846 |
1848 if (result_saved) { | 1847 if (result_saved) { |
1849 __ Drop(1); // literal index | 1848 __ Drop(1); // literal index |
1850 context()->PlugTOS(); | 1849 context()->PlugTOS(); |
(...skipping 302 matching lines...)
2153 int offset = Context::SlotOffset(var->index()); | 2152 int offset = Context::SlotOffset(var->index()); |
2154 __ RecordWriteContextSlot( | 2153 __ RecordWriteContextSlot( |
2155 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs); | 2154 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs); |
2156 } | 2155 } |
2157 } | 2156 } |
2158 | 2157 |
2159 | 2158 |
2160 void FullCodeGenerator::EmitCallStoreContextSlot( | 2159 void FullCodeGenerator::EmitCallStoreContextSlot( |
2161 Handle<String> name, StrictMode strict_mode) { | 2160 Handle<String> name, StrictMode strict_mode) { |
2162 __ Mov(x11, Operand(name)); | 2161 __ Mov(x11, Operand(name)); |
2163 __ Mov(x10, Operand(Smi::FromInt(strict_mode))); | 2162 __ Mov(x10, Smi::FromInt(strict_mode)); |
2164 // jssp[0] : mode. | 2163 // jssp[0] : mode. |
2165 // jssp[8] : name. | 2164 // jssp[8] : name. |
2166 // jssp[16] : context. | 2165 // jssp[16] : context. |
2167 // jssp[24] : value. | 2166 // jssp[24] : value. |
2168 __ Push(x0, cp, x11, x10); | 2167 __ Push(x0, cp, x11, x10); |
2169 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2168 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
2170 } | 2169 } |
2171 | 2170 |
2172 | 2171 |
2173 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 2172 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
(...skipping 229 matching lines...)
2403 VisitForStackValue(args->at(i)); | 2402 VisitForStackValue(args->at(i)); |
2404 } | 2403 } |
2405 } | 2404 } |
2406 // Record source position for debugger. | 2405 // Record source position for debugger. |
2407 SetSourcePosition(expr->position()); | 2406 SetSourcePosition(expr->position()); |
2408 | 2407 |
2409 Handle<Object> uninitialized = | 2408 Handle<Object> uninitialized = |
2410 TypeFeedbackInfo::UninitializedSentinel(isolate()); | 2409 TypeFeedbackInfo::UninitializedSentinel(isolate()); |
2411 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); | 2410 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); |
2412 __ LoadObject(x2, FeedbackVector()); | 2411 __ LoadObject(x2, FeedbackVector()); |
2413 __ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); | 2412 __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot())); |
2414 | 2413 |
2415 // Record call targets in unoptimized code. | 2414 // Record call targets in unoptimized code. |
2416 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); | 2415 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); |
2417 __ Peek(x1, (arg_count + 1) * kXRegSize); | 2416 __ Peek(x1, (arg_count + 1) * kXRegSize); |
2418 __ CallStub(&stub); | 2417 __ CallStub(&stub); |
2419 RecordJSReturnSite(expr); | 2418 RecordJSReturnSite(expr); |
2420 // Restore context register. | 2419 // Restore context register. |
2421 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2420 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2422 context()->DropAndPlug(1, x0); | 2421 context()->DropAndPlug(1, x0); |
2423 } | 2422 } |
(...skipping 10 matching lines...)
2434 } | 2433 } |
2435 | 2434 |
2436 // Prepare to push the receiver of the enclosing function. | 2435 // Prepare to push the receiver of the enclosing function. |
2437 int receiver_offset = 2 + info_->scope()->num_parameters(); | 2436 int receiver_offset = 2 + info_->scope()->num_parameters(); |
2438 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize)); | 2437 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize)); |
2439 | 2438 |
2440 // Push. | 2439 // Push. |
2441 __ Push(x10, x11); | 2440 __ Push(x10, x11); |
2442 | 2441 |
2443 // Prepare to push the language mode. | 2442 // Prepare to push the language mode. |
2444 __ Mov(x10, Operand(Smi::FromInt(strict_mode()))); | 2443 __ Mov(x10, Smi::FromInt(strict_mode())); |
2445 // Prepare to push the start position of the scope the call resides in. | 2444 // Prepare to push the start position of the scope the call resides in. |
2446 __ Mov(x11, Operand(Smi::FromInt(scope()->start_position()))); | 2445 __ Mov(x11, Smi::FromInt(scope()->start_position())); |
2447 | 2446 |
2448 // Push. | 2447 // Push. |
2449 __ Push(x10, x11); | 2448 __ Push(x10, x11); |
2450 | 2449 |
2451 // Do the runtime call. | 2450 // Do the runtime call. |
2452 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | 2451 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); |
2453 } | 2452 } |
2454 | 2453 |
2455 | 2454 |
2456 void FullCodeGenerator::VisitCall(Call* expr) { | 2455 void FullCodeGenerator::VisitCall(Call* expr) { |
(...skipping 152 matching lines...)
2609 TypeFeedbackInfo::UninitializedSentinel(isolate()); | 2608 TypeFeedbackInfo::UninitializedSentinel(isolate()); |
2610 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); | 2609 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); |
2611 if (FLAG_pretenuring_call_new) { | 2610 if (FLAG_pretenuring_call_new) { |
2612 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), | 2611 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), |
2613 isolate()->factory()->NewAllocationSite()); | 2612 isolate()->factory()->NewAllocationSite()); |
2614 ASSERT(expr->AllocationSiteFeedbackSlot() == | 2613 ASSERT(expr->AllocationSiteFeedbackSlot() == |
2615 expr->CallNewFeedbackSlot() + 1); | 2614 expr->CallNewFeedbackSlot() + 1); |
2616 } | 2615 } |
2617 | 2616 |
2618 __ LoadObject(x2, FeedbackVector()); | 2617 __ LoadObject(x2, FeedbackVector()); |
2619 __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); | 2618 __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot())); |
2620 | 2619 |
2621 CallConstructStub stub(RECORD_CALL_TARGET); | 2620 CallConstructStub stub(RECORD_CALL_TARGET); |
2622 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); | 2621 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); |
2623 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | 2622 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); |
2624 context()->Plug(x0); | 2623 context()->Plug(x0); |
2625 } | 2624 } |
2626 | 2625 |
2627 | 2626 |
2628 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { | 2627 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { |
2629 ZoneList<Expression*>* args = expr->arguments(); | 2628 ZoneList<Expression*>* args = expr->arguments(); |
(...skipping 318 matching lines...)
2948 Label* fall_through = NULL; | 2947 Label* fall_through = NULL; |
2949 context()->PrepareTest(&materialize_true, &materialize_false, | 2948 context()->PrepareTest(&materialize_true, &materialize_false, |
2950 &if_true, &if_false, &fall_through); | 2949 &if_true, &if_false, &fall_through); |
2951 | 2950 |
2952 // Get the frame pointer for the calling frame. | 2951 // Get the frame pointer for the calling frame. |
2953 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 2952 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
2954 | 2953 |
2955 // Skip the arguments adaptor frame if it exists. | 2954 // Skip the arguments adaptor frame if it exists. |
2956 Label check_frame_marker; | 2955 Label check_frame_marker; |
2957 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset)); | 2956 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset)); |
2958 __ Cmp(x1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2957 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
2959 __ B(ne, &check_frame_marker); | 2958 __ B(ne, &check_frame_marker); |
2960 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset)); | 2959 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset)); |
2961 | 2960 |
2962 // Check the marker in the calling frame. | 2961 // Check the marker in the calling frame. |
2963 __ Bind(&check_frame_marker); | 2962 __ Bind(&check_frame_marker); |
2964 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset)); | 2963 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset)); |
2965 __ Cmp(x1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); | 2964 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT)); |
2966 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2965 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2967 Split(eq, if_true, if_false, fall_through); | 2966 Split(eq, if_true, if_false, fall_through); |
2968 | 2967 |
2969 context()->Plug(if_true, if_false); | 2968 context()->Plug(if_true, if_false); |
2970 } | 2969 } |
2971 | 2970 |
2972 | 2971 |
2973 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { | 2972 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { |
2974 ZoneList<Expression*>* args = expr->arguments(); | 2973 ZoneList<Expression*>* args = expr->arguments(); |
2975 ASSERT(args->length() == 2); | 2974 ASSERT(args->length() == 2); |
(...skipping 18 matching lines...)
2994 } | 2993 } |
2995 | 2994 |
2996 | 2995 |
2997 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { | 2996 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { |
2998 ZoneList<Expression*>* args = expr->arguments(); | 2997 ZoneList<Expression*>* args = expr->arguments(); |
2999 ASSERT(args->length() == 1); | 2998 ASSERT(args->length() == 1); |
3000 | 2999 |
3001 // ArgumentsAccessStub expects the key in x1. | 3000 // ArgumentsAccessStub expects the key in x1. |
3002 VisitForAccumulatorValue(args->at(0)); | 3001 VisitForAccumulatorValue(args->at(0)); |
3003 __ Mov(x1, x0); | 3002 __ Mov(x1, x0); |
3004 __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | 3003 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters())); |
3005 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); | 3004 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
3006 __ CallStub(&stub); | 3005 __ CallStub(&stub); |
3007 context()->Plug(x0); | 3006 context()->Plug(x0); |
3008 } | 3007 } |
3009 | 3008 |
3010 | 3009 |
3011 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { | 3010 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { |
3012 ASSERT(expr->arguments()->length() == 0); | 3011 ASSERT(expr->arguments()->length() == 0); |
3013 Label exit; | 3012 Label exit; |
3014 // Get the number of formal parameters. | 3013 // Get the number of formal parameters. |
3015 __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | 3014 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters())); |
3016 | 3015 |
3017 // Check if the calling frame is an arguments adaptor frame. | 3016 // Check if the calling frame is an arguments adaptor frame. |
3018 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 3017 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
3019 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset)); | 3018 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset)); |
3020 __ Cmp(x13, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 3019 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
3021 __ B(ne, &exit); | 3020 __ B(ne, &exit); |
3022 | 3021 |
3023 // Arguments adaptor case: Read the arguments length from the | 3022 // Arguments adaptor case: Read the arguments length from the |
3024 // adaptor frame. | 3023 // adaptor frame. |
3025 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 3024 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
3026 | 3025 |
3027 __ Bind(&exit); | 3026 __ Bind(&exit); |
3028 context()->Plug(x0); | 3027 context()->Plug(x0); |
3029 } | 3028 } |
3030 | 3029 |
(...skipping 145 matching lines...)
3176 | 3175 |
3177 __ JumpIfSmi(object, ¬_date_object); | 3176 __ JumpIfSmi(object, ¬_date_object); |
3178 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, ¬_date_object); | 3177 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, ¬_date_object); |
3179 | 3178 |
3180 if (index->value() == 0) { | 3179 if (index->value() == 0) { |
3181 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); | 3180 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); |
3182 __ B(&done); | 3181 __ B(&done); |
3183 } else { | 3182 } else { |
3184 if (index->value() < JSDate::kFirstUncachedField) { | 3183 if (index->value() < JSDate::kFirstUncachedField) { |
3185 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 3184 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
3186 __ Mov(x10, Operand(stamp)); | 3185 __ Mov(x10, stamp); |
3187 __ Ldr(stamp_addr, MemOperand(x10)); | 3186 __ Ldr(stamp_addr, MemOperand(x10)); |
3188 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset)); | 3187 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset)); |
3189 __ Cmp(stamp_addr, stamp_cache); | 3188 __ Cmp(stamp_addr, stamp_cache); |
3190 __ B(ne, &runtime); | 3189 __ B(ne, &runtime); |
3191 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset + | 3190 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset + |
3192 kPointerSize * index->value())); | 3191 kPointerSize * index->value())); |
3193 __ B(&done); | 3192 __ B(&done); |
3194 } | 3193 } |
3195 | 3194 |
3196 __ Bind(&runtime); | 3195 __ Bind(&runtime); |
3197 __ Mov(x1, Operand(index)); | 3196 __ Mov(x1, index); |
3198 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 3197 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
3199 __ B(&done); | 3198 __ B(&done); |
3200 } | 3199 } |
3201 | 3200 |
3202 __ Bind(¬_date_object); | 3201 __ Bind(¬_date_object); |
3203 __ CallRuntime(Runtime::kThrowNotDateError, 0); | 3202 __ CallRuntime(Runtime::kThrowNotDateError, 0); |
3204 __ Bind(&done); | 3203 __ Bind(&done); |
3205 context()->Plug(x0); | 3204 context()->Plug(x0); |
3206 } | 3205 } |
3207 | 3206 |
(...skipping 207 matching lines...)
3415 __ B(&done); | 3414 __ B(&done); |
3416 | 3415 |
3417 __ Bind(&index_out_of_range); | 3416 __ Bind(&index_out_of_range); |
3418 // When the index is out of range, the spec requires us to return | 3417 // When the index is out of range, the spec requires us to return |
3419 // the empty string. | 3418 // the empty string. |
3420 __ LoadRoot(result, Heap::kempty_stringRootIndex); | 3419 __ LoadRoot(result, Heap::kempty_stringRootIndex); |
3421 __ B(&done); | 3420 __ B(&done); |
3422 | 3421 |
3423 __ Bind(&need_conversion); | 3422 __ Bind(&need_conversion); |
3424 // Move smi zero into the result register, which will trigger conversion. | 3423 // Move smi zero into the result register, which will trigger conversion. |
3425 __ Mov(result, Operand(Smi::FromInt(0))); | 3424 __ Mov(result, Smi::FromInt(0)); |
3426 __ B(&done); | 3425 __ B(&done); |
3427 | 3426 |
3428 NopRuntimeCallHelper call_helper; | 3427 NopRuntimeCallHelper call_helper; |
3429 generator.GenerateSlow(masm_, call_helper); | 3428 generator.GenerateSlow(masm_, call_helper); |
3430 | 3429 |
3431 __ Bind(&done); | 3430 __ Bind(&done); |
3432 context()->Plug(result); | 3431 context()->Plug(result); |
3433 } | 3432 } |
3434 | 3433 |
3435 | 3434 |
(...skipping 232 matching lines...)
3668 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); | 3667 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); |
3669 // Loop condition: while (element < elements_end). | 3668 // Loop condition: while (element < elements_end). |
3670 // Live values in registers: | 3669 // Live values in registers: |
3671 // elements: Fixed array of strings. | 3670 // elements: Fixed array of strings. |
3672 // array_length: Length of the fixed array of strings (not smi) | 3671 // array_length: Length of the fixed array of strings (not smi) |
3673 // separator: Separator string | 3672 // separator: Separator string |
3674 // string_length: Accumulated sum of string lengths (not smi). | 3673 // string_length: Accumulated sum of string lengths (not smi). |
3675 // element: Current array element. | 3674 // element: Current array element. |
3676 // elements_end: Array end. | 3675 // elements_end: Array end. |
3677 if (FLAG_debug_code) { | 3676 if (FLAG_debug_code) { |
3678 __ Cmp(array_length, Operand(0)); | 3677 __ Cmp(array_length, 0); |
3679 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin); | 3678 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin); |
3680 } | 3679 } |
3681 __ Bind(&loop); | 3680 __ Bind(&loop); |
3682 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex)); | 3681 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex)); |
3683 __ JumpIfSmi(string, &bailout); | 3682 __ JumpIfSmi(string, &bailout); |
3684 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); | 3683 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); |
3685 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 3684 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
3686 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | 3685 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
3687 __ Ldrsw(scratch1, | 3686 __ Ldrsw(scratch1, |
3688 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset)); | 3687 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset)); |
(...skipping 192 matching lines...)
3881 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3880 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
3882 switch (expr->op()) { | 3881 switch (expr->op()) { |
3883 case Token::DELETE: { | 3882 case Token::DELETE: { |
3884 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 3883 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
3885 Property* property = expr->expression()->AsProperty(); | 3884 Property* property = expr->expression()->AsProperty(); |
3886 VariableProxy* proxy = expr->expression()->AsVariableProxy(); | 3885 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
3887 | 3886 |
3888 if (property != NULL) { | 3887 if (property != NULL) { |
3889 VisitForStackValue(property->obj()); | 3888 VisitForStackValue(property->obj()); |
3890 VisitForStackValue(property->key()); | 3889 VisitForStackValue(property->key()); |
3891 __ Mov(x10, Operand(Smi::FromInt(strict_mode()))); | 3890 __ Mov(x10, Smi::FromInt(strict_mode())); |
3892 __ Push(x10); | 3891 __ Push(x10); |
3893 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3892 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
3894 context()->Plug(x0); | 3893 context()->Plug(x0); |
3895 } else if (proxy != NULL) { | 3894 } else if (proxy != NULL) { |
3896 Variable* var = proxy->var(); | 3895 Variable* var = proxy->var(); |
3897 // Delete of an unqualified identifier is disallowed in strict mode | 3896 // Delete of an unqualified identifier is disallowed in strict mode |
3898 // but "delete this" is allowed. | 3897 // but "delete this" is allowed. |
3899 ASSERT(strict_mode() == SLOPPY || var->is_this()); | 3898 ASSERT(strict_mode() == SLOPPY || var->is_this()); |
3900 if (var->IsUnallocated()) { | 3899 if (var->IsUnallocated()) { |
3901 __ Ldr(x12, GlobalObjectMemOperand()); | 3900 __ Ldr(x12, GlobalObjectMemOperand()); |
3902 __ Mov(x11, Operand(var->name())); | 3901 __ Mov(x11, Operand(var->name())); |
3903 __ Mov(x10, Operand(Smi::FromInt(SLOPPY))); | 3902 __ Mov(x10, Smi::FromInt(SLOPPY)); |
3904 __ Push(x12, x11, x10); | 3903 __ Push(x12, x11, x10); |
3905 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 3904 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
3906 context()->Plug(x0); | 3905 context()->Plug(x0); |
3907 } else if (var->IsStackAllocated() || var->IsContextSlot()) { | 3906 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
3908 // Result of deleting non-global, non-dynamic variables is false. | 3907 // Result of deleting non-global, non-dynamic variables is false. |
3909 // The subexpression does not have side effects. | 3908 // The subexpression does not have side effects. |
3910 context()->Plug(var->is_this()); | 3909 context()->Plug(var->is_this()); |
3911 } else { | 3910 } else { |
3912 // Non-global variable. Call the runtime to try to delete from the | 3911 // Non-global variable. Call the runtime to try to delete from the |
3913 // context where the variable was introduced. | 3912 // context where the variable was introduced. |
(...skipping 147 matching lines...)
4061 case NAMED_PROPERTY: | 4060 case NAMED_PROPERTY: |
4062 __ Poke(x0, kPointerSize); | 4061 __ Poke(x0, kPointerSize); |
4063 break; | 4062 break; |
4064 case KEYED_PROPERTY: | 4063 case KEYED_PROPERTY: |
4065 __ Poke(x0, kPointerSize * 2); | 4064 __ Poke(x0, kPointerSize * 2); |
4066 break; | 4065 break; |
4067 } | 4066 } |
4068 } | 4067 } |
4069 } | 4068 } |
4070 | 4069 |
4071 __ Adds(x0, x0, Operand(Smi::FromInt(count_value))); | 4070 __ Adds(x0, x0, Smi::FromInt(count_value)); |
4072 __ B(vc, &done); | 4071 __ B(vc, &done); |
4073 // Call stub. Undo operation first. | 4072 // Call stub. Undo operation first. |
4074 __ Sub(x0, x0, Operand(Smi::FromInt(count_value))); | 4073 __ Sub(x0, x0, Smi::FromInt(count_value)); |
4075 __ B(&stub_call); | 4074 __ B(&stub_call); |
4076 __ Bind(&slow); | 4075 __ Bind(&slow); |
4077 } | 4076 } |
4078 ToNumberStub convert_stub; | 4077 ToNumberStub convert_stub; |
4079 __ CallStub(&convert_stub); | 4078 __ CallStub(&convert_stub); |
4080 | 4079 |
4081 // Save result for postfix expressions. | 4080 // Save result for postfix expressions. |
4082 if (expr->is_postfix()) { | 4081 if (expr->is_postfix()) { |
4083 if (!context()->IsEffect()) { | 4082 if (!context()->IsEffect()) { |
4084 // Save the result on the stack. If we have a named or keyed property | 4083 // Save the result on the stack. If we have a named or keyed property |
4085 // we store the result under the receiver that is currently on top | 4084 // we store the result under the receiver that is currently on top |
4086 // of the stack. | 4085 // of the stack. |
4087 switch (assign_type) { | 4086 switch (assign_type) { |
4088 case VARIABLE: | 4087 case VARIABLE: |
4089 __ Push(x0); | 4088 __ Push(x0); |
4090 break; | 4089 break; |
4091 case NAMED_PROPERTY: | 4090 case NAMED_PROPERTY: |
4092 __ Poke(x0, kXRegSize); | 4091 __ Poke(x0, kXRegSize); |
4093 break; | 4092 break; |
4094 case KEYED_PROPERTY: | 4093 case KEYED_PROPERTY: |
4095 __ Poke(x0, 2 * kXRegSize); | 4094 __ Poke(x0, 2 * kXRegSize); |
4096 break; | 4095 break; |
4097 } | 4096 } |
4098 } | 4097 } |
4099 } | 4098 } |
4100 | 4099 |
4101 __ Bind(&stub_call); | 4100 __ Bind(&stub_call); |
4102 __ Mov(x1, x0); | 4101 __ Mov(x1, x0); |
4103 __ Mov(x0, Operand(Smi::FromInt(count_value))); | 4102 __ Mov(x0, Smi::FromInt(count_value)); |
4104 | 4103 |
4105 // Record position before stub call. | 4104 // Record position before stub call. |
4106 SetSourcePosition(expr->position()); | 4105 SetSourcePosition(expr->position()); |
4107 | 4106 |
4108 { | 4107 { |
4109 Assembler::BlockPoolsScope scope(masm_); | 4108 Assembler::BlockPoolsScope scope(masm_); |
4110 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); | 4109 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); |
4111 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); | 4110 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); |
4112 patch_site.EmitPatchInfo(); | 4111 patch_site.EmitPatchInfo(); |
4113 } | 4112 } |
(...skipping 313 matching lines...)
4427 | 4426 |
4428 // TODO(jbramley): This label is bound here because the following code | 4427 // TODO(jbramley): This label is bound here because the following code |
4429 // looks at its pos(). Is it possible to do something more efficient here, | 4428 // looks at its pos(). Is it possible to do something more efficient here, |
4430 // perhaps using Adr? | 4429 // perhaps using Adr? |
4431 __ Bind(&continuation); | 4430 __ Bind(&continuation); |
4432 __ B(&resume); | 4431 __ B(&resume); |
4433 | 4432 |
4434 __ Bind(&suspend); | 4433 __ Bind(&suspend); |
4435 VisitForAccumulatorValue(expr->generator_object()); | 4434 VisitForAccumulatorValue(expr->generator_object()); |
4436 ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos())); | 4435 ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos())); |
4437 __ Mov(x1, Operand(Smi::FromInt(continuation.pos()))); | 4436 __ Mov(x1, Smi::FromInt(continuation.pos())); |
4438 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); | 4437 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); |
4439 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); | 4438 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); |
4440 __ Mov(x1, cp); | 4439 __ Mov(x1, cp); |
4441 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, | 4440 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, |
4442 kLRHasBeenSaved, kDontSaveFPRegs); | 4441 kLRHasBeenSaved, kDontSaveFPRegs); |
4443 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset); | 4442 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset); |
4444 __ Cmp(__ StackPointer(), x1); | 4443 __ Cmp(__ StackPointer(), x1); |
4445 __ B(eq, &post_runtime); | 4444 __ B(eq, &post_runtime); |
4446 __ Push(x0); // generator object | 4445 __ Push(x0); // generator object |
4447 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 4446 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
4448 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4447 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4449 __ Bind(&post_runtime); | 4448 __ Bind(&post_runtime); |
4450 __ Pop(result_register()); | 4449 __ Pop(result_register()); |
4451 EmitReturnSequence(); | 4450 EmitReturnSequence(); |
4452 | 4451 |
4453 __ Bind(&resume); | 4452 __ Bind(&resume); |
4454 context()->Plug(result_register()); | 4453 context()->Plug(result_register()); |
4455 break; | 4454 break; |
4456 } | 4455 } |
4457 | 4456 |
4458 case Yield::FINAL: { | 4457 case Yield::FINAL: { |
4459 VisitForAccumulatorValue(expr->generator_object()); | 4458 VisitForAccumulatorValue(expr->generator_object()); |
4460 __ Mov(x1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); | 4459 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed)); |
4461 __ Str(x1, FieldMemOperand(result_register(), | 4460 __ Str(x1, FieldMemOperand(result_register(), |
4462 JSGeneratorObject::kContinuationOffset)); | 4461 JSGeneratorObject::kContinuationOffset)); |
4463 // Pop value from top-of-stack slot, box result into result register. | 4462 // Pop value from top-of-stack slot, box result into result register. |
4464 EmitCreateIteratorResult(true); | 4463 EmitCreateIteratorResult(true); |
4465 EmitUnwindBeforeReturn(); | 4464 EmitUnwindBeforeReturn(); |
4466 EmitReturnSequence(); | 4465 EmitReturnSequence(); |
4467 break; | 4466 break; |
4468 } | 4467 } |
4469 | 4468 |
4470 case Yield::DELEGATING: { | 4469 case Yield::DELEGATING: { |
(...skipping 31 matching lines...)
4502 // looks at its pos(). Is it possible to do something more efficient here, | 4501 // looks at its pos(). Is it possible to do something more efficient here, |
4503 // perhaps using Adr? | 4502 // perhaps using Adr? |
4504 __ Bind(&l_continuation); | 4503 __ Bind(&l_continuation); |
4505 __ B(&l_resume); | 4504 __ B(&l_resume); |
4506 | 4505 |
4507 __ Bind(&l_suspend); | 4506 __ Bind(&l_suspend); |
4508 const int generator_object_depth = kPointerSize + handler_size; | 4507 const int generator_object_depth = kPointerSize + handler_size; |
4509 __ Peek(x0, generator_object_depth); | 4508 __ Peek(x0, generator_object_depth); |
4510 __ Push(x0); // g | 4509 __ Push(x0); // g |
4511 ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos())); | 4510 ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos())); |
4512 __ Mov(x1, Operand(Smi::FromInt(l_continuation.pos()))); | 4511 __ Mov(x1, Smi::FromInt(l_continuation.pos())); |
4513 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); | 4512 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); |
4514 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); | 4513 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); |
4515 __ Mov(x1, cp); | 4514 __ Mov(x1, cp); |
4516 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, | 4515 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, |
4517 kLRHasBeenSaved, kDontSaveFPRegs); | 4516 kLRHasBeenSaved, kDontSaveFPRegs); |
4518 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 4517 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
4519 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4518 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4520 __ Pop(x0); // result | 4519 __ Pop(x0); // result |
4521 EmitReturnSequence(); | 4520 EmitReturnSequence(); |
4522 __ Bind(&l_resume); // received in x0 | 4521 __ Bind(&l_resume); // received in x0 |
(...skipping 57 matching lines...)
4580 VisitForStackValue(generator); | 4579 VisitForStackValue(generator); |
4581 VisitForAccumulatorValue(value); | 4580 VisitForAccumulatorValue(value); |
4582 __ Pop(generator_object); | 4581 __ Pop(generator_object); |
4583 | 4582 |
4584 // Check generator state. | 4583 // Check generator state. |
4585 Label wrong_state, closed_state, done; | 4584 Label wrong_state, closed_state, done; |
4586 __ Ldr(x10, FieldMemOperand(generator_object, | 4585 __ Ldr(x10, FieldMemOperand(generator_object, |
4587 JSGeneratorObject::kContinuationOffset)); | 4586 JSGeneratorObject::kContinuationOffset)); |
4588 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); | 4587 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); |
4589 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); | 4588 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); |
4590 __ CompareAndBranch(x10, Operand(Smi::FromInt(0)), eq, &closed_state); | 4589 __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state); |
4591 __ CompareAndBranch(x10, Operand(Smi::FromInt(0)), lt, &wrong_state); | 4590 __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state); |
4592 | 4591 |
4593 // Load suspended function and context. | 4592 // Load suspended function and context. |
4594 __ Ldr(cp, FieldMemOperand(generator_object, | 4593 __ Ldr(cp, FieldMemOperand(generator_object, |
4595 JSGeneratorObject::kContextOffset)); | 4594 JSGeneratorObject::kContextOffset)); |
4596 __ Ldr(function, FieldMemOperand(generator_object, | 4595 __ Ldr(function, FieldMemOperand(generator_object, |
4597 JSGeneratorObject::kFunctionOffset)); | 4596 JSGeneratorObject::kFunctionOffset)); |
4598 | 4597 |
4599 // Load receiver and store as the first argument. | 4598 // Load receiver and store as the first argument. |
4600 __ Ldr(x10, FieldMemOperand(generator_object, | 4599 __ Ldr(x10, FieldMemOperand(generator_object, |
4601 JSGeneratorObject::kReceiverOffset)); | 4600 JSGeneratorObject::kReceiverOffset)); |
(...skipping 33 matching lines...)
4635 // If we are sending a value and there is no operand stack, we can jump back | 4634 // If we are sending a value and there is no operand stack, we can jump back |
4636 // in directly. | 4635 // in directly. |
4637 if (resume_mode == JSGeneratorObject::NEXT) { | 4636 if (resume_mode == JSGeneratorObject::NEXT) { |
4638 Label slow_resume; | 4637 Label slow_resume; |
4639 __ Cbnz(operand_stack_size, &slow_resume); | 4638 __ Cbnz(operand_stack_size, &slow_resume); |
4640 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); | 4639 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); |
4641 __ Ldrsw(x11, | 4640 __ Ldrsw(x11, |
4642 UntagSmiFieldMemOperand(generator_object, | 4641 UntagSmiFieldMemOperand(generator_object, |
4643 JSGeneratorObject::kContinuationOffset)); | 4642 JSGeneratorObject::kContinuationOffset)); |
4644 __ Add(x10, x10, x11); | 4643 __ Add(x10, x10, x11); |
4645 __ Mov(x12, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); | 4644 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); |
4646 __ Str(x12, FieldMemOperand(generator_object, | 4645 __ Str(x12, FieldMemOperand(generator_object, |
4647 JSGeneratorObject::kContinuationOffset)); | 4646 JSGeneratorObject::kContinuationOffset)); |
4648 __ Br(x10); | 4647 __ Br(x10); |
4649 | 4648 |
4650 __ Bind(&slow_resume); | 4649 __ Bind(&slow_resume); |
4651 } | 4650 } |
4652 | 4651 |
4653 // Otherwise, we push holes for the operand stack and call the runtime to fix | 4652 // Otherwise, we push holes for the operand stack and call the runtime to fix |
4654 // up the stack and the handlers. | 4653 // up the stack and the handlers. |
4655 __ PushMultipleTimes(the_hole, operand_stack_size); | 4654 __ PushMultipleTimes(the_hole, operand_stack_size); |
4656 | 4655 |
4657 __ Mov(x10, Operand(Smi::FromInt(resume_mode))); | 4656 __ Mov(x10, Smi::FromInt(resume_mode)); |
4658 __ Push(generator_object, result_register(), x10); | 4657 __ Push(generator_object, result_register(), x10); |
4659 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); | 4658 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); |
4660 // Not reached: the runtime call returns elsewhere. | 4659 // Not reached: the runtime call returns elsewhere. |
4661 __ Unreachable(); | 4660 __ Unreachable(); |
4662 | 4661 |
4663 // Reach here when generator is closed. | 4662 // Reach here when generator is closed. |
4664 __ Bind(&closed_state); | 4663 __ Bind(&closed_state); |
4665 if (resume_mode == JSGeneratorObject::NEXT) { | 4664 if (resume_mode == JSGeneratorObject::NEXT) { |
4666 // Return completed iterator result when generator is closed. | 4665 // Return completed iterator result when generator is closed. |
4667 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | 4666 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); |
(...skipping 123 matching lines...)
4791 ASSERT(!result_register().is(x10)); | 4790 ASSERT(!result_register().is(x10)); |
4792 // Preserve the result register while executing finally block. | 4791 // Preserve the result register while executing finally block. |
4793 // Also cook the return address in lr to the stack (smi encoded Code* delta). | 4792 // Also cook the return address in lr to the stack (smi encoded Code* delta). |
4794 __ Sub(x10, lr, Operand(masm_->CodeObject())); | 4793 __ Sub(x10, lr, Operand(masm_->CodeObject())); |
4795 __ SmiTag(x10); | 4794 __ SmiTag(x10); |
4796 __ Push(result_register(), x10); | 4795 __ Push(result_register(), x10); |
4797 | 4796 |
4798 // Store pending message while executing finally block. | 4797 // Store pending message while executing finally block. |
4799 ExternalReference pending_message_obj = | 4798 ExternalReference pending_message_obj = |
4800 ExternalReference::address_of_pending_message_obj(isolate()); | 4799 ExternalReference::address_of_pending_message_obj(isolate()); |
4801 __ Mov(x10, Operand(pending_message_obj)); | 4800 __ Mov(x10, pending_message_obj); |
4802 __ Ldr(x10, MemOperand(x10)); | 4801 __ Ldr(x10, MemOperand(x10)); |
4803 | 4802 |
4804 ExternalReference has_pending_message = | 4803 ExternalReference has_pending_message = |
4805 ExternalReference::address_of_has_pending_message(isolate()); | 4804 ExternalReference::address_of_has_pending_message(isolate()); |
4806 __ Mov(x11, Operand(has_pending_message)); | 4805 __ Mov(x11, has_pending_message); |
4807 __ Ldr(x11, MemOperand(x11)); | 4806 __ Ldr(x11, MemOperand(x11)); |
4808 __ SmiTag(x11); | 4807 __ SmiTag(x11); |
4809 | 4808 |
4810 __ Push(x10, x11); | 4809 __ Push(x10, x11); |
4811 | 4810 |
4812 ExternalReference pending_message_script = | 4811 ExternalReference pending_message_script = |
4813 ExternalReference::address_of_pending_message_script(isolate()); | 4812 ExternalReference::address_of_pending_message_script(isolate()); |
4814 __ Mov(x10, Operand(pending_message_script)); | 4813 __ Mov(x10, pending_message_script); |
4815 __ Ldr(x10, MemOperand(x10)); | 4814 __ Ldr(x10, MemOperand(x10)); |
4816 __ Push(x10); | 4815 __ Push(x10); |
4817 } | 4816 } |
4818 | 4817 |
4819 | 4818 |
4820 void FullCodeGenerator::ExitFinallyBlock() { | 4819 void FullCodeGenerator::ExitFinallyBlock() { |
4821 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock"); | 4820 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock"); |
4822 ASSERT(!result_register().is(x10)); | 4821 ASSERT(!result_register().is(x10)); |
4823 | 4822 |
4824 // Restore pending message from stack. | 4823 // Restore pending message from stack. |
4825 __ Pop(x10, x11, x12); | 4824 __ Pop(x10, x11, x12); |
4826 ExternalReference pending_message_script = | 4825 ExternalReference pending_message_script = |
4827 ExternalReference::address_of_pending_message_script(isolate()); | 4826 ExternalReference::address_of_pending_message_script(isolate()); |
4828 __ Mov(x13, Operand(pending_message_script)); | 4827 __ Mov(x13, pending_message_script); |
4829 __ Str(x10, MemOperand(x13)); | 4828 __ Str(x10, MemOperand(x13)); |
4830 | 4829 |
4831 __ SmiUntag(x11); | 4830 __ SmiUntag(x11); |
4832 ExternalReference has_pending_message = | 4831 ExternalReference has_pending_message = |
4833 ExternalReference::address_of_has_pending_message(isolate()); | 4832 ExternalReference::address_of_has_pending_message(isolate()); |
4834 __ Mov(x13, Operand(has_pending_message)); | 4833 __ Mov(x13, has_pending_message); |
4835 __ Str(x11, MemOperand(x13)); | 4834 __ Str(x11, MemOperand(x13)); |
4836 | 4835 |
4837 ExternalReference pending_message_obj = | 4836 ExternalReference pending_message_obj = |
4838 ExternalReference::address_of_pending_message_obj(isolate()); | 4837 ExternalReference::address_of_pending_message_obj(isolate()); |
4839 __ Mov(x13, Operand(pending_message_obj)); | 4838 __ Mov(x13, pending_message_obj); |
4840 __ Str(x12, MemOperand(x13)); | 4839 __ Str(x12, MemOperand(x13)); |
4841 | 4840 |
4842 // Restore result register and cooked return address from the stack. | 4841 // Restore result register and cooked return address from the stack. |
4843 __ Pop(x10, result_register()); | 4842 __ Pop(x10, result_register()); |
4844 | 4843 |
4845 // Uncook the return address (see EnterFinallyBlock). | 4844 // Uncook the return address (see EnterFinallyBlock). |
4846 __ SmiUntag(x10); | 4845 __ SmiUntag(x10); |
4847 __ Add(x11, x10, Operand(masm_->CodeObject())); | 4846 __ Add(x11, x10, Operand(masm_->CodeObject())); |
4848 __ Br(x11); | 4847 __ Br(x11); |
4849 } | 4848 } |
(...skipping 123 matching lines...)
4973 return previous_; | 4972 return previous_; |
4974 } | 4973 } |
4975 | 4974 |
4976 | 4975 |
4977 #undef __ | 4976 #undef __ |
4978 | 4977 |
4979 | 4978 |
4980 } } // namespace v8::internal | 4979 } } // namespace v8::internal |
4981 | 4980 |
4982 #endif // V8_TARGET_ARCH_A64 | 4981 #endif // V8_TARGET_ARCH_A64 |
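
The pattern applied throughout the new side of this diff is the removal of explicit Operand(...) wrappers around Smi and ExternalReference arguments to Mov and similar macro-assembler calls. The sketch below is a minimal, self-contained illustration of why such call sites can compile unchanged: if the Operand type exposes non-explicit constructors for those immediate-like types, the wrapper is built implicitly at the call site. The Smi, ExternalReference, Operand, and MacroAssembler classes here are simplified stand-ins, not V8's actual definitions.

```cpp
#include <cstdint>
#include <iostream>

// Stand-in for v8::internal::Smi (real Smis are tagged pointers).
struct Smi {
  intptr_t value_;
  static Smi FromInt(int n) { return Smi{static_cast<intptr_t>(n) << 1}; }
};

// Stand-in for v8::internal::ExternalReference.
struct ExternalReference {
  const void* address_;
};

class Operand {
 public:
  // Non-explicit constructors let callers pass a Smi or an ExternalReference
  // directly; the Operand is constructed implicitly.
  Operand(int64_t imm) : imm_(imm) {}
  Operand(Smi smi) : imm_(smi.value_) {}
  Operand(ExternalReference ref)
      : imm_(reinterpret_cast<intptr_t>(ref.address_)) {}
  int64_t immediate() const { return imm_; }

 private:
  int64_t imm_;
};

struct MacroAssembler {
  // Accepting `const Operand&` means both spellings below resolve to the
  // same overload.
  void Mov(const char* rd, const Operand& operand) {
    std::cout << "mov " << rd << ", #" << operand.immediate() << "\n";
  }
};

int main() {
  MacroAssembler masm;
  masm.Mov("x1", Smi::FromInt(42));           // new spelling: implicit Operand
  masm.Mov("x1", Operand(Smi::FromInt(42)));  // old spelling: explicit wrapper
  return 0;
}
```

Both calls emit the same move; dropping the explicit wrapper only shortens the call sites, assuming the implicit constructors exist as sketched.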