| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1068 matching lines...) |
| 1079 // Use the fast case closure allocation code that allocates in new | 1079 // Use the fast case closure allocation code that allocates in new |
| 1080 // space for nested functions that don't need literals cloning. If | 1080 // space for nested functions that don't need literals cloning. If |
| 1081 // we're running with the --always-opt or the --prepare-always-opt | 1081 // we're running with the --always-opt or the --prepare-always-opt |
| 1082 // flag, we need to use the runtime function so that the new function | 1082 // flag, we need to use the runtime function so that the new function |
| 1083 // we are creating here gets a chance to have its code optimized and | 1083 // we are creating here gets a chance to have its code optimized and |
| 1084 // doesn't just get a copy of the existing unoptimized code. | 1084 // doesn't just get a copy of the existing unoptimized code. |
| 1085 if (!FLAG_always_opt && | 1085 if (!FLAG_always_opt && |
| 1086 !FLAG_prepare_always_opt && | 1086 !FLAG_prepare_always_opt && |
| 1087 !pretenure && | 1087 !pretenure && |
| 1088 scope()->is_function_scope() && | 1088 scope()->is_function_scope() && |
| 1089 info->num_literals() == 0 && | 1089 info->num_literals() == 0) { |
| 1090 !info->strict_mode()) { // Strict mode functions use slow path. | 1090 FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode); |
| 1091 FastNewClosureStub stub; | |
| 1092 __ mov(r0, Operand(info)); | 1091 __ mov(r0, Operand(info)); |
| 1093 __ push(r0); | 1092 __ push(r0); |
| 1094 __ CallStub(&stub); | 1093 __ CallStub(&stub); |
| 1095 } else { | 1094 } else { |
| 1096 __ mov(r0, Operand(info)); | 1095 __ mov(r0, Operand(info)); |
| 1097 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex | 1096 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex |
| 1098 : Heap::kFalseValueRootIndex); | 1097 : Heap::kFalseValueRootIndex); |
| 1099 __ Push(cp, r0, r1); | 1098 __ Push(cp, r0, r1); |
| 1100 __ CallRuntime(Runtime::kNewClosure, 3); | 1099 __ CallRuntime(Runtime::kNewClosure, 3); |
| 1101 } | 1100 } |
| (...skipping 3181 matching lines...) |
| 4283 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. | 4282 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. |
| 4284 __ add(pc, r1, Operand(masm_->CodeObject())); | 4283 __ add(pc, r1, Operand(masm_->CodeObject())); |
| 4285 } | 4284 } |
| 4286 | 4285 |
| 4287 | 4286 |
| 4288 #undef __ | 4287 #undef __ |
| 4289 | 4288 |
| 4290 } } // namespace v8::internal | 4289 } } // namespace v8::internal |
| 4291 | 4290 |
| 4292 #endif // V8_TARGET_ARCH_ARM | 4291 #endif // V8_TARGET_ARCH_ARM |
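Note on the closure-allocation hunk above: the new code drops the "!info->strict_mode()" guard from the fast-path condition and instead forwards the strictness to FastNewClosureStub, so strict mode functions can also use the fast closure allocation stub rather than always falling back to Runtime::kNewClosure. Below is a minimal stand-alone sketch of the selection logic after this change; the types and helper names are illustrative stand-ins, not V8's actual definitions.

#include <iostream>

// Hypothetical stand-ins for the V8 types involved (illustrative only).
enum StrictModeFlag { kNonStrictMode, kStrictMode };

struct FunctionInfo {
  int num_literals;
  bool strict_mode;
};

// Mirrors the condition in the new code: strict mode no longer forces the
// slow runtime path; only the optimization flags, pretenuring, scope kind
// and literal count decide which path is taken.
bool CanUseFastClosureStub(const FunctionInfo& info, bool always_opt,
                           bool prepare_always_opt, bool pretenure,
                           bool is_function_scope) {
  return !always_opt && !prepare_always_opt && !pretenure &&
         is_function_scope && info.num_literals == 0;
}

int main() {
  FunctionInfo strict_fn{0, true};  // a strict mode function with no literals
  if (CanUseFastClosureStub(strict_fn, false, false, false, true)) {
    // The strictness is passed to the stub instead of gating the fast path.
    StrictModeFlag flag = strict_fn.strict_mode ? kStrictMode : kNonStrictMode;
    std::cout << "fast path, strict flag = " << flag << "\n";
  } else {
    std::cout << "runtime path (Runtime::kNewClosure)\n";
  }
  return 0;
}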