OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1204 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1215 __ mov(tos, Operand(tos, LSL, scratch)); // Shift constant. | 1215 __ mov(tos, Operand(tos, LSL, scratch)); // Shift constant. |
1216 deferred->BindExit(); | 1216 deferred->BindExit(); |
1217 TypeInfo result = TypeInfo::Integer32(); | 1217 TypeInfo result = TypeInfo::Integer32(); |
1218 frame_->EmitPush(tos, result); | 1218 frame_->EmitPush(tos, result); |
1219 break; | 1219 break; |
1220 } | 1220 } |
1221 // Fall through! | 1221 // Fall through! |
1222 case Token::SHR: | 1222 case Token::SHR: |
1223 case Token::SAR: { | 1223 case Token::SAR: { |
1224 ASSERT(!reversed); | 1224 ASSERT(!reversed); |
1225 TypeInfo result = | 1225 int shift_amount = int_value & 0x1f; |
1226 (op == Token::SAR) ? TypeInfo::Integer32() : TypeInfo::Number(); | 1226 TypeInfo result = TypeInfo::Number(); |
1227 if (!reversed) { | 1227 |
1228 if (op == Token::SHR) { | 1228 if (op == Token::SHR) { |
1229 if (int_value >= 2) { | 1229 if (shift_amount > 1) { |
1230 result = TypeInfo::Smi(); | 1230 result = TypeInfo::Smi(); |
1231 } else if (int_value >= 1) { | 1231 } else if (shift_amount > 0) { |
1232 result = TypeInfo::Integer32(); | 1232 result = TypeInfo::Integer32(); |
1233 } | 1233 } |
| 1234 } else if (op == Token::SAR) { |
| 1235 if (shift_amount > 0) { |
| 1236 result = TypeInfo::Smi(); |
1234 } else { | 1237 } else { |
1235 if (int_value >= 1) { | 1238 result = TypeInfo::Integer32(); |
1236 result = TypeInfo::Smi(); | |
1237 } | |
1238 } | 1239 } |
| 1240 } else { |
| 1241 ASSERT(op == Token::SHL); |
| 1242 result = TypeInfo::Integer32(); |
1239 } | 1243 } |
| 1244 |
1240 Register scratch = VirtualFrame::scratch0(); | 1245 Register scratch = VirtualFrame::scratch0(); |
1241 Register scratch2 = VirtualFrame::scratch1(); | 1246 Register scratch2 = VirtualFrame::scratch1(); |
1242 int shift_value = int_value & 0x1f; // least significant 5 bits | 1247 int shift_value = int_value & 0x1f; // least significant 5 bits |
1243 DeferredCode* deferred = | 1248 DeferredCode* deferred = |
1244 new DeferredInlineSmiOperation(op, shift_value, false, mode, tos); | 1249 new DeferredInlineSmiOperation(op, shift_value, false, mode, tos); |
1245 uint32_t problematic_mask = kSmiTagMask; | 1250 uint32_t problematic_mask = kSmiTagMask; |
1246 // For unsigned shift by zero all negative smis are problematic. | 1251 // For unsigned shift by zero all negative smis are problematic. |
1247 bool skip_smi_test = both_sides_are_smi; | 1252 bool skip_smi_test = both_sides_are_smi; |
1248 if (shift_value == 0 && op == Token::SHR) { | 1253 if (shift_value == 0 && op == Token::SHR) { |
1249 problematic_mask |= 0x80000000; | 1254 problematic_mask |= 0x80000000; |
(...skipping 299 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1549 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); | 1554 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); |
1550 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE); | 1555 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE); |
1551 __ b(lt, &build_args); | 1556 __ b(lt, &build_args); |
1552 | 1557 |
1553 // Check that applicand.apply is Function.prototype.apply. | 1558 // Check that applicand.apply is Function.prototype.apply. |
1554 __ ldr(r0, MemOperand(sp, kPointerSize)); | 1559 __ ldr(r0, MemOperand(sp, kPointerSize)); |
1555 __ BranchOnSmi(r0, &build_args); | 1560 __ BranchOnSmi(r0, &build_args); |
1556 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); | 1561 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); |
1557 __ b(ne, &build_args); | 1562 __ b(ne, &build_args); |
1558 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); | 1563 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); |
1559 __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeOffset)); | 1564 __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset)); |
| 1565 __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); |
1560 __ cmp(r1, Operand(apply_code)); | 1566 __ cmp(r1, Operand(apply_code)); |
1561 __ b(ne, &build_args); | 1567 __ b(ne, &build_args); |
1562 | 1568 |
1563 // Check that applicand is a function. | 1569 // Check that applicand is a function. |
1564 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); | 1570 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); |
1565 __ BranchOnSmi(r1, &build_args); | 1571 __ BranchOnSmi(r1, &build_args); |
1566 __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE); | 1572 __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE); |
1567 __ b(ne, &build_args); | 1573 __ b(ne, &build_args); |
1568 | 1574 |
1569 // Copy the arguments to this function possibly from the | 1575 // Copy the arguments to this function possibly from the |
(...skipping 3687 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5257 __ bind(&slowcase); | 5263 __ bind(&slowcase); |
5258 __ CallRuntime(Runtime::kRegExpConstructResult, 3); | 5264 __ CallRuntime(Runtime::kRegExpConstructResult, 3); |
5259 | 5265 |
5260 __ bind(&done); | 5266 __ bind(&done); |
5261 } | 5267 } |
5262 frame_->Forget(3); | 5268 frame_->Forget(3); |
5263 frame_->EmitPush(r0); | 5269 frame_->EmitPush(r0); |
5264 } | 5270 } |
5265 | 5271 |
5266 | 5272 |
// Inline runtime call: %_RegExpCloneResult(result).
// Clones a JSRegExpResult object (the return value of RegExp exec) so the
// caller can hand out a fresh copy without re-running the regexp.  Takes one
// argument on the virtual frame; leaves the clone (or the argument itself,
// when it is not an unmodified JSRegExpResult) in r0 / pushed on the frame.
void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) {
  ASSERT_EQ(1, args->length());

  Load(args->at(0));
  frame_->PopToR0();
  {
    VirtualFrame::SpilledScope spilled_scope(frame_);

    Label done;
    Label call_runtime;
    // Smis (e.g. null-ish sentinels from callers) are returned unchanged.
    __ BranchOnSmi(r0, &done);

    // Load JSRegExp map into r1. Check that argument object has this map.
    // Arguments to this function should be results of calling RegExp exec,
    // which is either an unmodified JSRegExpResult or null. Anything not having
    // the unmodified JSRegExpResult map is returned unmodified.
    // This also ensures that elements are fast.

    __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
    __ ldr(r1, ContextOperand(r1, Context::REGEXP_RESULT_MAP_INDEX));
    __ ldr(ip, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ cmp(r1, Operand(ip));
    // Map mismatch: not an unmodified JSRegExpResult — return it as-is.
    __ b(ne, &done);

    // All set, copy the contents to a new object.
    // r2 = untagged start of the new object; r3/r4 are scratch; on allocation
    // failure fall back to the runtime below.
    __ AllocateInNewSpace(JSRegExpResult::kSize,
                          r2,
                          r3,
                          r4,
                          &call_runtime,
                          NO_ALLOCATION_FLAGS);
    // Store RegExpResult map as map of allocated object.
    // The ldm/stm pair below copies exactly five post-map words, so the
    // object must be six pointers in total.
    ASSERT(JSRegExpResult::kSize == 6 * kPointerSize);
    // Copy all fields (map is already in r1) from (untagged) r0 to r2.
    // Change map of elements array (ends up in r4) to be a FixedCOWArray.
    __ bic(r0, r0, Operand(kHeapObjectTagMask));
    // ldm ib: load the five words AFTER the map (increment-before skips the
    // map slot); stm ia: store map (r1) + those five words at the new object.
    __ ldm(ib, r0, r3.bit() | r4.bit() | r5.bit() | r6.bit() | r7.bit());
    __ stm(ia, r2,
           r1.bit() | r3.bit() | r4.bit() | r5.bit() | r6.bit() | r7.bit());
    // The COW map must be old-space so no write barrier is needed below.
    ASSERT(!Heap::InNewSpace(Heap::fixed_cow_array_map()));
    // Elements pointer is the second post-map field, i.e. it landed in r4
    // via the ldm above.  NOTE(review): asserts via JSRegExp::kElementsOffset
    // rather than JSRegExpResult — presumably both inherit JSObject's offset;
    // confirm against objects.h.
    ASSERT(JSRegExp::kElementsOffset == 2 * kPointerSize);
    // Check whether elements array is empty fixed array, and otherwise make
    // it copy-on-write (it never should be empty unless someone is messing
    // with the arguments to the runtime function).
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ add(r0, r2, Operand(kHeapObjectTag));  // Tag result and move it to r0.
    __ cmp(r4, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ str(ip, FieldMemOperand(r4, HeapObject::kMapOffset));
    __ b(&done);
    // Slow path: allocation failed — clone via the runtime instead.
    __ bind(&call_runtime);
    __ push(r0);
    __ CallRuntime(Runtime::kRegExpCloneResult, 1);
    __ bind(&done);
  }
  // Result (clone or original argument) goes back on the virtual frame.
  frame_->EmitPush(r0);
}
| 5332 |
| 5333 |
5267 class DeferredSearchCache: public DeferredCode { | 5334 class DeferredSearchCache: public DeferredCode { |
5268 public: | 5335 public: |
5269 DeferredSearchCache(Register dst, Register cache, Register key) | 5336 DeferredSearchCache(Register dst, Register cache, Register key) |
5270 : dst_(dst), cache_(cache), key_(key) { | 5337 : dst_(dst), cache_(cache), key_(key) { |
5271 set_comment("[ DeferredSearchCache"); | 5338 set_comment("[ DeferredSearchCache"); |
5272 } | 5339 } |
5273 | 5340 |
5274 virtual void Generate(); | 5341 virtual void Generate(); |
5275 | 5342 |
5276 private: | 5343 private: |
(...skipping 1744 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7021 __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 7088 __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |
7022 __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset)); | 7089 __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset)); |
7023 __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); | 7090 __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); |
7024 __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); | 7091 __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); |
7025 __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); | 7092 __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); |
7026 __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); | 7093 __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); |
7027 | 7094 |
7028 // Initialize the code pointer in the function to be the one | 7095 // Initialize the code pointer in the function to be the one |
7029 // found in the shared function info object. | 7096 // found in the shared function info object. |
7030 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); | 7097 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); |
7031 __ str(r3, FieldMemOperand(r0, JSFunction::kCodeOffset)); | 7098 __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 7099 __ str(r3, FieldMemOperand(r0, JSFunction::kCodeEntryOffset)); |
7032 | 7100 |
7033 // Return result. The argument function info has been popped already. | 7101 // Return result. The argument function info has been popped already. |
7034 __ Ret(); | 7102 __ Ret(); |
7035 | 7103 |
7036 // Create a new closure through the slower runtime call. | 7104 // Create a new closure through the slower runtime call. |
7037 __ bind(&gc); | 7105 __ bind(&gc); |
7038 __ Push(cp, r3); | 7106 __ Push(cp, r3); |
7039 __ TailCallRuntime(Runtime::kNewClosure, 2, 1); | 7107 __ TailCallRuntime(Runtime::kNewClosure, 2, 1); |
7040 } | 7108 } |
7041 | 7109 |
(...skipping 4656 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
11698 __ bind(&string_add_runtime); | 11766 __ bind(&string_add_runtime); |
11699 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 11767 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
11700 } | 11768 } |
11701 | 11769 |
11702 | 11770 |
11703 #undef __ | 11771 #undef __ |
11704 | 11772 |
11705 } } // namespace v8::internal | 11773 } } // namespace v8::internal |
11706 | 11774 |
11707 #endif // V8_TARGET_ARCH_ARM | 11775 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |