Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(22)

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 185653004: Experimental parser: merge to r19637 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/code-stubs-arm.h ('k') | src/arm/codegen-arm.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
42 Isolate* isolate, 42 Isolate* isolate,
43 CodeStubInterfaceDescriptor* descriptor) { 43 CodeStubInterfaceDescriptor* descriptor) {
44 static Register registers[] = { r2 }; 44 static Register registers[] = { r2 };
45 descriptor->register_param_count_ = 1; 45 descriptor->register_param_count_ = 1;
46 descriptor->register_params_ = registers; 46 descriptor->register_params_ = registers;
47 descriptor->deoptimization_handler_ = 47 descriptor->deoptimization_handler_ =
48 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry; 48 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
49 } 49 }
50 50
51 51
52 void FastNewContextStub::InitializeInterfaceDescriptor(
53 Isolate* isolate,
54 CodeStubInterfaceDescriptor* descriptor) {
55 static Register registers[] = { r1 };
56 descriptor->register_param_count_ = 1;
57 descriptor->register_params_ = registers;
58 descriptor->deoptimization_handler_ = NULL;
59 }
60
61
52 void ToNumberStub::InitializeInterfaceDescriptor( 62 void ToNumberStub::InitializeInterfaceDescriptor(
53 Isolate* isolate, 63 Isolate* isolate,
54 CodeStubInterfaceDescriptor* descriptor) { 64 CodeStubInterfaceDescriptor* descriptor) {
55 static Register registers[] = { r0 }; 65 static Register registers[] = { r0 };
56 descriptor->register_param_count_ = 1; 66 descriptor->register_param_count_ = 1;
57 descriptor->register_params_ = registers; 67 descriptor->register_params_ = registers;
58 descriptor->deoptimization_handler_ = NULL; 68 descriptor->deoptimization_handler_ = NULL;
59 } 69 }
60 70
61 71
(...skipping 26 matching lines...) Expand all
88 descriptor->register_param_count_ = 4; 98 descriptor->register_param_count_ = 4;
89 descriptor->register_params_ = registers; 99 descriptor->register_params_ = registers;
90 descriptor->deoptimization_handler_ = 100 descriptor->deoptimization_handler_ =
91 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; 101 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
92 } 102 }
93 103
94 104
95 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( 105 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
96 Isolate* isolate, 106 Isolate* isolate,
97 CodeStubInterfaceDescriptor* descriptor) { 107 CodeStubInterfaceDescriptor* descriptor) {
98 static Register registers[] = { r2 }; 108 static Register registers[] = { r2, r3 };
99 descriptor->register_param_count_ = 1; 109 descriptor->register_param_count_ = 2;
100 descriptor->register_params_ = registers; 110 descriptor->register_params_ = registers;
101 descriptor->deoptimization_handler_ = NULL; 111 descriptor->deoptimization_handler_ = NULL;
102 } 112 }
103 113
104 114
105 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( 115 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
106 Isolate* isolate, 116 Isolate* isolate,
107 CodeStubInterfaceDescriptor* descriptor) { 117 CodeStubInterfaceDescriptor* descriptor) {
108 static Register registers[] = { r1, r0 }; 118 static Register registers[] = { r1, r0 };
109 descriptor->register_param_count_ = 2; 119 descriptor->register_param_count_ = 2;
110 descriptor->register_params_ = registers; 120 descriptor->register_params_ = registers;
111 descriptor->deoptimization_handler_ = 121 descriptor->deoptimization_handler_ =
112 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); 122 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
113 } 123 }
114 124
115 125
116 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( 126 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
117 Isolate* isolate, 127 Isolate* isolate,
118 CodeStubInterfaceDescriptor* descriptor) { 128 CodeStubInterfaceDescriptor* descriptor) {
119 static Register registers[] = { r1, r0 }; 129 static Register registers[] = { r1, r0 };
120 descriptor->register_param_count_ = 2; 130 descriptor->register_param_count_ = 2;
121 descriptor->register_params_ = registers; 131 descriptor->register_params_ = registers;
122 descriptor->deoptimization_handler_ = 132 descriptor->deoptimization_handler_ =
123 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); 133 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
124 } 134 }
125 135
126 136
137 void RegExpConstructResultStub::InitializeInterfaceDescriptor(
138 Isolate* isolate,
139 CodeStubInterfaceDescriptor* descriptor) {
140 static Register registers[] = { r2, r1, r0 };
141 descriptor->register_param_count_ = 3;
142 descriptor->register_params_ = registers;
143 descriptor->deoptimization_handler_ =
144 Runtime::FunctionForId(Runtime::kRegExpConstructResult)->entry;
145 }
146
147
127 void LoadFieldStub::InitializeInterfaceDescriptor( 148 void LoadFieldStub::InitializeInterfaceDescriptor(
128 Isolate* isolate, 149 Isolate* isolate,
129 CodeStubInterfaceDescriptor* descriptor) { 150 CodeStubInterfaceDescriptor* descriptor) {
130 static Register registers[] = { r0 }; 151 static Register registers[] = { r0 };
131 descriptor->register_param_count_ = 1; 152 descriptor->register_param_count_ = 1;
132 descriptor->register_params_ = registers; 153 descriptor->register_params_ = registers;
133 descriptor->deoptimization_handler_ = NULL; 154 descriptor->deoptimization_handler_ = NULL;
134 } 155 }
135 156
136 157
137 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( 158 void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
138 Isolate* isolate, 159 Isolate* isolate,
139 CodeStubInterfaceDescriptor* descriptor) { 160 CodeStubInterfaceDescriptor* descriptor) {
140 static Register registers[] = { r1 }; 161 static Register registers[] = { r1 };
141 descriptor->register_param_count_ = 1; 162 descriptor->register_param_count_ = 1;
142 descriptor->register_params_ = registers; 163 descriptor->register_params_ = registers;
143 descriptor->deoptimization_handler_ = NULL; 164 descriptor->deoptimization_handler_ = NULL;
144 } 165 }
145 166
146 167
147 void KeyedArrayCallStub::InitializeInterfaceDescriptor(
148 Isolate* isolate,
149 CodeStubInterfaceDescriptor* descriptor) {
150 static Register registers[] = { r2 };
151 descriptor->register_param_count_ = 1;
152 descriptor->register_params_ = registers;
153 descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
154 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
155 descriptor->deoptimization_handler_ =
156 FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
157 }
158
159
160 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( 168 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
161 Isolate* isolate, 169 Isolate* isolate,
162 CodeStubInterfaceDescriptor* descriptor) { 170 CodeStubInterfaceDescriptor* descriptor) {
163 static Register registers[] = { r2, r1, r0 }; 171 static Register registers[] = { r2, r1, r0 };
164 descriptor->register_param_count_ = 3; 172 descriptor->register_param_count_ = 3;
165 descriptor->register_params_ = registers; 173 descriptor->register_params_ = registers;
166 descriptor->deoptimization_handler_ = 174 descriptor->deoptimization_handler_ =
167 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); 175 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
168 } 176 }
169 177
(...skipping 23 matching lines...) Expand all
193 } 201 }
194 202
195 203
196 static void InitializeArrayConstructorDescriptor( 204 static void InitializeArrayConstructorDescriptor(
197 Isolate* isolate, 205 Isolate* isolate,
198 CodeStubInterfaceDescriptor* descriptor, 206 CodeStubInterfaceDescriptor* descriptor,
199 int constant_stack_parameter_count) { 207 int constant_stack_parameter_count) {
200 // register state 208 // register state
201 // r0 -- number of arguments 209 // r0 -- number of arguments
202 // r1 -- function 210 // r1 -- function
203 // r2 -- type info cell with elements kind 211 // r2 -- allocation site with elements kind
204 static Register registers_variable_args[] = { r1, r2, r0 }; 212 static Register registers_variable_args[] = { r1, r2, r0 };
205 static Register registers_no_args[] = { r1, r2 }; 213 static Register registers_no_args[] = { r1, r2 };
206 214
207 if (constant_stack_parameter_count == 0) { 215 if (constant_stack_parameter_count == 0) {
208 descriptor->register_param_count_ = 2; 216 descriptor->register_param_count_ = 2;
209 descriptor->register_params_ = registers_no_args; 217 descriptor->register_params_ = registers_no_args;
210 } else { 218 } else {
211 // stack param count needs (constructor pointer, and single argument) 219 // stack param count needs (constructor pointer, and single argument)
212 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; 220 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
213 descriptor->stack_parameter_count_ = r0; 221 descriptor->stack_parameter_count_ = r0;
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after
343 Isolate* isolate, 351 Isolate* isolate,
344 CodeStubInterfaceDescriptor* descriptor) { 352 CodeStubInterfaceDescriptor* descriptor) {
345 static Register registers[] = { r2, r1, r0 }; 353 static Register registers[] = { r2, r1, r0 };
346 descriptor->register_param_count_ = 3; 354 descriptor->register_param_count_ = 3;
347 descriptor->register_params_ = registers; 355 descriptor->register_params_ = registers;
348 descriptor->deoptimization_handler_ = 356 descriptor->deoptimization_handler_ =
349 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite); 357 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
350 } 358 }
351 359
352 360
353 void NewStringAddStub::InitializeInterfaceDescriptor( 361 void StringAddStub::InitializeInterfaceDescriptor(
354 Isolate* isolate, 362 Isolate* isolate,
355 CodeStubInterfaceDescriptor* descriptor) { 363 CodeStubInterfaceDescriptor* descriptor) {
356 static Register registers[] = { r1, r0 }; 364 static Register registers[] = { r1, r0 };
357 descriptor->register_param_count_ = 2; 365 descriptor->register_param_count_ = 2;
358 descriptor->register_params_ = registers; 366 descriptor->register_params_ = registers;
359 descriptor->deoptimization_handler_ = 367 descriptor->deoptimization_handler_ =
360 Runtime::FunctionForId(Runtime::kStringAdd)->entry; 368 Runtime::FunctionForId(Runtime::kStringAdd)->entry;
361 } 369 }
362 370
363 371
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
410 }; 418 };
411 static Representation representations[] = { 419 static Representation representations[] = {
412 Representation::Tagged(), // context 420 Representation::Tagged(), // context
413 Representation::Tagged(), // name 421 Representation::Tagged(), // name
414 }; 422 };
415 descriptor->register_param_count_ = 2; 423 descriptor->register_param_count_ = 2;
416 descriptor->register_params_ = registers; 424 descriptor->register_params_ = registers;
417 descriptor->param_representations_ = representations; 425 descriptor->param_representations_ = representations;
418 descriptor->platform_specific_descriptor_ = &noInlineDescriptor; 426 descriptor->platform_specific_descriptor_ = &noInlineDescriptor;
419 } 427 }
428 {
429 CallInterfaceDescriptor* descriptor =
430 isolate->call_descriptor(Isolate::CallHandler);
431 static Register registers[] = { cp, // context
432 r0, // receiver
433 };
434 static Representation representations[] = {
435 Representation::Tagged(), // context
436 Representation::Tagged(), // receiver
437 };
438 descriptor->register_param_count_ = 2;
439 descriptor->register_params_ = registers;
440 descriptor->param_representations_ = representations;
441 descriptor->platform_specific_descriptor_ = &default_descriptor;
442 }
443 {
444 CallInterfaceDescriptor* descriptor =
445 isolate->call_descriptor(Isolate::ApiFunctionCall);
446 static Register registers[] = { r0, // callee
447 r4, // call_data
448 r2, // holder
449 r1, // api_function_address
450 cp, // context
451 };
452 static Representation representations[] = {
453 Representation::Tagged(), // callee
454 Representation::Tagged(), // call_data
455 Representation::Tagged(), // holder
456 Representation::External(), // api_function_address
457 Representation::Tagged(), // context
458 };
459 descriptor->register_param_count_ = 5;
460 descriptor->register_params_ = registers;
461 descriptor->param_representations_ = representations;
462 descriptor->platform_specific_descriptor_ = &default_descriptor;
463 }
420 } 464 }
421 465
422 466
423 #define __ ACCESS_MASM(masm) 467 #define __ ACCESS_MASM(masm)
424 468
425 469
426 static void EmitIdenticalObjectComparison(MacroAssembler* masm, 470 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
427 Label* slow, 471 Label* slow,
428 Condition cond); 472 Condition cond);
429 static void EmitSmiNonsmiComparison(MacroAssembler* masm, 473 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
(...skipping 24 matching lines...) Expand all
454 __ push(descriptor->register_params_[i]); 498 __ push(descriptor->register_params_[i]);
455 } 499 }
456 ExternalReference miss = descriptor->miss_handler(); 500 ExternalReference miss = descriptor->miss_handler();
457 __ CallExternalReference(miss, descriptor->register_param_count_); 501 __ CallExternalReference(miss, descriptor->register_param_count_);
458 } 502 }
459 503
460 __ Ret(); 504 __ Ret();
461 } 505 }
462 506
463 507
// Allocates a function context with slots_ slots in new space.  On entry
// the closure is on the stack at [sp].  On success the new context is
// returned in cp (also left in r0) and the stack argument is popped; if
// allocation fails, the stub tail-calls Runtime::kNewFunctionContext.
void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;

  // Attempt to allocate the context in new space.
  __ Allocate(FixedArray::SizeFor(length), r0, r1, r2, &gc, TAG_OBJECT);
  // r0: start of the newly allocated context (tagged).

  // Load the function from the stack.
  __ ldr(r3, MemOperand(sp, 0));

  // Set up the object header.
  __ LoadRoot(r1, Heap::kFunctionContextMapRootIndex);
  __ mov(r2, Operand(Smi::FromInt(length)));
  __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));

  // Set up the fixed slots, copy the global object from the previous context.
  __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(r1, Operand(Smi::FromInt(0)));  // Smi 0 marks "no extension".
  __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
  __ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
  __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
  __ str(r2, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ str(r1, MemOperand(r0, Context::SlotOffset(i)));
  }

  // Remove the on-stack argument and return.
  __ mov(cp, r0);
  __ pop();
  __ Ret();

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}
504
505
// Allocates a block context with slots_ slots in new space.  On success
// the new context is returned in cp (also left in r0) and both stack
// arguments are removed; if allocation fails, the stub tail-calls
// Runtime::kPushBlockContext.
void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [sp]: function.
  // [sp + kPointerSize]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate(FixedArray::SizeFor(length), r0, r1, r2, &gc, TAG_OBJECT);
  // r0: start of the newly allocated context (tagged).

  // Load the function from the stack.
  __ ldr(r3, MemOperand(sp, 0));

  // Load the serialized scope info from the stack.
  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

  // Set up the object header.
  __ LoadRoot(r2, Heap::kBlockContextMapRootIndex);
  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ mov(r2, Operand(Smi::FromInt(length)));
  __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));

  // If this block context is nested in the native context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the native context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(r3, &after_sentinel);
  if (FLAG_debug_code) {
    __ cmp(r3, Operand::Zero());
    __ Assert(eq, kExpected0AsASmiSentinel);
  }
  // Replace the sentinel with the native context's closure.
  __ ldr(r3, GlobalObjectOperand());
  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
  __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Set up the fixed slots, copy the global object from the previous context.
  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX));
  __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX));
  __ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX));
  __ str(r2, ContextOperand(r0, Context::GLOBAL_OBJECT_INDEX));

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
    __ str(r1, ContextOperand(r0, i + Context::MIN_CONTEXT_SLOTS));
  }

  // Remove the on-stack argument and return.
  __ mov(cp, r0);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}
566
567
568 // Takes a Smi and converts to an IEEE 64 bit floating point value in two 508 // Takes a Smi and converts to an IEEE 64 bit floating point value in two
569 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and 509 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
570 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a 510 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
571 // scratch register. Destroys the source register. No GC occurs during this 511 // scratch register. Destroys the source register. No GC occurs during this
572 // stub so you don't have to set up the frame. 512 // stub so you don't have to set up the frame.
573 class ConvertToDoubleStub : public PlatformCodeStub { 513 class ConvertToDoubleStub : public PlatformCodeStub {
574 public: 514 public:
575 ConvertToDoubleStub(Register result_reg_1, 515 ConvertToDoubleStub(Register result_reg_1,
576 Register result_reg_2, 516 Register result_reg_2,
577 Register source_reg, 517 Register source_reg,
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after
655 exponent, 595 exponent,
656 Operand(source_, LSR, 32 - HeapNumber::kMantissaBitsInTopWord)); 596 Operand(source_, LSR, 32 - HeapNumber::kMantissaBitsInTopWord));
657 __ Ret(); 597 __ Ret();
658 } 598 }
659 599
660 600
661 void DoubleToIStub::Generate(MacroAssembler* masm) { 601 void DoubleToIStub::Generate(MacroAssembler* masm) {
662 Label out_of_range, only_low, negate, done; 602 Label out_of_range, only_low, negate, done;
663 Register input_reg = source(); 603 Register input_reg = source();
664 Register result_reg = destination(); 604 Register result_reg = destination();
605 ASSERT(is_truncating());
665 606
666 int double_offset = offset(); 607 int double_offset = offset();
667 // Account for saved regs if input is sp. 608 // Account for saved regs if input is sp.
668 if (input_reg.is(sp)) double_offset += 2 * kPointerSize; 609 if (input_reg.is(sp)) double_offset += 3 * kPointerSize;
669 610
670 // Immediate values for this stub fit in instructions, so it's safe to use ip. 611 Register scratch = GetRegisterThatIsNotOneOf(input_reg, result_reg);
671 Register scratch = ip;
672 Register scratch_low = 612 Register scratch_low =
673 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch); 613 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
674 Register scratch_high = 614 Register scratch_high =
675 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch_low); 615 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch_low);
676 LowDwVfpRegister double_scratch = kScratchDoubleReg; 616 LowDwVfpRegister double_scratch = kScratchDoubleReg;
677 617
678 __ Push(scratch_high, scratch_low); 618 __ Push(scratch_high, scratch_low, scratch);
679 619
680 if (!skip_fastpath()) { 620 if (!skip_fastpath()) {
681 // Load double input. 621 // Load double input.
682 __ vldr(double_scratch, MemOperand(input_reg, double_offset)); 622 __ vldr(double_scratch, MemOperand(input_reg, double_offset));
683 __ vmov(scratch_low, scratch_high, double_scratch); 623 __ vmov(scratch_low, scratch_high, double_scratch);
684 624
685 // Do fast-path convert from double to int. 625 // Do fast-path convert from double to int.
686 __ vcvt_s32_f64(double_scratch.low(), double_scratch); 626 __ vcvt_s32_f64(double_scratch.low(), double_scratch);
687 __ vmov(result_reg, double_scratch.low()); 627 __ vmov(result_reg, double_scratch.low());
688 628
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
751 // scratch_high LSR 31 equals zero. 691 // scratch_high LSR 31 equals zero.
752 // New result = (result eor 0) + 0 = result. 692 // New result = (result eor 0) + 0 = result.
753 // If the input was negative, we have to negate the result. 693 // If the input was negative, we have to negate the result.
754 // Input_high ASR 31 equals 0xffffffff and scratch_high LSR 31 equals 1. 694 // Input_high ASR 31 equals 0xffffffff and scratch_high LSR 31 equals 1.
755 // New result = (result eor 0xffffffff) + 1 = 0 - result. 695 // New result = (result eor 0xffffffff) + 1 = 0 - result.
756 __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31)); 696 __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31));
757 __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31)); 697 __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31));
758 698
759 __ bind(&done); 699 __ bind(&done);
760 700
761 __ Pop(scratch_high, scratch_low); 701 __ Pop(scratch_high, scratch_low, scratch);
762 __ Ret(); 702 __ Ret();
763 } 703 }
764 704
765 705
766 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( 706 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
767 Isolate* isolate) { 707 Isolate* isolate) {
768 WriteInt32ToHeapNumberStub stub1(r1, r0, r2); 708 WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
769 WriteInt32ToHeapNumberStub stub2(r2, r0, r3); 709 WriteInt32ToHeapNumberStub stub2(r2, r0, r3);
770 stub1.GetCode(isolate); 710 stub1.GetCode(isolate);
771 stub2.GetCode(isolate); 711 stub2.GetCode(isolate);
(...skipping 2285 matching lines...) Expand 10 before | Expand all | Expand 10 after
3057 // (9) Sliced string. Replace subject with parent. Go to (4). 2997 // (9) Sliced string. Replace subject with parent. Go to (4).
3058 // Load offset into r9 and replace subject string with parent. 2998 // Load offset into r9 and replace subject string with parent.
3059 __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset)); 2999 __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
3060 __ SmiUntag(r9); 3000 __ SmiUntag(r9);
3061 __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); 3001 __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
3062 __ jmp(&check_underlying); // Go to (4). 3002 __ jmp(&check_underlying); // Go to (4).
3063 #endif // V8_INTERPRETED_REGEXP 3003 #endif // V8_INTERPRETED_REGEXP
3064 } 3004 }
3065 3005
3066 3006
// Builds a JSRegExpResult object (a JSArray with in-object index/input
// properties) whose elements are filled with undefined.  Stack layout on
// entry: [sp]: input string, [sp + 4]: index, [sp + 8]: length (smi).
// Falls back to Runtime::kRegExpConstructResult when the length is not a
// smi, exceeds kMaxInlineLength, or allocation fails.
void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  Factory* factory = masm->isolate()->factory();

  // Load the requested length and check it is a small enough smi.
  __ ldr(r1, MemOperand(sp, kPointerSize * 2));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  __ JumpIfNotSmi(r1, &slowcase);
  __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength)));
  __ b(hi, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2.
  // Allocate RegExpResult followed by FixedArray with size in r2.
  // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
  // Elements: [Map][Length][..elements..]
  // Size of JSArray with two in-object properties and the header of a
  // FixedArray.
  int objects_size =
      (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
  __ SmiUntag(r5, r1);
  __ add(r2, r5, Operand(objects_size));
  __ Allocate(
      r2,  // In: Size, in words.
      r0,  // Out: Start of allocation (tagged).
      r3,  // Scratch register.
      r4,  // Scratch register.
      &slowcase,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
  // r0: Start of allocated area, object-tagged.
  // r1: Number of elements in array, as smi.
  // r5: Number of elements, untagged.

  // Set JSArray map to global.regexp_result_map().
  // Set empty properties FixedArray.
  // Set elements to point to FixedArray allocated right after the JSArray.
  // Interleave operations for better latency.
  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ add(r3, r0, Operand(JSRegExpResult::kSize));
  __ mov(r4, Operand(factory->empty_fixed_array()));
  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));

  // Set input, index and length fields from arguments.
  __ ldr(r1, MemOperand(sp, kPointerSize * 0));
  __ ldr(r2, MemOperand(sp, kPointerSize * 1));
  __ ldr(r6, MemOperand(sp, kPointerSize * 2));
  __ str(r1, FieldMemOperand(r0, JSRegExpResult::kInputOffset));
  __ str(r2, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
  __ str(r6, FieldMemOperand(r0, JSArray::kLengthOffset));

  // Fill out the elements FixedArray.
  // r0: JSArray, tagged.
  // r3: FixedArray, tagged.
  // r5: Number of elements in array, untagged.

  // Set map.
  __ mov(r2, Operand(factory->fixed_array_map()));
  __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  // Set FixedArray length.
  __ SmiTag(r6, r5);
  __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
  // Fill contents of fixed-array with undefined.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // Fill fixed array elements with undefined.
  // r0: JSArray, tagged.
  // r2: undefined.
  // r3: Start of elements in FixedArray.
  // r5: Number of elements to fill.
  Label loop;
  __ cmp(r5, Operand::Zero());
  __ bind(&loop);
  __ b(le, &done);  // Jump if r5 is negative or zero.
  __ sub(r5, r5, Operand(1), SetCC);  // Decrement and update flags for b(le).
  __ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2));
  __ jmp(&loop);

  __ bind(&done);
  // Drop the three stack arguments and return the result in r0.
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}
3155
3156
3157 static void GenerateRecordCallTarget(MacroAssembler* masm) { 3007 static void GenerateRecordCallTarget(MacroAssembler* masm) {
3158 // Cache the called function in a global property cell. Cache states 3008 // Cache the called function in a feedback vector slot. Cache states
3159 // are uninitialized, monomorphic (indicated by a JSFunction), and 3009 // are uninitialized, monomorphic (indicated by a JSFunction), and
3160 // megamorphic. 3010 // megamorphic.
3161 // r0 : number of arguments to the construct function 3011 // r0 : number of arguments to the construct function
3162 // r1 : the function to call 3012 // r1 : the function to call
3163 // r2 : cache cell for call target 3013 // r2 : Feedback vector
3014 // r3 : slot in feedback vector (Smi)
3164 Label initialize, done, miss, megamorphic, not_array_function; 3015 Label initialize, done, miss, megamorphic, not_array_function;
3165 3016
3166 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), 3017 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
3167 masm->isolate()->heap()->undefined_value()); 3018 masm->isolate()->heap()->undefined_value());
3168 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), 3019 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
3169 masm->isolate()->heap()->the_hole_value()); 3020 masm->isolate()->heap()->the_hole_value());
3170 3021
3171 // Load the cache state into r3. 3022 // Load the cache state into r4.
3172 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); 3023 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
3024 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
3173 3025
3174 // A monomorphic cache hit or an already megamorphic state: invoke the 3026 // A monomorphic cache hit or an already megamorphic state: invoke the
3175 // function without changing the state. 3027 // function without changing the state.
3176 __ cmp(r3, r1); 3028 __ cmp(r4, r1);
3177 __ b(eq, &done); 3029 __ b(eq, &done);
3178 3030
3179 // If we came here, we need to see if we are the array function. 3031 // If we came here, we need to see if we are the array function.
3180 // If we didn't have a matching function, and we didn't find the megamorph 3032 // If we didn't have a matching function, and we didn't find the megamorph
3181 // sentinel, then we have in the cell either some other function or an 3033 // sentinel, then we have in the slot either some other function or an
3182 // AllocationSite. Do a map check on the object in ecx. 3034 // AllocationSite. Do a map check on the object in ecx.
3183 __ ldr(r5, FieldMemOperand(r3, 0)); 3035 __ ldr(r5, FieldMemOperand(r4, 0));
3184 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); 3036 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
3185 __ b(ne, &miss); 3037 __ b(ne, &miss);
3186 3038
3187 // Make sure the function is the Array() function 3039 // Make sure the function is the Array() function
3188 __ LoadArrayFunction(r3); 3040 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
3189 __ cmp(r1, r3); 3041 __ cmp(r1, r4);
3190 __ b(ne, &megamorphic); 3042 __ b(ne, &megamorphic);
3191 __ jmp(&done); 3043 __ jmp(&done);
3192 3044
3193 __ bind(&miss); 3045 __ bind(&miss);
3194 3046
3195 // A monomorphic miss (i.e, here the cache is not uninitialized) goes 3047 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
3196 // megamorphic. 3048 // megamorphic.
3197 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 3049 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
3198 __ b(eq, &initialize); 3050 __ b(eq, &initialize);
3199 // MegamorphicSentinel is an immortal immovable object (undefined) so no 3051 // MegamorphicSentinel is an immortal immovable object (undefined) so no
3200 // write-barrier is needed. 3052 // write-barrier is needed.
3201 __ bind(&megamorphic); 3053 __ bind(&megamorphic);
3054 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
3202 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 3055 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3203 __ str(ip, FieldMemOperand(r2, Cell::kValueOffset)); 3056 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
3204 __ jmp(&done); 3057 __ jmp(&done);
3205 3058
3206 // An uninitialized cache is patched with the function or sentinel to 3059 // An uninitialized cache is patched with the function or sentinel to
3207 // indicate the ElementsKind if function is the Array constructor. 3060 // indicate the ElementsKind if function is the Array constructor.
3208 __ bind(&initialize); 3061 __ bind(&initialize);
3209 // Make sure the function is the Array() function 3062 // Make sure the function is the Array() function
3210 __ LoadArrayFunction(r3); 3063 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
3211 __ cmp(r1, r3); 3064 __ cmp(r1, r4);
3212 __ b(ne, &not_array_function); 3065 __ b(ne, &not_array_function);
3213 3066
3214 // The target function is the Array constructor, 3067 // The target function is the Array constructor,
3215 // Create an AllocationSite if we don't already have it, store it in the cell 3068 // Create an AllocationSite if we don't already have it, store it in the slot.
3216 { 3069 {
3217 FrameScope scope(masm, StackFrame::INTERNAL); 3070 FrameScope scope(masm, StackFrame::INTERNAL);
3218 3071
3219 // Arguments register must be smi-tagged to call out. 3072 // Arguments register must be smi-tagged to call out.
3220 __ SmiTag(r0); 3073 __ SmiTag(r0);
3221 __ Push(r2, r1, r0); 3074 __ Push(r3, r2, r1, r0);
3222 3075
3223 CreateAllocationSiteStub create_stub; 3076 CreateAllocationSiteStub create_stub;
3224 __ CallStub(&create_stub); 3077 __ CallStub(&create_stub);
3225 3078
3226 __ Pop(r2, r1, r0); 3079 __ Pop(r3, r2, r1, r0);
3227 __ SmiUntag(r0); 3080 __ SmiUntag(r0);
3228 } 3081 }
3229 __ b(&done); 3082 __ b(&done);
3230 3083
3231 __ bind(&not_array_function); 3084 __ bind(&not_array_function);
3232 __ str(r1, FieldMemOperand(r2, Cell::kValueOffset)); 3085
3233 // No need for a write barrier here - cells are rescanned. 3086 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
3087 __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3088 __ str(r1, MemOperand(r4, 0));
3089
3090 __ Push(r4, r2, r1);
3091 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs,
3092 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
3093 __ Pop(r4, r2, r1);
3234 3094
3235 __ bind(&done); 3095 __ bind(&done);
3236 } 3096 }
3237 3097
3238 3098
3239 void CallFunctionStub::Generate(MacroAssembler* masm) { 3099 void CallFunctionStub::Generate(MacroAssembler* masm) {
3240 // r1 : the function to call 3100 // r1 : the function to call
3241 // r2 : cache cell for call target 3101 // r2 : feedback vector
3242 Label slow, non_function; 3102 // r3 : (only if r2 is not undefined) slot in feedback vector (Smi)
3103 Label slow, non_function, wrap, cont;
3243 3104
3244 // Check that the function is really a JavaScript function. 3105 if (NeedsChecks()) {
3245 // r1: pushed function (to be verified) 3106 // Check that the function is really a JavaScript function.
3246 __ JumpIfSmi(r1, &non_function); 3107 // r1: pushed function (to be verified)
3108 __ JumpIfSmi(r1, &non_function);
3247 3109
3248 // Goto slow case if we do not have a function. 3110 // Goto slow case if we do not have a function.
3249 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); 3111 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
3250 __ b(ne, &slow); 3112 __ b(ne, &slow);
3251 3113
3252 if (RecordCallTarget()) { 3114 if (RecordCallTarget()) {
3253 GenerateRecordCallTarget(masm); 3115 GenerateRecordCallTarget(masm);
3116 }
3254 } 3117 }
3255 3118
3256 // Fast-case: Invoke the function now. 3119 // Fast-case: Invoke the function now.
3257 // r1: pushed function 3120 // r1: pushed function
3258 ParameterCount actual(argc_); 3121 ParameterCount actual(argc_);
3259 3122
3123 if (CallAsMethod()) {
3124 if (NeedsChecks()) {
3125 // Do not transform the receiver for strict mode functions.
3126 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3127 __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
3128 __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
3129 kSmiTagSize)));
3130 __ b(ne, &cont);
3131
3132 // Do not transform the receiver for native (Compilerhints already in r3).
3133 __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
3134 __ b(ne, &cont);
3135 }
3136
3137 // Compute the receiver in non-strict mode.
3138 __ ldr(r3, MemOperand(sp, argc_ * kPointerSize));
3139
3140 if (NeedsChecks()) {
3141 __ JumpIfSmi(r3, &wrap);
3142 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3143 __ b(lt, &wrap);
3144 } else {
3145 __ jmp(&wrap);
3146 }
3147
3148 __ bind(&cont);
3149 }
3260 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); 3150 __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
3261 3151
3262 // Slow-case: Non-function called. 3152 if (NeedsChecks()) {
3263 __ bind(&slow); 3153 // Slow-case: Non-function called.
3264 if (RecordCallTarget()) { 3154 __ bind(&slow);
3265 // If there is a call target cache, mark it megamorphic in the 3155 if (RecordCallTarget()) {
3266 // non-function case. MegamorphicSentinel is an immortal immovable 3156 // If there is a call target cache, mark it megamorphic in the
3267 // object (undefined) so no write barrier is needed. 3157 // non-function case. MegamorphicSentinel is an immortal immovable
3268 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), 3158 // object (undefined) so no write barrier is needed.
3269 masm->isolate()->heap()->undefined_value()); 3159 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
3270 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 3160 masm->isolate()->heap()->undefined_value());
3271 __ str(ip, FieldMemOperand(r2, Cell::kValueOffset)); 3161 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
3272 } 3162 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3273 // Check for function proxy. 3163 __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
3274 __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); 3164 }
3275 __ b(ne, &non_function); 3165 // Check for function proxy.
3276 __ push(r1); // put proxy as additional argument 3166 __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
3277 __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32)); 3167 __ b(ne, &non_function);
3278 __ mov(r2, Operand::Zero()); 3168 __ push(r1); // put proxy as additional argument
3279 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); 3169 __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32));
3280 { 3170 __ mov(r2, Operand::Zero());
3281 Handle<Code> adaptor = 3171 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
3282 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 3172 {
3283 __ Jump(adaptor, RelocInfo::CODE_TARGET); 3173 Handle<Code> adaptor =
3174 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
3175 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3176 }
3177
3178 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3179 // of the original receiver from the call site).
3180 __ bind(&non_function);
3181 __ str(r1, MemOperand(sp, argc_ * kPointerSize));
3182 __ mov(r0, Operand(argc_)); // Set up the number of arguments.
3183 __ mov(r2, Operand::Zero());
3184 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
3185 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3186 RelocInfo::CODE_TARGET);
3284 } 3187 }
3285 3188
3286 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 3189 if (CallAsMethod()) {
3287 // of the original receiver from the call site). 3190 __ bind(&wrap);
3288 __ bind(&non_function); 3191 // Wrap the receiver and patch it back onto the stack.
3289 __ str(r1, MemOperand(sp, argc_ * kPointerSize)); 3192 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
3290 __ mov(r0, Operand(argc_)); // Set up the number of arguments. 3193 __ Push(r1, r3);
3291 __ mov(r2, Operand::Zero()); 3194 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
3292 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); 3195 __ pop(r1);
3293 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 3196 }
3294 RelocInfo::CODE_TARGET); 3197 __ str(r0, MemOperand(sp, argc_ * kPointerSize));
3198 __ jmp(&cont);
3199 }
3295 } 3200 }
3296 3201
3297 3202
3298 void CallConstructStub::Generate(MacroAssembler* masm) { 3203 void CallConstructStub::Generate(MacroAssembler* masm) {
3299 // r0 : number of arguments 3204 // r0 : number of arguments
3300 // r1 : the function to call 3205 // r1 : the function to call
3301 // r2 : cache cell for call target 3206 // r2 : feedback vector
3207 // r3 : (only if r2 is not undefined) slot in feedback vector (Smi)
3302 Label slow, non_function_call; 3208 Label slow, non_function_call;
3303 3209
3304 // Check that the function is not a smi. 3210 // Check that the function is not a smi.
3305 __ JumpIfSmi(r1, &non_function_call); 3211 __ JumpIfSmi(r1, &non_function_call);
3306 // Check that the function is a JSFunction. 3212 // Check that the function is a JSFunction.
3307 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); 3213 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
3308 __ b(ne, &slow); 3214 __ b(ne, &slow);
3309 3215
3310 if (RecordCallTarget()) { 3216 if (RecordCallTarget()) {
3311 GenerateRecordCallTarget(masm); 3217 GenerateRecordCallTarget(masm);
3312 } 3218 }
3313 3219
3314 // Jump to the function-specific construct stub. 3220 // Jump to the function-specific construct stub.
3315 Register jmp_reg = r3; 3221 Register jmp_reg = r4;
3316 __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 3222 __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3317 __ ldr(jmp_reg, FieldMemOperand(jmp_reg, 3223 __ ldr(jmp_reg, FieldMemOperand(jmp_reg,
3318 SharedFunctionInfo::kConstructStubOffset)); 3224 SharedFunctionInfo::kConstructStubOffset));
3319 __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); 3225 __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
3320 3226
3321 // r0: number of arguments 3227 // r0: number of arguments
3322 // r1: called object 3228 // r1: called object
3323 // r3: object type 3229 // r4: object type
3324 Label do_call; 3230 Label do_call;
3325 __ bind(&slow); 3231 __ bind(&slow);
3326 __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); 3232 __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
3327 __ b(ne, &non_function_call); 3233 __ b(ne, &non_function_call);
3328 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); 3234 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
3329 __ jmp(&do_call); 3235 __ jmp(&do_call);
3330 3236
3331 __ bind(&non_function_call); 3237 __ bind(&non_function_call);
3332 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 3238 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
3333 __ bind(&do_call); 3239 __ bind(&do_call);
3334 // Set expected number of arguments to zero (not changing r0). 3240 // Set expected number of arguments to zero (not changing r0).
3335 __ mov(r2, Operand::Zero()); 3241 __ mov(r2, Operand::Zero());
3336 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 3242 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after
3462 __ push(code_); 3368 __ push(code_);
3463 __ CallRuntime(Runtime::kCharFromCode, 1); 3369 __ CallRuntime(Runtime::kCharFromCode, 1);
3464 __ Move(result_, r0); 3370 __ Move(result_, r0);
3465 call_helper.AfterCall(masm); 3371 call_helper.AfterCall(masm);
3466 __ jmp(&exit_); 3372 __ jmp(&exit_);
3467 3373
3468 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); 3374 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3469 } 3375 }
3470 3376
3471 3377
3472 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
3473 Register dest,
3474 Register src,
3475 Register count,
3476 Register scratch,
3477 bool ascii) {
3478 Label loop;
3479 Label done;
3480 // This loop just copies one character at a time, as it is only used for very
3481 // short strings.
3482 if (!ascii) {
3483 __ add(count, count, Operand(count), SetCC);
3484 } else {
3485 __ cmp(count, Operand::Zero());
3486 }
3487 __ b(eq, &done);
3488
3489 __ bind(&loop);
3490 __ ldrb(scratch, MemOperand(src, 1, PostIndex));
3491 // Perform sub between load and dependent store to get the load time to
3492 // complete.
3493 __ sub(count, count, Operand(1), SetCC);
3494 __ strb(scratch, MemOperand(dest, 1, PostIndex));
3495 // last iteration.
3496 __ b(gt, &loop);
3497
3498 __ bind(&done);
3499 }
3500
3501
3502 enum CopyCharactersFlags { 3378 enum CopyCharactersFlags {
3503 COPY_ASCII = 1, 3379 COPY_ASCII = 1,
3504 DEST_ALWAYS_ALIGNED = 2 3380 DEST_ALWAYS_ALIGNED = 2
3505 }; 3381 };
3506 3382
3507 3383
3508 void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm, 3384 void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
3509 Register dest, 3385 Register dest,
3510 Register src, 3386 Register src,
3511 Register count, 3387 Register count,
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after
3639 __ cmp(dest, Operand(limit)); 3515 __ cmp(dest, Operand(limit));
3640 __ ldrb(scratch1, MemOperand(src, 1, PostIndex), lt); 3516 __ ldrb(scratch1, MemOperand(src, 1, PostIndex), lt);
3641 __ b(ge, &done); 3517 __ b(ge, &done);
3642 __ strb(scratch1, MemOperand(dest, 1, PostIndex)); 3518 __ strb(scratch1, MemOperand(dest, 1, PostIndex));
3643 __ b(&byte_loop); 3519 __ b(&byte_loop);
3644 3520
3645 __ bind(&done); 3521 __ bind(&done);
3646 } 3522 }
3647 3523
3648 3524
3649 void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
3650 Register c1,
3651 Register c2,
3652 Register scratch1,
3653 Register scratch2,
3654 Register scratch3,
3655 Register scratch4,
3656 Register scratch5,
3657 Label* not_found) {
3658 // Register scratch3 is the general scratch register in this function.
3659 Register scratch = scratch3;
3660
3661 // Make sure that both characters are not digits as such strings has a
3662 // different hash algorithm. Don't try to look for these in the string table.
3663 Label not_array_index;
3664 __ sub(scratch, c1, Operand(static_cast<int>('0')));
3665 __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
3666 __ b(hi, &not_array_index);
3667 __ sub(scratch, c2, Operand(static_cast<int>('0')));
3668 __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
3669
3670 // If check failed combine both characters into single halfword.
3671 // This is required by the contract of the method: code at the
3672 // not_found branch expects this combination in c1 register
3673 __ orr(c1, c1, Operand(c2, LSL, kBitsPerByte), LeaveCC, ls);
3674 __ b(ls, not_found);
3675
3676 __ bind(&not_array_index);
3677 // Calculate the two character string hash.
3678 Register hash = scratch1;
3679 StringHelper::GenerateHashInit(masm, hash, c1);
3680 StringHelper::GenerateHashAddCharacter(masm, hash, c2);
3681 StringHelper::GenerateHashGetHash(masm, hash);
3682
3683 // Collect the two characters in a register.
3684 Register chars = c1;
3685 __ orr(chars, chars, Operand(c2, LSL, kBitsPerByte));
3686
3687 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3688 // hash: hash of two character string.
3689
3690 // Load string table
3691 // Load address of first element of the string table.
3692 Register string_table = c2;
3693 __ LoadRoot(string_table, Heap::kStringTableRootIndex);
3694
3695 Register undefined = scratch4;
3696 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
3697
3698 // Calculate capacity mask from the string table capacity.
3699 Register mask = scratch2;
3700 __ ldr(mask, FieldMemOperand(string_table, StringTable::kCapacityOffset));
3701 __ mov(mask, Operand(mask, ASR, 1));
3702 __ sub(mask, mask, Operand(1));
3703
3704 // Calculate untagged address of the first element of the string table.
3705 Register first_string_table_element = string_table;
3706 __ add(first_string_table_element, string_table,
3707 Operand(StringTable::kElementsStartOffset - kHeapObjectTag));
3708
3709 // Registers
3710 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3711 // hash: hash of two character string
3712 // mask: capacity mask
3713 // first_string_table_element: address of the first element of
3714 // the string table
3715 // undefined: the undefined object
3716 // scratch: -
3717
3718 // Perform a number of probes in the string table.
3719 const int kProbes = 4;
3720 Label found_in_string_table;
3721 Label next_probe[kProbes];
3722 Register candidate = scratch5; // Scratch register contains candidate.
3723 for (int i = 0; i < kProbes; i++) {
3724 // Calculate entry in string table.
3725 if (i > 0) {
3726 __ add(candidate, hash, Operand(StringTable::GetProbeOffset(i)));
3727 } else {
3728 __ mov(candidate, hash);
3729 }
3730
3731 __ and_(candidate, candidate, Operand(mask));
3732
3733 // Load the entry from the symble table.
3734 STATIC_ASSERT(StringTable::kEntrySize == 1);
3735 __ ldr(candidate,
3736 MemOperand(first_string_table_element,
3737 candidate,
3738 LSL,
3739 kPointerSizeLog2));
3740
3741 // If entry is undefined no string with this hash can be found.
3742 Label is_string;
3743 __ CompareObjectType(candidate, scratch, scratch, ODDBALL_TYPE);
3744 __ b(ne, &is_string);
3745
3746 __ cmp(undefined, candidate);
3747 __ b(eq, not_found);
3748 // Must be the hole (deleted entry).
3749 if (FLAG_debug_code) {
3750 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3751 __ cmp(ip, candidate);
3752 __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole);
3753 }
3754 __ jmp(&next_probe[i]);
3755
3756 __ bind(&is_string);
3757
3758 // Check that the candidate is a non-external ASCII string. The instance
3759 // type is still in the scratch register from the CompareObjectType
3760 // operation.
3761 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]);
3762
3763 // If length is not 2 the string is not a candidate.
3764 __ ldr(scratch, FieldMemOperand(candidate, String::kLengthOffset));
3765 __ cmp(scratch, Operand(Smi::FromInt(2)));
3766 __ b(ne, &next_probe[i]);
3767
3768 // Check if the two characters match.
3769 // Assumes that word load is little endian.
3770 __ ldrh(scratch, FieldMemOperand(candidate, SeqOneByteString::kHeaderSize));
3771 __ cmp(chars, scratch);
3772 __ b(eq, &found_in_string_table);
3773 __ bind(&next_probe[i]);
3774 }
3775
3776 // No matching 2 character string found by probing.
3777 __ jmp(not_found);
3778
3779 // Scratch register contains result when we fall through to here.
3780 Register result = candidate;
3781 __ bind(&found_in_string_table);
3782 __ Move(r0, result);
3783 }
3784
3785
3786 void StringHelper::GenerateHashInit(MacroAssembler* masm, 3525 void StringHelper::GenerateHashInit(MacroAssembler* masm,
3787 Register hash, 3526 Register hash,
3788 Register character) { 3527 Register character) {
3789 // hash = character + (character << 10); 3528 // hash = character + (character << 10);
3790 __ LoadRoot(hash, Heap::kHashSeedRootIndex); 3529 __ LoadRoot(hash, Heap::kHashSeedRootIndex);
3791 // Untag smi seed and add the character. 3530 // Untag smi seed and add the character.
3792 __ add(hash, character, Operand(hash, LSR, kSmiTagSize)); 3531 __ add(hash, character, Operand(hash, LSR, kSmiTagSize));
3793 // hash += hash << 10; 3532 // hash += hash << 10;
3794 __ add(hash, hash, Operand(hash, LSL, 10)); 3533 __ add(hash, hash, Operand(hash, LSL, 10));
3795 // hash ^= hash >> 6; 3534 // hash ^= hash >> 6;
(...skipping 394 matching lines...) Expand 10 before | Expand all | Expand 10 after
4190 __ add(sp, sp, Operand(2 * kPointerSize)); 3929 __ add(sp, sp, Operand(2 * kPointerSize));
4191 GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5); 3930 GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5);
4192 3931
4193 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 3932 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
4194 // tagged as a small integer. 3933 // tagged as a small integer.
4195 __ bind(&runtime); 3934 __ bind(&runtime);
4196 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 3935 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4197 } 3936 }
4198 3937
4199 3938
3939 void ArrayPushStub::Generate(MacroAssembler* masm) {
3940 Register receiver = r0;
3941 Register scratch = r1;
3942
3943 int argc = arguments_count();
3944
3945 if (argc == 0) {
3946 // Nothing to do, just return the length.
3947 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
3948 __ Drop(argc + 1);
3949 __ Ret();
3950 return;
3951 }
3952
3953 Isolate* isolate = masm->isolate();
3954
3955 if (argc != 1) {
3956 __ TailCallExternalReference(
3957 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3958 return;
3959 }
3960
3961 Label call_builtin, attempt_to_grow_elements, with_write_barrier;
3962
3963 Register elements = r6;
3964 Register end_elements = r5;
3965 // Get the elements array of the object.
3966 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
3967
3968 if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3969 // Check that the elements are in fast mode and writable.
3970 __ CheckMap(elements,
3971 scratch,
3972 Heap::kFixedArrayMapRootIndex,
3973 &call_builtin,
3974 DONT_DO_SMI_CHECK);
3975 }
3976
3977 // Get the array's length into scratch and calculate new length.
3978 __ ldr(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
3979 __ add(scratch, scratch, Operand(Smi::FromInt(argc)));
3980
3981 // Get the elements' length.
3982 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
3983
3984 // Check if we could survive without allocation.
3985 __ cmp(scratch, r4);
3986
3987 const int kEndElementsOffset =
3988 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
3989
3990 if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3991 __ b(gt, &attempt_to_grow_elements);
3992
3993 // Check if value is a smi.
3994 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
3995 __ JumpIfNotSmi(r4, &with_write_barrier);
3996
3997 // Store the value.
3998 // We may need a register containing the address end_elements below, so
3999 // write back the value in end_elements.
4000 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
4001 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
4002 } else {
4003 // Check if we could survive without allocation.
4004 __ cmp(scratch, r4);
4005 __ b(gt, &call_builtin);
4006
4007 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
4008 __ StoreNumberToDoubleElements(r4, scratch, elements, r5, d0,
4009 &call_builtin, argc * kDoubleSize);
4010 }
4011
4012 // Save new length.
4013 __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
4014 __ Drop(argc + 1);
4015 __ mov(r0, scratch);
4016 __ Ret();
4017
4018 if (IsFastDoubleElementsKind(elements_kind())) {
4019 __ bind(&call_builtin);
4020 __ TailCallExternalReference(
4021 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4022 return;
4023 }
4024
4025 __ bind(&with_write_barrier);
4026
4027 if (IsFastSmiElementsKind(elements_kind())) {
4028 if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);
4029
4030 __ ldr(r9, FieldMemOperand(r4, HeapObject::kMapOffset));
4031 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4032 __ cmp(r9, ip);
4033 __ b(eq, &call_builtin);
4034
4035 ElementsKind target_kind = IsHoleyElementsKind(elements_kind())
4036 ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
4037 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4038 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
4039 __ ldr(r3, ContextOperand(r3, Context::JS_ARRAY_MAPS_INDEX));
4040 const int header_size = FixedArrayBase::kHeaderSize;
4041 // Verify that the object can be transitioned in place.
4042 const int origin_offset = header_size + elements_kind() * kPointerSize;
4043 __ ldr(r2, FieldMemOperand(receiver, origin_offset));
4044 __ ldr(ip, FieldMemOperand(r3, HeapObject::kMapOffset));
4045 __ cmp(r2, ip);
4046 __ b(ne, &call_builtin);
4047
4048 const int target_offset = header_size + target_kind * kPointerSize;
4049 __ ldr(r3, FieldMemOperand(r3, target_offset));
4050 __ mov(r2, receiver);
4051 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
4052 masm, DONT_TRACK_ALLOCATION_SITE, NULL);
4053 }
4054
4055 // Save new length.
4056 __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
4057
4058 // Store the value.
4059 // We may need a register containing the address end_elements below, so write
4060 // back the value in end_elements.
4061 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
4062 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
4063
4064 __ RecordWrite(elements,
4065 end_elements,
4066 r4,
4067 kLRHasNotBeenSaved,
4068 kDontSaveFPRegs,
4069 EMIT_REMEMBERED_SET,
4070 OMIT_SMI_CHECK);
4071 __ Drop(argc + 1);
4072 __ mov(r0, scratch);
4073 __ Ret();
4074
4075 __ bind(&attempt_to_grow_elements);
4076 // scratch: array's length + 1.
4077
4078 if (!FLAG_inline_new) {
4079 __ bind(&call_builtin);
4080 __ TailCallExternalReference(
4081 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4082 return;
4083 }
4084
4085 __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
4086 // Growing elements that are SMI-only requires special handling in case the
4087 // new element is non-Smi. For now, delegate to the builtin.
4088 if (IsFastSmiElementsKind(elements_kind())) {
4089 __ JumpIfNotSmi(r2, &call_builtin);
4090 }
4091
4092 // We could be lucky and the elements array could be at the top of new-space.
4093 // In this case we can just grow it in place by moving the allocation pointer
4094 // up.
4095 ExternalReference new_space_allocation_top =
4096 ExternalReference::new_space_allocation_top_address(isolate);
4097 ExternalReference new_space_allocation_limit =
4098 ExternalReference::new_space_allocation_limit_address(isolate);
4099
4100 const int kAllocationDelta = 4;
4101 ASSERT(kAllocationDelta >= argc);
4102 // Load top and check if it is the end of elements.
4103 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
4104 __ add(end_elements, end_elements, Operand(kEndElementsOffset));
4105 __ mov(r4, Operand(new_space_allocation_top));
4106 __ ldr(r3, MemOperand(r4));
4107 __ cmp(end_elements, r3);
4108 __ b(ne, &call_builtin);
4109
4110 __ mov(r9, Operand(new_space_allocation_limit));
4111 __ ldr(r9, MemOperand(r9));
4112 __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
4113 __ cmp(r3, r9);
4114 __ b(hi, &call_builtin);
4115
4116 // We fit and could grow elements.
4117 // Update new_space_allocation_top.
4118 __ str(r3, MemOperand(r4));
4119 // Push the argument.
4120 __ str(r2, MemOperand(end_elements));
4121 // Fill the rest with holes.
4122 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
4123 for (int i = 1; i < kAllocationDelta; i++) {
4124 __ str(r3, MemOperand(end_elements, i * kPointerSize));
4125 }
4126
4127 // Update elements' and array's sizes.
4128 __ str(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
4129 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
4130 __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
4131 __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
4132
4133 // Elements are in new space, so write barrier is not required.
4134 __ Drop(argc + 1);
4135 __ mov(r0, scratch);
4136 __ Ret();
4137
4138 __ bind(&call_builtin);
4139 __ TailCallExternalReference(
4140 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4141 }
4142
4143
4200 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 4144 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
4201 // ----------- S t a t e ------------- 4145 // ----------- S t a t e -------------
4202 // -- r1 : left 4146 // -- r1 : left
4203 // -- r0 : right 4147 // -- r0 : right
4204 // -- lr : return address 4148 // -- lr : return address
4205 // ----------------------------------- 4149 // -----------------------------------
4206 Isolate* isolate = masm->isolate(); 4150 Isolate* isolate = masm->isolate();
4207 4151
4208 // Load r2 with the allocation site. We stick an undefined dummy value here 4152 // Load r2 with the allocation site. We stick an undefined dummy value here
4209 // and replace it with the real allocation site later when we instantiate this 4153 // and replace it with the real allocation site later when we instantiate this
(...skipping 12 matching lines...) Expand all
4222 __ Assert(eq, kExpectedAllocationSite); 4166 __ Assert(eq, kExpectedAllocationSite);
4223 } 4167 }
4224 4168
4225 // Tail call into the stub that handles binary operations with allocation 4169 // Tail call into the stub that handles binary operations with allocation
4226 // sites. 4170 // sites.
4227 BinaryOpWithAllocationSiteStub stub(state_); 4171 BinaryOpWithAllocationSiteStub stub(state_);
4228 __ TailCallStub(&stub); 4172 __ TailCallStub(&stub);
4229 } 4173 }
4230 4174
4231 4175
// Entry point for string concatenation ("+" where at least one operand may be
// a string). Stack on entry: sp[0] = second (right) argument, sp[4] = first
// (left) argument. On the fast paths the result string is returned in r0 and
// both arguments are popped. Falls back to Runtime::kStringAdd when type
// checks or allocations fail, or to a JS builtin when an argument had to be
// converted to a string first (see GenerateConvertArgument).
void StringAddStub::Generate(MacroAssembler* masm) {
  Label call_runtime, call_builtin;
  Builtins::JavaScript builtin_id = Builtins::ADD;

  Counters* counters = masm->isolate()->counters();

  // Stack on entry:
  // sp[0]: second argument (right).
  // sp[4]: first argument (left).

  // Load the two arguments.
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));  // First argument.
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  // Otherwise, at least one of the arguments is definitely a string,
  // and we convert the one that is not known to be a string.
  if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
    ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
    __ JumpIfEitherSmi(r0, r1, &call_runtime);
    // Load instance types.
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
    STATIC_ASSERT(kStringTag == 0);
    // If either is not a string, go to runtime.
    __ tst(r4, Operand(kIsNotStringMask));
    // The second tst is predicated on eq, i.e. it only executes (and only
    // updates the flags) if the first operand was a string. One branch then
    // covers both checks.
    __ tst(r5, Operand(kIsNotStringMask), eq);
    __ b(ne, &call_runtime);
  } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
    GenerateConvertArgument(
        masm, 1 * kPointerSize, r0, r2, r3, r4, r5, &call_builtin);
    builtin_id = Builtins::STRING_ADD_RIGHT;
  } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
    GenerateConvertArgument(
        masm, 0 * kPointerSize, r1, r2, r3, r4, r5, &call_builtin);
    builtin_id = Builtins::STRING_ADD_LEFT;
  }

  // Both arguments are strings.
  // r0: first string
  // r1: second string
  // r4: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // r5: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  {
    Label strings_not_empty;
    // Check if either of the strings are empty. In that case return the other.
    __ ldr(r2, FieldMemOperand(r0, String::kLengthOffset));
    __ ldr(r3, FieldMemOperand(r1, String::kLengthOffset));
    STATIC_ASSERT(kSmiTag == 0);
    __ cmp(r2, Operand(Smi::FromInt(0)));  // Test if first string is empty.
    __ mov(r0, Operand(r1), LeaveCC, eq);  // If first is empty, return second.
    STATIC_ASSERT(kSmiTag == 0);
    // Else test if second string is empty. Predicated on ne so the compare
    // is skipped (flags preserved as eq) when the first string was empty.
    __ cmp(r3, Operand(Smi::FromInt(0)), ne);
    __ b(ne, &strings_not_empty);  // If either string was empty, return r0.

    __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
    __ add(sp, sp, Operand(2 * kPointerSize));  // Pop the two arguments.
    __ Ret();

    __ bind(&strings_not_empty);
  }

  __ SmiUntag(r2);
  __ SmiUntag(r3);
  // Both strings are non-empty.
  // r0: first string
  // r1: second string
  // r2: length of first string
  // r3: length of second string
  // r4: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // r5: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  // Adding two lengths can't overflow.
  STATIC_ASSERT(String::kMaxLength < String::kMaxLength * 2);
  __ add(r6, r2, Operand(r3));
  // Use the string table when adding two one character strings, as it
  // helps later optimizations to return a string here.
  __ cmp(r6, Operand(2));
  __ b(ne, &longer_than_two);

  // Check that both strings are non-external ASCII strings.
  if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
    // Instance types were not loaded above; load them now.
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r4, r5, r6, r3,
                                                  &call_runtime);

  // Get the two characters forming the sub string.
  __ ldrb(r2, FieldMemOperand(r0, SeqOneByteString::kHeaderSize));
  __ ldrb(r3, FieldMemOperand(r1, SeqOneByteString::kHeaderSize));

  // Try to lookup two character string in string table. If it is not found
  // just allocate a new one.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterStringTableProbe(
      masm, r2, r3, r6, r0, r4, r5, r9, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&make_two_character_string);
  // Resulting string has length 2 and first chars of two strings
  // are combined into single halfword in r2 register.
  // So we can fill resulting string without two loops by a single
  // halfword store instruction (which assumes that processor is
  // in a little endian mode)
  __ mov(r6, Operand(2));
  __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime);
  __ strh(r2, FieldMemOperand(r0, SeqOneByteString::kHeaderSize));
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ cmp(r6, Operand(ConsString::kMinLength));
  __ b(lt, &string_add_flat_result);
  // Handle exceptionally long strings in the runtime system.
  STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
  ASSERT(IsPowerOf2(String::kMaxLength + 1));
  // kMaxLength + 1 is representable as shifted literal, kMaxLength is not.
  __ cmp(r6, Operand(String::kMaxLength + 1));
  __ b(hs, &call_runtime);

  // If result is not supposed to be flat, allocate a cons string object.
  // If both strings are ASCII the result is an ASCII cons string.
  if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
    // Reload instance types; earlier code may have clobbered r4/r5.
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }
  Label non_ascii, allocated, ascii_data;
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // Both tst results must be non-zero (one-byte encoding bit set in both
  // instance types); the second tst is predicated on ne.
  __ tst(r4, Operand(kStringEncodingMask));
  __ tst(r5, Operand(kStringEncodingMask), ne);
  __ b(eq, &non_ascii);

  // Allocate an ASCII cons string.
  __ bind(&ascii_data);
  __ AllocateAsciiConsString(r3, r6, r4, r5, &call_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  Label skip_write_barrier, after_writing;
  ExternalReference high_promotion_mode = ExternalReference::
      new_space_high_promotion_mode_active_address(masm->isolate());
  __ mov(r4, Operand(high_promotion_mode));
  __ ldr(r4, MemOperand(r4, 0));
  __ cmp(r4, Operand::Zero());
  __ b(eq, &skip_write_barrier);

  // High promotion mode: the cons string may be allocated in old space, so
  // the stores of the (possibly new-space) children need write barriers.
  __ str(r0, FieldMemOperand(r3, ConsString::kFirstOffset));
  __ RecordWriteField(r3,
                      ConsString::kFirstOffset,
                      r0,
                      r4,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs);
  __ str(r1, FieldMemOperand(r3, ConsString::kSecondOffset));
  __ RecordWriteField(r3,
                      ConsString::kSecondOffset,
                      r1,
                      r4,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs);
  __ jmp(&after_writing);

  __ bind(&skip_write_barrier);
  __ str(r0, FieldMemOperand(r3, ConsString::kFirstOffset));
  __ str(r1, FieldMemOperand(r3, ConsString::kSecondOffset));

  __ bind(&after_writing);

  __ mov(r0, Operand(r3));
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only one byte characters.
  // r4: first instance type.
  // r5: second instance type.
  __ tst(r4, Operand(kOneByteDataHintMask));
  __ tst(r5, Operand(kOneByteDataHintMask), ne);
  __ b(ne, &ascii_data);
  __ eor(r4, r4, Operand(r5));
  STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
  __ and_(r4, r4, Operand(kOneByteStringTag | kOneByteDataHintTag));
  __ cmp(r4, Operand(kOneByteStringTag | kOneByteDataHintTag));
  __ b(eq, &ascii_data);

  // Allocate a two byte cons string.
  __ AllocateTwoByteConsString(r3, r6, r4, r5, &call_runtime);
  __ jmp(&allocated);

  // We cannot encounter sliced strings or cons strings here since:
  STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
  // Handle creating a flat result from either external or sequential strings.
  // Locate the first characters' locations.
  // r0: first string
  // r1: second string
  // r2: length of first string
  // r3: length of second string
  // r4: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // r5: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
  // r6: sum of lengths.
  Label first_prepared, second_prepared;
  __ bind(&string_add_flat_result);
  if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }

  // Check whether both strings have same encoding
  __ eor(ip, r4, Operand(r5));
  ASSERT(__ ImmediateFitsAddrMode1Instruction(kStringEncodingMask));
  __ tst(ip, Operand(kStringEncodingMask));
  __ b(ne, &call_runtime);

  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(r4, Operand(kStringRepresentationMask));
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  // Sequential string: first character is just past the header (predicated
  // on eq, i.e. on the representation test above).
  __ add(r6,
         r0,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag),
         LeaveCC,
         eq);
  __ b(eq, &first_prepared);
  // External string: rule out short external string and load string resource.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ tst(r4, Operand(kShortExternalStringMask));
  __ b(ne, &call_runtime);
  __ ldr(r6, FieldMemOperand(r0, ExternalString::kResourceDataOffset));
  __ bind(&first_prepared);

  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(r5, Operand(kStringRepresentationMask));
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  __ add(r1,
         r1,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag),
         LeaveCC,
         eq);
  __ b(eq, &second_prepared);
  // External string: rule out short external string and load string resource.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ tst(r5, Operand(kShortExternalStringMask));
  __ b(ne, &call_runtime);
  __ ldr(r1, FieldMemOperand(r1, ExternalString::kResourceDataOffset));
  __ bind(&second_prepared);

  Label non_ascii_string_add_flat_result;
  // r6: first character of first string
  // r1: first character of second string
  // r2: length of first string.
  // r3: length of second string.
  // Both strings have the same encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ tst(r5, Operand(kStringEncodingMask));
  __ b(eq, &non_ascii_string_add_flat_result);

  __ add(r2, r2, Operand(r3));  // r2 = total length for allocation.
  __ AllocateAsciiString(r0, r2, r4, r5, r9, &call_runtime);
  __ sub(r2, r2, Operand(r3));  // Restore r2 to the first string's length.
  __ add(r5, r0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  // r0: result string.
  // r6: first character of first string.
  // r1: first character of second string.
  // r2: length of first string.
  // r3: length of second string.
  // r5: first character of result.
  StringHelper::GenerateCopyCharacters(masm, r5, r6, r2, r4, true);
  // r5: next character of result.
  StringHelper::GenerateCopyCharacters(masm, r5, r1, r3, r4, true);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&non_ascii_string_add_flat_result);
  __ add(r2, r2, Operand(r3));
  __ AllocateTwoByteString(r0, r2, r4, r5, r9, &call_runtime);
  __ sub(r2, r2, Operand(r3));
  __ add(r5, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // r0: result string.
  // r6: first character of first string.
  // r1: first character of second string.
  // r2: length of first string.
  // r3: length of second string.
  // r5: first character of result.
  StringHelper::GenerateCopyCharacters(masm, r5, r6, r2, r4, false);
  // r5: next character of result.
  StringHelper::GenerateCopyCharacters(masm, r5, r1, r3, r4, false);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  // Just jump to runtime to add the two strings.
  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}
4550
4551
// Spills the two string arguments from registers to the stack in the layout
// StringAddStub::Generate expects: left (r0) at sp[4], right (r1) at sp[0].
// Inverse of GenerateRegisterArgsPop.
void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ push(r0);
  __ push(r1);
}
4556
4557
// Restores the two string arguments from the stack back into registers
// (right into r1, left into r0). Inverse of GenerateRegisterArgsPush.
void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm) {
  __ pop(r1);
  __ pop(r0);
}
4562
4563
// Ensures the argument held in |arg| (also stored at sp[stack_offset]) is a
// string. Strings fall through unchanged. Non-strings are looked up in the
// number-to-string cache; on a cache miss control transfers to |slow| (which
// in Generate() routes to a conversion builtin). On success the converted
// string is written back both to |arg| and to its stack slot so later code
// can reload it from either place.
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Register scratch4,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CompareObjectType(arg, scratch1, scratch1, FIRST_NONSTRING_TYPE);
  // Instance type below FIRST_NONSTRING_TYPE means |arg| is a string.
  __ b(lt, &done);

  // Check the number to string cache.
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, scratch4, slow);
  __ mov(arg, scratch1);
  __ str(arg, MemOperand(sp, stack_offset));
  __ bind(&done);
}
4586
4587
4588 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { 4176 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4589 ASSERT(state_ == CompareIC::SMI); 4177 ASSERT(state_ == CompareIC::SMI);
4590 Label miss; 4178 Label miss;
4591 __ orr(r2, r1, r0); 4179 __ orr(r2, r1, r0);
4592 __ JumpIfNotSmi(r2, &miss); 4180 __ JumpIfNotSmi(r2, &miss);
4593 4181
4594 if (GetCondition() == eq) { 4182 if (GetCondition() == eq) {
4595 // For equality we do not care about the sign of the result. 4183 // For equality we do not care about the sign of the result.
4596 __ sub(r0, r0, r1, SetCC); 4184 __ sub(r0, r0, r1, SetCC);
4597 } else { 4185 } else {
(...skipping 858 matching lines...) Expand 10 before | Expand all | Expand 10 after
5456 if (function_mode_ == JS_FUNCTION_STUB_MODE) { 5044 if (function_mode_ == JS_FUNCTION_STUB_MODE) {
5457 __ add(r1, r1, Operand(1)); 5045 __ add(r1, r1, Operand(1));
5458 } 5046 }
5459 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 5047 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
5460 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2)); 5048 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));
5461 __ add(sp, sp, r1); 5049 __ add(sp, sp, r1);
5462 __ Ret(); 5050 __ Ret();
5463 } 5051 }
5464 5052
5465 5053
// Trampoline used when a stub failure should end in a tail call to a JS
// function: calls the C entry stub (one argument, optionally preserving FP
// registers), takes the JS function it returns in r0, then tears down the
// STUB_FAILURE_TRAMPOLINE frame and jumps into that function with the
// argument count recorded in the frame.
void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  // The runtime call left the target JS function in r0; move it to r1 so r0
  // can receive the argument count expected by InvokeFunction.
  __ mov(r1, r0);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ ldr(r0, MemOperand(fp, parameter_count_offset));
  // The parameter count above includes the receiver for the arguments passed to
  // the deoptimization handler. Subtract the receiver for the parameter count
  // for the call.
  __ sub(r0, r0, Operand(1));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ParameterCount argument_count(r0);
  __ InvokeFunction(r1, argument_count, JUMP_FUNCTION, NullCallWrapper());
}
5481
5482
// Emits a call to the profiler entry hook stub at the current code position,
// but only when the isolate has an entry hook installed; otherwise emits
// nothing. lr is preserved around the call.
// NOTE(review): the PredictableCodeSizeScope pins the sequence to exactly
// 4 instructions — presumably so the stub can locate its caller's address at
// a fixed offset; confirm against ProfileEntryHookStub::Generate.
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize);
    ProfileEntryHookStub stub;
    __ push(lr);
    __ CallStub(&stub);
    __ pop(lr);
  }
}
5492 5063
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
5573 // If we reached this point there is a problem. 5144 // If we reached this point there is a problem.
5574 __ Abort(kUnexpectedElementsKindInArrayConstructor); 5145 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5575 } else { 5146 } else {
5576 UNREACHABLE(); 5147 UNREACHABLE();
5577 } 5148 }
5578 } 5149 }
5579 5150
5580 5151
5581 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, 5152 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
5582 AllocationSiteOverrideMode mode) { 5153 AllocationSiteOverrideMode mode) {
5583 // r2 - type info cell (if mode != DISABLE_ALLOCATION_SITES) 5154 // r2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
5584 // r3 - kind (if mode != DISABLE_ALLOCATION_SITES) 5155 // r3 - kind (if mode != DISABLE_ALLOCATION_SITES)
5585 // r0 - number of arguments 5156 // r0 - number of arguments
5586 // r1 - constructor? 5157 // r1 - constructor?
5587 // sp[0] - last argument 5158 // sp[0] - last argument
5588 Label normal_sequence; 5159 Label normal_sequence;
5589 if (mode == DONT_OVERRIDE) { 5160 if (mode == DONT_OVERRIDE) {
5590 ASSERT(FAST_SMI_ELEMENTS == 0); 5161 ASSERT(FAST_SMI_ELEMENTS == 0);
5591 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 5162 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
5592 ASSERT(FAST_ELEMENTS == 2); 5163 ASSERT(FAST_ELEMENTS == 2);
5593 ASSERT(FAST_HOLEY_ELEMENTS == 3); 5164 ASSERT(FAST_HOLEY_ELEMENTS == 3);
(...skipping 17 matching lines...) Expand all
5611 ArraySingleArgumentConstructorStub stub_holey(holey_initial, 5182 ArraySingleArgumentConstructorStub stub_holey(holey_initial,
5612 DISABLE_ALLOCATION_SITES); 5183 DISABLE_ALLOCATION_SITES);
5613 __ TailCallStub(&stub_holey); 5184 __ TailCallStub(&stub_holey);
5614 5185
5615 __ bind(&normal_sequence); 5186 __ bind(&normal_sequence);
5616 ArraySingleArgumentConstructorStub stub(initial, 5187 ArraySingleArgumentConstructorStub stub(initial,
5617 DISABLE_ALLOCATION_SITES); 5188 DISABLE_ALLOCATION_SITES);
5618 __ TailCallStub(&stub); 5189 __ TailCallStub(&stub);
5619 } else if (mode == DONT_OVERRIDE) { 5190 } else if (mode == DONT_OVERRIDE) {
5620 // We are going to create a holey array, but our kind is non-holey. 5191 // We are going to create a holey array, but our kind is non-holey.
5621 // Fix kind and retry (only if we have an allocation site in the cell). 5192 // Fix kind and retry (only if we have an allocation site in the slot).
5622 __ add(r3, r3, Operand(1)); 5193 __ add(r3, r3, Operand(1));
5623 __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
5624 5194
5625 if (FLAG_debug_code) { 5195 if (FLAG_debug_code) {
5626 __ ldr(r5, FieldMemOperand(r5, 0)); 5196 __ ldr(r5, FieldMemOperand(r2, 0));
5627 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); 5197 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
5628 __ Assert(eq, kExpectedAllocationSiteInCell); 5198 __ Assert(eq, kExpectedAllocationSite);
5629 __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
5630 } 5199 }
5631 5200
5632 // Save the resulting elements kind in type info. We can't just store r3 5201 // Save the resulting elements kind in type info. We can't just store r3
5633 // in the AllocationSite::transition_info field because elements kind is 5202 // in the AllocationSite::transition_info field because elements kind is
5634 // restricted to a portion of the field...upper bits need to be left alone. 5203 // restricted to a portion of the field...upper bits need to be left alone.
5635 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 5204 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5636 __ ldr(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); 5205 __ ldr(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
5637 __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); 5206 __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
5638 __ str(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); 5207 __ str(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
5639 5208
5640 __ bind(&normal_sequence); 5209 __ bind(&normal_sequence);
5641 int last_index = GetSequenceIndexFromFastElementsKind( 5210 int last_index = GetSequenceIndexFromFastElementsKind(
5642 TERMINAL_FAST_ELEMENTS_KIND); 5211 TERMINAL_FAST_ELEMENTS_KIND);
5643 for (int i = 0; i <= last_index; ++i) { 5212 for (int i = 0; i <= last_index; ++i) {
5644 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 5213 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
5645 __ cmp(r3, Operand(kind)); 5214 __ cmp(r3, Operand(kind));
5646 ArraySingleArgumentConstructorStub stub(kind); 5215 ArraySingleArgumentConstructorStub stub(kind);
5647 __ TailCallStub(&stub, eq); 5216 __ TailCallStub(&stub, eq);
5648 } 5217 }
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
5721 } else { 5290 } else {
5722 UNREACHABLE(); 5291 UNREACHABLE();
5723 } 5292 }
5724 } 5293 }
5725 5294
5726 5295
// Constructs a JS array. Reads allocation-site feedback from the feedback
// vector slot (r2/r3) when available and dispatches to an elements-kind
// specialized constructor stub; without usable feedback it dispatches with
// allocation sites disabled.
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argc (only if argument_count_ == ANY)
  //  -- r1 : constructor
  //  -- r2 : feedback vector (fixed array or undefined)
  //  -- r3 : slot index (if r2 is fixed array)
  //  -- sp[0] : return address
  //  -- sp[4] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ tst(r4, Operand(kSmiTagMask));
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r4, r4, r5, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in r2 or a valid fixed array.
    Label okay_here;
    Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(eq, &okay_here);
    __ ldr(r4, FieldMemOperand(r2, 0));
    __ cmp(r4, Operand(fixed_array_map));
    __ Assert(eq, kExpectedFixedArrayInRegisterR2);

    // r3 should be a smi if we don't have undefined in r2
    __ AssertSmi(r3);

    __ bind(&okay_here);
  }

  Label no_info;
  // Get the elements kind and case on that.
  __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
  __ b(eq, &no_info);
  // Load the feedback slot: r2 = vector[r3] (r3 is a smi-encoded index).
  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
  __ ldr(r2, FieldMemOperand(r2, FixedArray::kHeaderSize));

  // If the feedback vector is undefined, or contains anything other than an
  // AllocationSite, call an array constructor that doesn't use AllocationSites.
  __ ldr(r4, FieldMemOperand(r2, 0));
  __ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex);
  __ b(ne, &no_info);

  // Extract the elements kind from the AllocationSite's transition info.
  __ ldr(r3, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(r3);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}
5780 5355
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
5846 Label fast_elements_case; 5421 Label fast_elements_case;
5847 __ cmp(r3, Operand(FAST_ELEMENTS)); 5422 __ cmp(r3, Operand(FAST_ELEMENTS));
5848 __ b(eq, &fast_elements_case); 5423 __ b(eq, &fast_elements_case);
5849 GenerateCase(masm, FAST_HOLEY_ELEMENTS); 5424 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
5850 5425
5851 __ bind(&fast_elements_case); 5426 __ bind(&fast_elements_case);
5852 GenerateCase(masm, FAST_ELEMENTS); 5427 GenerateCase(masm, FAST_ELEMENTS);
5853 } 5428 }
5854 5429
5855 5430
// Calls out from JS to a C++ API function. Builds the implicit
// FunctionCallbackArguments block on the stack (holder, isolate, return-value
// slots, call data, callee, context save), sets up a FunctionCallbackInfo
// structure inside an exit frame, and invokes the callback through
// CallApiFunctionAndReturn, which also unwinds the frame and restores the
// context.
void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                  : callee
  //  -- r4                  : call_data
  //  -- r2                  : holder
  //  -- r1                  : api_function_address
  //  -- cp                  : context
  //  --
  //  -- sp[0]               : last argument
  //  -- ...
  //  -- sp[(argc - 1)* 4]   : first argument
  //  -- sp[argc * 4]        : receiver
  // -----------------------------------

  Register callee = r0;
  Register call_data = r4;
  Register holder = r2;
  Register api_function_address = r1;
  Register context = cp;

  int argc = ArgumentBits::decode(bit_field_);
  bool is_store = IsStoreBits::decode(bit_field_);
  bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);

  typedef FunctionCallbackArguments FCA;

  // The pushes below must produce exactly this layout (holder ends up at the
  // lowest address, the saved context at the highest).
  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  Isolate* isolate = masm->isolate();

  // context save
  __ push(context);
  // load context from callee
  __ ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  // call_data has been pushed, so its register is free to use as a scratch
  // holding the undefined value. If call_data was statically known to be
  // undefined it already holds it; otherwise load it explicitly.
  Register scratch = call_data;
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ push(scratch);
  // return value default
  __ push(scratch);
  // isolate
  __ mov(scratch,
         Operand(ExternalReference::isolate_address(isolate)));
  __ push(scratch);
  // holder
  __ push(holder);

  // Prepare arguments.
  __ mov(scratch, sp);

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  ASSERT(!api_function_address.is(r0) && !scratch.is(r0));
  // r0 = FunctionCallbackInfo&
  // Arguments is after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // FunctionCallbackInfo::implicit_args_
  __ str(scratch, MemOperand(r0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ add(ip, scratch, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // FunctionCallbackInfo::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // FunctionCallbackInfo::is_construct_call = 0
  __ mov(ip, Operand::Zero());
  __ str(ip, MemOperand(r0, 3 * kPointerSize));

  // Slots to drop on return: the FCA block, the receiver, and the arguments.
  const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  MemOperand context_restore_operand(
      fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first js argument
  int return_value_offset = 0;
  if (is_store) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  MemOperand return_value_operand(fp, return_value_offset * kPointerSize);

  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              kStackUnwindSpace,
                              return_value_operand,
                              &context_restore_operand);
}
5545
5546
// Calls out from JS to a C++ accessor getter callback. Expects the property
// name handle at sp[0] and the PropertyCallbackArguments block above it;
// builds a PropertyCallbackInfo inside an exit frame and invokes the getter
// through CallApiFunctionAndReturn.
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- sp[0]                  : name
  //  -- sp[4 - kArgsLength*4]  : PropertyCallbackArguments object
  //  -- ...
  //  -- r2                     : api_function_address
  // -----------------------------------

  Register api_function_address = r2;

  __ mov(r0, sp);  // r0 = Handle<Name>
  __ add(r1, r0, Operand(1 * kPointerSize));  // r1 = PCA

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create PropertyAccessorInfo instance on the stack above the exit frame with
  // r1 (internal::Object** args_) as the data.
  __ str(r1, MemOperand(sp, 1 * kPointerSize));
  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&

  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
  ExternalReference::Type thunk_type =
      ExternalReference::PROFILING_GETTER_CALL;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      masm->isolate());
  // Return value is read from fp[6 * kPointerSize]; no context restore
  // operand is needed for getters (NULL).
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              kStackUnwindSpace,
                              MemOperand(fp, 6 * kPointerSize),
                              NULL);
}
5583
5584
5856 #undef __ 5585 #undef __
5857 5586
5858 } } // namespace v8::internal 5587 } } // namespace v8::internal
5859 5588
5860 #endif // V8_TARGET_ARCH_ARM 5589 #endif // V8_TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « src/arm/code-stubs-arm.h ('k') | src/arm/codegen-arm.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698