Chromium Code Reviews

Diff: src/arm/code-stubs-arm.cc

Issue 16453002: Removed flag optimize-constructed-arrays. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Comment fixes Created 7 years, 6 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 2978 matching lines...)
      result_size_ == 1;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
-  if (FLAG_optimize_constructed_arrays) {
-    ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
-  }
+  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub save_doubles(1, mode);
  StoreBufferOverflowStub stub(mode);
  // These stubs might already be in the snapshot, detect that and don't
  // regenerate, which would lead to code stub initialization state being messed
  // up.
(...skipping 1604 matching lines...)

  __ bind(&done);
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}


-static void GenerateRecordCallTargetNoArray(MacroAssembler* masm) {
-  // Cache the called function in a global property cell. Cache states
-  // are uninitialized, monomorphic (indicated by a JSFunction), and
-  // megamorphic.
-  // r1 : the function to call
-  // r2 : cache cell for call target
-  Label done;
-
-  ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
-            masm->isolate()->heap()->undefined_value());
-  ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
-            masm->isolate()->heap()->the_hole_value());
-
-  // Load the cache state into r3.
-  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
-
-  // A monomorphic cache hit or an already megamorphic state: invoke the
-  // function without changing the state.
-  __ cmp(r3, r1);
-  __ b(eq, &done);
-  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
-  __ b(eq, &done);
-
-  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
-  // megamorphic.
-  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
-  // MegamorphicSentinel is an immortal immovable object (undefined) so no
-  // write-barrier is needed.
-  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex, ne);
-  __ str(ip, FieldMemOperand(r2, Cell::kValueOffset), ne);
-
-  // An uninitialized cache is patched with the function.
-  __ str(r1, FieldMemOperand(r2, Cell::kValueOffset), eq);
-  // No need for a write barrier here - cells are rescanned.
-
-  __ bind(&done);
-}
-
-
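For orientation, here is a minimal plain-C++ sketch of the feedback-cell state machine that this removed helper (and the surviving GenerateRecordCallTarget below) implements. The FeedbackCell struct and the sketch function are illustrative stand-ins, not actual V8 API; only the transition logic is meant to mirror the assembly.

// Illustrative only: the cache transitions the stub emits, written as C++.
// In V8 the cell is a TypeFeedbackCells entry; the_hole marks "uninitialized"
// and undefined marks "megamorphic".
struct FeedbackCell { void* value; };

void RecordCallTargetSketch(FeedbackCell* cell, void* function,
                            void* the_hole, void* undefined) {
  void* state = cell->value;
  if (state == function || state == undefined) {
    return;  // Monomorphic hit or already megamorphic: leave the cell alone.
  }
  if (state == the_hole) {
    cell->value = function;   // Uninitialized cache: become monomorphic.
  } else {
    cell->value = undefined;  // Monomorphic miss: go megamorphic.
  }
}
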
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a global property cell. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // r1 : the function to call
  // r2 : cache cell for call target
-  ASSERT(FLAG_optimize_constructed_arrays);
  Label initialize, done, miss, megamorphic, not_array_function;

  ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
            masm->isolate()->heap()->undefined_value());
  ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
            masm->isolate()->heap()->the_hole_value());

  // Load the cache state into r3.
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));

(...skipping 83 matching lines...)
  }

  // Check that the function is really a JavaScript function.
  // r1: pushed function (to be verified)
  __ JumpIfSmi(r1, &non_function);
  // Get the map of the function object.
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  if (RecordCallTarget()) {
-    if (FLAG_optimize_constructed_arrays) {
-      GenerateRecordCallTarget(masm);
-    } else {
-      GenerateRecordCallTargetNoArray(masm);
-    }
+    GenerateRecordCallTarget(masm);
  }

  // Fast-case: Invoke the function now.
  // r1: pushed function
  ParameterCount actual(argc_);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
    __ b(eq, &call_as_function);
(...skipping 54 matching lines...)
  // r2 : cache cell for call target
  Label slow, non_function_call;

  // Check that the function is not a smi.
  __ JumpIfSmi(r1, &non_function_call);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  if (RecordCallTarget()) {
-    if (FLAG_optimize_constructed_arrays) {
-      GenerateRecordCallTarget(masm);
-    } else {
-      GenerateRecordCallTargetNoArray(masm);
-    }
+    GenerateRecordCallTarget(masm);
  }

  // Jump to the function-specific construct stub.
-  Register jmp_reg = FLAG_optimize_constructed_arrays ? r3 : r2;
+  Register jmp_reg = r3;
  __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(jmp_reg, FieldMemOperand(jmp_reg,
                                  SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  // r0: number of arguments
  // r1: called object
  // r3: object type
  Label do_call;
  __ bind(&slow);
(...skipping 2461 matching lines...)
    Label okay_here;
    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
    __ cmp(r2, Operand(undefined_sentinel));
    __ b(eq, &okay_here);
    __ ldr(r3, FieldMemOperand(r2, 0));
    __ cmp(r3, Operand(cell_map));
    __ Assert(eq, "Expected property cell in register ebx");
    __ bind(&okay_here);
  }

-  if (FLAG_optimize_constructed_arrays) {
  Label no_info, switch_ready;
  // Get the elements kind and case on that.
  __ cmp(r2, Operand(undefined_sentinel));
  __ b(eq, &no_info);
-  __ ldr(r3, FieldMemOperand(r2, PropertyCell::kValueOffset));
+  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ JumpIfNotSmi(r3, &no_info);
  __ SmiUntag(r3);
  __ jmp(&switch_ready);
  __ bind(&no_info);
  __ mov(r3, Operand(GetInitialFastElementsKind()));
  __ bind(&switch_ready);

  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ tst(r0, r0);
    __ b(ne, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);

    __ bind(&not_zero_case);
    __ cmp(r0, Operand(1));
    __ b(gt, &not_one_case);
    CreateArrayDispatchOneArgument(masm);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
  } else if (argument_count_ == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
  } else if (argument_count_ == ONE) {
    CreateArrayDispatchOneArgument(masm);
  } else if (argument_count_ == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
  } else {
    UNREACHABLE();
  }
-  } else {
-    Label generic_constructor;
-    // Run the native code for the Array function called as a constructor.
-    ArrayNativeCode(masm, &generic_constructor);
-
-    // Jump to the generic construct code in case the specialized code cannot
-    // handle the construction.
-    __ bind(&generic_constructor);
-    Handle<Code> generic_construct_stub =
-        masm->isolate()->builtins()->JSConstructStubGeneric();
-    __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
-  }
}
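As a reading aid, here is a hedged plain-C++ sketch of the control flow the now-unconditional stub code emits at runtime. The Cell struct, IsSmi helper, placeholder kind value, and the Dispatch* functions are invented for illustration and are not V8 API; only the branching structure is meant to match the assembly above.

// Illustrative only: runtime behaviour of the generated dispatch, as C++.
#include <cstdint>

struct Cell { intptr_t value; };                          // feedback cell payload
static bool IsSmi(intptr_t v) { return (v & 1) == 0; }    // 32-bit Smi tag convention
static int GetInitialFastElementsKind() { return 0; }     // placeholder kind

static void DispatchNoArgument(int /*kind*/) {}   // ArrayNoArgumentConstructorStub
static void DispatchOneArgument(int /*kind*/) {}  // CreateArrayDispatchOneArgument
static void DispatchNArguments(int /*kind*/) {}   // ArrayNArgumentsConstructorStub

void ArrayConstructorDispatch(int argc, Cell* cell) {
  // Use the ElementsKind recorded in the feedback cell when it holds a Smi;
  // otherwise fall back to the initial fast kind (the &no_info path above).
  int kind = (cell != nullptr && IsSmi(cell->value))
                 ? static_cast<int>(cell->value >> 1)     // SmiUntag
                 : GetInitialFastElementsKind();
  if (argc == 0) {
    DispatchNoArgument(kind);
  } else if (argc == 1) {
    DispatchOneArgument(kind);
  } else {
    DispatchNArguments(kind);
  }
}
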


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ tst(r0, r0);
(...skipping 41 matching lines...)

    // Initial map for the builtin Array function should be a map.
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ tst(r3, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r3, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

-  if (FLAG_optimize_constructed_arrays) {
  // Figure out the right elements kind
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ Ubfx(r3, r3, Map::kElementsKindShift, Map::kElementsKindBitCount);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(r3, Operand(FAST_ELEMENTS));
    __ b(eq, &done);
    __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS));
    __ Assert(eq,
        "Invalid ElementsKind for InternalArray or InternalPackedArray");
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(r3, Operand(FAST_ELEMENTS));
  __ b(eq, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
-  } else {
-    Label generic_constructor;
-    // Run the native code for the Array function called as constructor.
-    ArrayNativeCode(masm, &generic_constructor);
-
-    // Jump to the generic construct code in case the specialized code cannot
-    // handle the construction.
-    __ bind(&generic_constructor);
-    Handle<Code> generic_construct_stub =
-        masm->isolate()->builtins()->JSConstructStubGeneric();
-    __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
-  }
}
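For reference, a small hedged C++ equivalent of the Ubfx-based extraction above: it pulls the ElementsKind bits out of the map's bit_field2 byte in one shift-and-mask, which is what the single Ubfx instruction does. The shift and width constants stand in for Map::kElementsKindShift and Map::kElementsKindBitCount; the concrete values used here are assumptions for illustration only.

// Illustrative only: plain-C++ equivalent of the Ubfx extraction above.
#include <cstdint>

constexpr int kElementsKindShift = 3;     // assumed value, for illustration
constexpr int kElementsKindBitCount = 5;  // assumed value, for illustration

int ExtractElementsKind(uint8_t bit_field2) {
  return (bit_field2 >> kElementsKindShift) &
         ((1 << kElementsKindBitCount) - 1);
}
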


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM