Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(115)

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 139973004: A64: Synchronize with r15814. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/code-stubs-ia32.h ('k') | src/ia32/codegen-ia32.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 25 matching lines...) Expand all
36 #include "regexp-macro-assembler.h" 36 #include "regexp-macro-assembler.h"
37 #include "runtime.h" 37 #include "runtime.h"
38 #include "stub-cache.h" 38 #include "stub-cache.h"
39 #include "codegen.h" 39 #include "codegen.h"
40 #include "runtime.h" 40 #include "runtime.h"
41 41
42 namespace v8 { 42 namespace v8 {
43 namespace internal { 43 namespace internal {
44 44
45 45
46 void ToNumberStub::InitializeInterfaceDescriptor(
47     Isolate* isolate,
48     CodeStubInterfaceDescriptor* descriptor) {
  // The ToNumber stub takes a single register argument: the value to
  // convert, in eax (matches ToNumberStub::Generate below).
49   static Register registers[] = { eax };
50   descriptor->register_param_count_ = 1;
51   descriptor->register_params_ = registers;
  // NULL: this stub registers no deoptimization/miss runtime entry.
52   descriptor->deoptimization_handler_ = NULL;
53 }
54
55
46 void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( 56 void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
47 Isolate* isolate, 57 Isolate* isolate,
48 CodeStubInterfaceDescriptor* descriptor) { 58 CodeStubInterfaceDescriptor* descriptor) {
49 static Register registers[] = { eax, ebx, ecx }; 59 static Register registers[] = { eax, ebx, ecx };
50 descriptor->register_param_count_ = 3; 60 descriptor->register_param_count_ = 3;
51 descriptor->register_params_ = registers; 61 descriptor->register_params_ = registers;
52 descriptor->deoptimization_handler_ = 62 descriptor->deoptimization_handler_ =
53 Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry; 63 Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
54 } 64 }
55 65
(...skipping 199 matching lines...) Expand 10 before | Expand all | Expand 10 after
255 Isolate* isolate, 265 Isolate* isolate,
256 CodeStubInterfaceDescriptor* descriptor) { 266 CodeStubInterfaceDescriptor* descriptor) {
257 static Register registers[] = { edx, ecx, eax }; 267 static Register registers[] = { edx, ecx, eax };
258 descriptor->register_param_count_ = 3; 268 descriptor->register_param_count_ = 3;
259 descriptor->register_params_ = registers; 269 descriptor->register_params_ = registers;
260 descriptor->deoptimization_handler_ = 270 descriptor->deoptimization_handler_ =
261 FUNCTION_ADDR(StoreIC_MissFromStubFailure); 271 FUNCTION_ADDR(StoreIC_MissFromStubFailure);
262 } 272 }
263 273
264 274
275 void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
276     Isolate* isolate,
277     CodeStubInterfaceDescriptor* descriptor) {
  // Four register parameters for the transition-and-store operation
  // (value/key/object/map — exact meaning defined by the stub's callers;
  // confirm against the hydrogen code for this stub).
278   static Register registers[] = { eax, ebx, ecx, edx };
279   descriptor->register_param_count_ = 4;
280   descriptor->register_params_ = registers;
  // On miss, bail out to the ElementsTransitionAndStoreIC_Miss runtime
  // function.
281   descriptor->deoptimization_handler_ =
282       FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
283 }
284
285
265 #define __ ACCESS_MASM(masm) 286 #define __ ACCESS_MASM(masm)
266 287
267 288
268 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { 289 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
269 // Update the static counter each time a new code stub is generated. 290 // Update the static counter each time a new code stub is generated.
270 Isolate* isolate = masm->isolate(); 291 Isolate* isolate = masm->isolate();
271 isolate->counters()->code_stubs()->Increment(); 292 isolate->counters()->code_stubs()->Increment();
272 293
273 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); 294 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
274 int param_count = descriptor->register_param_count_; 295 int param_count = descriptor->register_param_count_;
275 { 296 {
276 // Call the runtime system in a fresh internal frame. 297 // Call the runtime system in a fresh internal frame.
277 FrameScope scope(masm, StackFrame::INTERNAL); 298 FrameScope scope(masm, StackFrame::INTERNAL);
278 ASSERT(descriptor->register_param_count_ == 0 || 299 ASSERT(descriptor->register_param_count_ == 0 ||
279 eax.is(descriptor->register_params_[param_count - 1])); 300 eax.is(descriptor->register_params_[param_count - 1]));
280 // Push arguments 301 // Push arguments
281 for (int i = 0; i < param_count; ++i) { 302 for (int i = 0; i < param_count; ++i) {
282 __ push(descriptor->register_params_[i]); 303 __ push(descriptor->register_params_[i]);
283 } 304 }
284 ExternalReference miss = descriptor->miss_handler(); 305 ExternalReference miss = descriptor->miss_handler();
285 __ CallExternalReference(miss, descriptor->register_param_count_); 306 __ CallExternalReference(miss, descriptor->register_param_count_);
286 } 307 }
287 308
288 __ ret(0); 309 __ ret(0);
289 } 310 }
290 311
291 312
292 void ToNumberStub::Generate(MacroAssembler* masm) {
293   // The ToNumber stub takes one argument in eax.
294   Label check_heap_number, call_builtin;
295   __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear);
  // Smis are already numbers: return the input unchanged in eax.
296   __ ret(0);
297
298   __ bind(&check_heap_number);
299   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
300   Factory* factory = masm->isolate()->factory();
301   __ cmp(ebx, Immediate(factory->heap_number_map()));
302   __ j(not_equal, &call_builtin, Label::kNear);
  // Heap numbers are likewise returned unchanged.
303   __ ret(0);
304
  // Any other object: re-push the argument beneath the return address and
  // tail-call the TO_NUMBER builtin to perform the full conversion.
305   __ bind(&call_builtin);
306   __ pop(ecx);  // Pop return address.
307   __ push(eax);
308   __ push(ecx);  // Push return address.
309   __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
310 }
311
312
313 void FastNewClosureStub::Generate(MacroAssembler* masm) { 313 void FastNewClosureStub::Generate(MacroAssembler* masm) {
314 // Create a new closure from the given function info in new 314 // Create a new closure from the given function info in new
315 // space. Set the context to the current context in esi. 315 // space. Set the context to the current context in esi.
316 Counters* counters = masm->isolate()->counters(); 316 Counters* counters = masm->isolate()->counters();
317 317
318 Label gc; 318 Label gc;
319 __ Allocate(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); 319 __ Allocate(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
320 320
321 __ IncrementCounter(counters->fast_new_closure_total(), 1); 321 __ IncrementCounter(counters->fast_new_closure_total(), 1);
322 322
(...skipping 335 matching lines...) Expand 10 before | Expand all | Expand 10 after
658 658
659 Label check_negative, process_64_bits, done, done_no_stash; 659 Label check_negative, process_64_bits, done, done_no_stash;
660 660
661 int double_offset = offset(); 661 int double_offset = offset();
662 662
663 // Account for return address and saved regs if input is esp. 663 // Account for return address and saved regs if input is esp.
664 if (input_reg.is(esp)) double_offset += 3 * kPointerSize; 664 if (input_reg.is(esp)) double_offset += 3 * kPointerSize;
665 665
666 MemOperand mantissa_operand(MemOperand(input_reg, double_offset)); 666 MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
667 MemOperand exponent_operand(MemOperand(input_reg, 667 MemOperand exponent_operand(MemOperand(input_reg,
668 double_offset + kPointerSize)); 668 double_offset + kDoubleSize / 2));
669 669
670 Register scratch1; 670 Register scratch1;
671 { 671 {
672 Register scratch_candidates[3] = { ebx, edx, edi }; 672 Register scratch_candidates[3] = { ebx, edx, edi };
673 for (int i = 0; i < 3; i++) { 673 for (int i = 0; i < 3; i++) {
674 scratch1 = scratch_candidates[i]; 674 scratch1 = scratch_candidates[i];
675 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break; 675 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
676 } 676 }
677 } 677 }
678 // Since we must use ecx for shifts below, use some other register (eax) 678 // Since we must use ecx for shifts below, use some other register (eax)
(...skipping 656 matching lines...) Expand 10 before | Expand all | Expand 10 after
1335 // Test if left operand is a string. 1335 // Test if left operand is a string.
1336 __ JumpIfSmi(left, &call_runtime, Label::kNear); 1336 __ JumpIfSmi(left, &call_runtime, Label::kNear);
1337 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); 1337 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1338 __ j(above_equal, &call_runtime, Label::kNear); 1338 __ j(above_equal, &call_runtime, Label::kNear);
1339 1339
1340 // Test if right operand is a string. 1340 // Test if right operand is a string.
1341 __ JumpIfSmi(right, &call_runtime, Label::kNear); 1341 __ JumpIfSmi(right, &call_runtime, Label::kNear);
1342 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); 1342 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
1343 __ j(above_equal, &call_runtime, Label::kNear); 1343 __ j(above_equal, &call_runtime, Label::kNear);
1344 1344
1345 StringAddStub string_add_stub((StringAddFlags) 1345 StringAddStub string_add_stub(
1346 (ERECT_FRAME | NO_STRING_CHECK_IN_STUB)); 1346 (StringAddFlags)(STRING_ADD_CHECK_NONE | STRING_ADD_ERECT_FRAME));
1347 GenerateRegisterArgsPush(masm); 1347 GenerateRegisterArgsPush(masm);
1348 __ TailCallStub(&string_add_stub); 1348 __ TailCallStub(&string_add_stub);
1349 1349
1350 __ bind(&call_runtime); 1350 __ bind(&call_runtime);
1351 GenerateTypeTransition(masm); 1351 GenerateTypeTransition(masm);
1352 } 1352 }
1353 1353
1354 1354
1355 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, 1355 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
1356 Label* alloc_failure, 1356 Label* alloc_failure,
(...skipping 624 matching lines...) Expand 10 before | Expand all | Expand 10 after
1981 1981
1982 // Registers containing left and right operands respectively. 1982 // Registers containing left and right operands respectively.
1983 Register left = edx; 1983 Register left = edx;
1984 Register right = eax; 1984 Register right = eax;
1985 1985
1986 // Test if left operand is a string. 1986 // Test if left operand is a string.
1987 __ JumpIfSmi(left, &left_not_string, Label::kNear); 1987 __ JumpIfSmi(left, &left_not_string, Label::kNear);
1988 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); 1988 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1989 __ j(above_equal, &left_not_string, Label::kNear); 1989 __ j(above_equal, &left_not_string, Label::kNear);
1990 1990
1991 StringAddStub string_add_left_stub((StringAddFlags) 1991 StringAddStub string_add_left_stub(
1992 (ERECT_FRAME | NO_STRING_CHECK_LEFT_IN_STUB)); 1992 (StringAddFlags)(STRING_ADD_CHECK_RIGHT | STRING_ADD_ERECT_FRAME));
1993 GenerateRegisterArgsPush(masm); 1993 GenerateRegisterArgsPush(masm);
1994 __ TailCallStub(&string_add_left_stub); 1994 __ TailCallStub(&string_add_left_stub);
1995 1995
1996 // Left operand is not a string, test right. 1996 // Left operand is not a string, test right.
1997 __ bind(&left_not_string); 1997 __ bind(&left_not_string);
1998 __ JumpIfSmi(right, &call_runtime, Label::kNear); 1998 __ JumpIfSmi(right, &call_runtime, Label::kNear);
1999 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); 1999 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
2000 __ j(above_equal, &call_runtime, Label::kNear); 2000 __ j(above_equal, &call_runtime, Label::kNear);
2001 2001
2002 StringAddStub string_add_right_stub((StringAddFlags) 2002 StringAddStub string_add_right_stub(
2003 (ERECT_FRAME | NO_STRING_CHECK_RIGHT_IN_STUB)); 2003 (StringAddFlags)(STRING_ADD_CHECK_LEFT | STRING_ADD_ERECT_FRAME));
2004 GenerateRegisterArgsPush(masm); 2004 GenerateRegisterArgsPush(masm);
2005 __ TailCallStub(&string_add_right_stub); 2005 __ TailCallStub(&string_add_right_stub);
2006 2006
2007 // Neither argument is a string. 2007 // Neither argument is a string.
2008 __ bind(&call_runtime); 2008 __ bind(&call_runtime);
2009 } 2009 }
2010 2010
2011 2011
2012 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, 2012 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
2013 Label* alloc_failure, 2013 Label* alloc_failure,
(...skipping 1940 matching lines...) Expand 10 before | Expand all | Expand 10 after
3954 __ bind(&slowcase); 3954 __ bind(&slowcase);
3955 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); 3955 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
3956 } 3956 }
3957 3957
3958 3958
3959 void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, 3959 void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
3960 Register object, 3960 Register object,
3961 Register result, 3961 Register result,
3962 Register scratch1, 3962 Register scratch1,
3963 Register scratch2, 3963 Register scratch2,
3964 bool object_is_smi,
3965 Label* not_found) { 3964 Label* not_found) {
3966 // Use of registers. Register result is used as a temporary. 3965 // Use of registers. Register result is used as a temporary.
3967 Register number_string_cache = result; 3966 Register number_string_cache = result;
3968 Register mask = scratch1; 3967 Register mask = scratch1;
3969 Register scratch = scratch2; 3968 Register scratch = scratch2;
3970 3969
3971 // Load the number string cache. 3970 // Load the number string cache.
3972 ExternalReference roots_array_start = 3971 ExternalReference roots_array_start =
3973 ExternalReference::roots_array_start(masm->isolate()); 3972 ExternalReference::roots_array_start(masm->isolate());
3974 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); 3973 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
3975 __ mov(number_string_cache, 3974 __ mov(number_string_cache,
3976 Operand::StaticArray(scratch, times_pointer_size, roots_array_start)); 3975 Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
3977 // Make the hash mask from the length of the number string cache. It 3976 // Make the hash mask from the length of the number string cache. It
3978 // contains two elements (number and string) for each cache entry. 3977 // contains two elements (number and string) for each cache entry.
3979 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); 3978 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
3980 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. 3979 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
3981 __ sub(mask, Immediate(1)); // Make mask. 3980 __ sub(mask, Immediate(1)); // Make mask.
3982 3981
3983 // Calculate the entry in the number string cache. The hash value in the 3982 // Calculate the entry in the number string cache. The hash value in the
3984 // number string cache for smis is just the smi value, and the hash for 3983 // number string cache for smis is just the smi value, and the hash for
3985 // doubles is the xor of the upper and lower words. See 3984 // doubles is the xor of the upper and lower words. See
3986 // Heap::GetNumberStringCache. 3985 // Heap::GetNumberStringCache.
3987 Label smi_hash_calculated; 3986 Label smi_hash_calculated;
3988 Label load_result_from_cache; 3987 Label load_result_from_cache;
3989 if (object_is_smi) { 3988 Label not_smi;
3990 __ mov(scratch, object); 3989 STATIC_ASSERT(kSmiTag == 0);
3991 __ SmiUntag(scratch); 3990 __ JumpIfNotSmi(object, &not_smi, Label::kNear);
3991 __ mov(scratch, object);
3992 __ SmiUntag(scratch);
3993 __ jmp(&smi_hash_calculated, Label::kNear);
3994 __ bind(&not_smi);
3995 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
3996 masm->isolate()->factory()->heap_number_map());
3997 __ j(not_equal, not_found);
3998 STATIC_ASSERT(8 == kDoubleSize);
3999 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
4000 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
4001 // Object is heap number and hash is now in scratch. Calculate cache index.
4002 __ and_(scratch, mask);
4003 Register index = scratch;
4004 Register probe = mask;
4005 __ mov(probe,
4006 FieldOperand(number_string_cache,
4007 index,
4008 times_twice_pointer_size,
4009 FixedArray::kHeaderSize));
4010 __ JumpIfSmi(probe, not_found);
4011 if (CpuFeatures::IsSupported(SSE2)) {
4012 CpuFeatureScope fscope(masm, SSE2);
4013 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
4014 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
4015 __ ucomisd(xmm0, xmm1);
3992 } else { 4016 } else {
3993 Label not_smi; 4017 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
3994 STATIC_ASSERT(kSmiTag == 0); 4018 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
3995 __ JumpIfNotSmi(object, &not_smi, Label::kNear); 4019 __ FCmp();
3996 __ mov(scratch, object);
3997 __ SmiUntag(scratch);
3998 __ jmp(&smi_hash_calculated, Label::kNear);
3999 __ bind(&not_smi);
4000 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
4001 masm->isolate()->factory()->heap_number_map());
4002 __ j(not_equal, not_found);
4003 STATIC_ASSERT(8 == kDoubleSize);
4004 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
4005 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
4006 // Object is heap number and hash is now in scratch. Calculate cache index.
4007 __ and_(scratch, mask);
4008 Register index = scratch;
4009 Register probe = mask;
4010 __ mov(probe,
4011 FieldOperand(number_string_cache,
4012 index,
4013 times_twice_pointer_size,
4014 FixedArray::kHeaderSize));
4015 __ JumpIfSmi(probe, not_found);
4016 if (CpuFeatures::IsSupported(SSE2)) {
4017 CpuFeatureScope fscope(masm, SSE2);
4018 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
4019 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
4020 __ ucomisd(xmm0, xmm1);
4021 } else {
4022 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
4023 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
4024 __ FCmp();
4025 }
4026 __ j(parity_even, not_found); // Bail out if NaN is involved.
4027 __ j(not_equal, not_found); // The cache did not contain this value.
4028 __ jmp(&load_result_from_cache, Label::kNear);
4029 } 4020 }
4021 __ j(parity_even, not_found); // Bail out if NaN is involved.
4022 __ j(not_equal, not_found); // The cache did not contain this value.
4023 __ jmp(&load_result_from_cache, Label::kNear);
4030 4024
4031 __ bind(&smi_hash_calculated); 4025 __ bind(&smi_hash_calculated);
4032 // Object is smi and hash is now in scratch. Calculate cache index. 4026 // Object is smi and hash is now in scratch. Calculate cache index.
4033 __ and_(scratch, mask); 4027 __ and_(scratch, mask);
4034 Register index = scratch;
4035 // Check if the entry is the smi we are looking for. 4028 // Check if the entry is the smi we are looking for.
4036 __ cmp(object, 4029 __ cmp(object,
4037 FieldOperand(number_string_cache, 4030 FieldOperand(number_string_cache,
4038 index, 4031 index,
4039 times_twice_pointer_size, 4032 times_twice_pointer_size,
4040 FixedArray::kHeaderSize)); 4033 FixedArray::kHeaderSize));
4041 __ j(not_equal, not_found); 4034 __ j(not_equal, not_found);
4042 4035
4043 // Get the result from the cache. 4036 // Get the result from the cache.
4044 __ bind(&load_result_from_cache); 4037 __ bind(&load_result_from_cache);
4045 __ mov(result, 4038 __ mov(result,
4046 FieldOperand(number_string_cache, 4039 FieldOperand(number_string_cache,
4047 index, 4040 index,
4048 times_twice_pointer_size, 4041 times_twice_pointer_size,
4049 FixedArray::kHeaderSize + kPointerSize)); 4042 FixedArray::kHeaderSize + kPointerSize));
4050 Counters* counters = masm->isolate()->counters(); 4043 Counters* counters = masm->isolate()->counters();
4051 __ IncrementCounter(counters->number_to_string_native(), 1); 4044 __ IncrementCounter(counters->number_to_string_native(), 1);
4052 } 4045 }
4053 4046
4054 4047
4055 void NumberToStringStub::Generate(MacroAssembler* masm) { 4048 void NumberToStringStub::Generate(MacroAssembler* masm) {
4056 Label runtime; 4049 Label runtime;
4057 4050
4058 __ mov(ebx, Operand(esp, kPointerSize)); 4051 __ mov(ebx, Operand(esp, kPointerSize));
4059 4052
4060 // Generate code to lookup number in the number string cache. 4053 // Generate code to lookup number in the number string cache.
4061 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime); 4054 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, &runtime);
4062 __ ret(1 * kPointerSize); 4055 __ ret(1 * kPointerSize);
4063 4056
4064 __ bind(&runtime); 4057 __ bind(&runtime);
4065 // Handle number to string in the runtime system if not found in the cache. 4058 // Handle number to string in the runtime system if not found in the cache.
4066 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); 4059 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
4067 } 4060 }
4068 4061
4069 4062
4070 static int NegativeComparisonResult(Condition cc) { 4063 static int NegativeComparisonResult(Condition cc) {
4071 ASSERT(cc != equal); 4064 ASSERT(cc != equal);
(...skipping 22 matching lines...) Expand all
4094 } 4087 }
4095 4088
4096 4089
4097 static void BranchIfNotInternalizedString(MacroAssembler* masm, 4090 static void BranchIfNotInternalizedString(MacroAssembler* masm,
4098 Label* label, 4091 Label* label,
4099 Register object, 4092 Register object,
4100 Register scratch) { 4093 Register scratch) {
4101 __ JumpIfSmi(object, label); 4094 __ JumpIfSmi(object, label);
4102 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset)); 4095 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
4103 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); 4096 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4104 __ and_(scratch, kIsInternalizedMask | kIsNotStringMask); 4097 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
4105 __ cmp(scratch, kInternalizedTag | kStringTag); 4098 __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
4106 __ j(not_equal, label); 4099 __ j(not_zero, label);
4107 } 4100 }
4108 4101
4109 4102
4110 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { 4103 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
4111 Label check_unequal_objects; 4104 Label check_unequal_objects;
4112 Condition cc = GetCondition(); 4105 Condition cc = GetCondition();
4113 4106
4114 Label miss; 4107 Label miss;
4115 CheckInputType(masm, edx, left_, &miss); 4108 CheckInputType(masm, edx, left_, &miss);
4116 CheckInputType(masm, eax, right_, &miss); 4109 CheckInputType(masm, eax, right_, &miss);
(...skipping 1248 matching lines...) Expand 10 before | Expand all | Expand 10 after
5365 5358
5366 void StringAddStub::Generate(MacroAssembler* masm) { 5359 void StringAddStub::Generate(MacroAssembler* masm) {
5367 Label call_runtime, call_builtin; 5360 Label call_runtime, call_builtin;
5368 Builtins::JavaScript builtin_id = Builtins::ADD; 5361 Builtins::JavaScript builtin_id = Builtins::ADD;
5369 5362
5370 // Load the two arguments. 5363 // Load the two arguments.
5371 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument. 5364 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
5372 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument. 5365 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
5373 5366
5374 // Make sure that both arguments are strings if not known in advance. 5367 // Make sure that both arguments are strings if not known in advance.
5375 if ((flags_ & NO_STRING_ADD_FLAGS) != 0) { 5368 // Otherwise, at least one of the arguments is definitely a string,
5369 // and we convert the one that is not known to be a string.
5370 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
5371 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
5372 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
5376 __ JumpIfSmi(eax, &call_runtime); 5373 __ JumpIfSmi(eax, &call_runtime);
5377 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx); 5374 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
5378 __ j(above_equal, &call_runtime); 5375 __ j(above_equal, &call_runtime);
5379 5376
5380 // First argument is a string, test second. 5377 // First argument is a string, test second.
5381 __ JumpIfSmi(edx, &call_runtime); 5378 __ JumpIfSmi(edx, &call_runtime);
5382 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx); 5379 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
5383 __ j(above_equal, &call_runtime); 5380 __ j(above_equal, &call_runtime);
5384 } else { 5381 } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
5385 // Here at least one of the arguments is definitely a string. 5382 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
5386 // We convert the one that is not known to be a string. 5383 GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
5387 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) { 5384 &call_builtin);
5388 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0); 5385 builtin_id = Builtins::STRING_ADD_RIGHT;
5389 GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi, 5386 } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
5390 &call_builtin); 5387 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
5391 builtin_id = Builtins::STRING_ADD_RIGHT; 5388 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
5392 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) { 5389 &call_builtin);
5393 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0); 5390 builtin_id = Builtins::STRING_ADD_LEFT;
5394 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
5395 &call_builtin);
5396 builtin_id = Builtins::STRING_ADD_LEFT;
5397 }
5398 } 5391 }
5399 5392
5400 // Both arguments are strings. 5393 // Both arguments are strings.
5401 // eax: first string 5394 // eax: first string
5402 // edx: second string 5395 // edx: second string
5403 // Check if either of the strings are empty. In that case return the other. 5396 // Check if either of the strings are empty. In that case return the other.
5404 Label second_not_zero_length, both_not_zero_length; 5397 Label second_not_zero_length, both_not_zero_length;
5405 __ mov(ecx, FieldOperand(edx, String::kLengthOffset)); 5398 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
5406 STATIC_ASSERT(kSmiTag == 0); 5399 STATIC_ASSERT(kSmiTag == 0);
5407 __ test(ecx, ecx); 5400 __ test(ecx, ecx);
(...skipping 265 matching lines...) Expand 10 before | Expand all | Expand 10 after
5673 // edi: length of second argument 5666 // edi: length of second argument
5674 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false); 5667 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
5675 __ IncrementCounter(counters->string_add_native(), 1); 5668 __ IncrementCounter(counters->string_add_native(), 1);
5676 __ ret(2 * kPointerSize); 5669 __ ret(2 * kPointerSize);
5677 5670
5678 // Recover stack pointer before jumping to runtime. 5671 // Recover stack pointer before jumping to runtime.
5679 __ bind(&call_runtime_drop_two); 5672 __ bind(&call_runtime_drop_two);
5680 __ Drop(2); 5673 __ Drop(2);
5681 // Just jump to runtime to add the two strings. 5674 // Just jump to runtime to add the two strings.
5682 __ bind(&call_runtime); 5675 __ bind(&call_runtime);
5683 if ((flags_ & ERECT_FRAME) != 0) { 5676 if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) {
5684 GenerateRegisterArgsPop(masm, ecx); 5677 GenerateRegisterArgsPop(masm, ecx);
5685 // Build a frame 5678 // Build a frame
5686 { 5679 {
5687 FrameScope scope(masm, StackFrame::INTERNAL); 5680 FrameScope scope(masm, StackFrame::INTERNAL);
5688 GenerateRegisterArgsPush(masm); 5681 GenerateRegisterArgsPush(masm);
5689 __ CallRuntime(Runtime::kStringAdd, 2); 5682 __ CallRuntime(Runtime::kStringAdd, 2);
5690 } 5683 }
5691 __ ret(0); 5684 __ ret(0);
5692 } else { 5685 } else {
5693 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); 5686 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
5694 } 5687 }
5695 5688
5696 if (call_builtin.is_linked()) { 5689 if (call_builtin.is_linked()) {
5697 __ bind(&call_builtin); 5690 __ bind(&call_builtin);
5698 if ((flags_ & ERECT_FRAME) != 0) { 5691 if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) {
5699 GenerateRegisterArgsPop(masm, ecx); 5692 GenerateRegisterArgsPop(masm, ecx);
5700 // Build a frame 5693 // Build a frame
5701 { 5694 {
5702 FrameScope scope(masm, StackFrame::INTERNAL); 5695 FrameScope scope(masm, StackFrame::INTERNAL);
5703 GenerateRegisterArgsPush(masm); 5696 GenerateRegisterArgsPush(masm);
5704 __ InvokeBuiltin(builtin_id, CALL_FUNCTION); 5697 __ InvokeBuiltin(builtin_id, CALL_FUNCTION);
5705 } 5698 }
5706 __ ret(0); 5699 __ ret(0);
5707 } else { 5700 } else {
5708 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION); 5701 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
5741 5734
5742 // Check the number to string cache. 5735 // Check the number to string cache.
5743 Label not_cached; 5736 Label not_cached;
5744 __ bind(&not_string); 5737 __ bind(&not_string);
5745 // Puts the cached result into scratch1. 5738 // Puts the cached result into scratch1.
5746 NumberToStringStub::GenerateLookupNumberStringCache(masm, 5739 NumberToStringStub::GenerateLookupNumberStringCache(masm,
5747 arg, 5740 arg,
5748 scratch1, 5741 scratch1,
5749 scratch2, 5742 scratch2,
5750 scratch3, 5743 scratch3,
5751 false,
5752 &not_cached); 5744 &not_cached);
5753 __ mov(arg, scratch1); 5745 __ mov(arg, scratch1);
5754 __ mov(Operand(esp, stack_offset), arg); 5746 __ mov(Operand(esp, stack_offset), arg);
5755 __ jmp(&done); 5747 __ jmp(&done);
5756 5748
5757 // Check if the argument is a safe string wrapper. 5749 // Check if the argument is a safe string wrapper.
5758 __ bind(&not_cached); 5750 __ bind(&not_cached);
5759 __ JumpIfSmi(arg, slow); 5751 __ JumpIfSmi(arg, slow);
5760 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1. 5752 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1.
5761 __ j(not_equal, slow); 5753 __ j(not_equal, slow);
(...skipping 829 matching lines...) Expand 10 before | Expand all | Expand 10 after
6591 __ mov(tmp1, left); 6583 __ mov(tmp1, left);
6592 STATIC_ASSERT(kSmiTag == 0); 6584 STATIC_ASSERT(kSmiTag == 0);
6593 __ and_(tmp1, right); 6585 __ and_(tmp1, right);
6594 __ JumpIfSmi(tmp1, &miss, Label::kNear); 6586 __ JumpIfSmi(tmp1, &miss, Label::kNear);
6595 6587
6596 // Check that both operands are internalized strings. 6588 // Check that both operands are internalized strings.
6597 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 6589 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
6598 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 6590 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
6599 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 6591 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
6600 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 6592 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
6601 STATIC_ASSERT(kInternalizedTag != 0); 6593 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
6602 __ and_(tmp1, Immediate(kIsNotStringMask | kIsInternalizedMask)); 6594 __ or_(tmp1, tmp2);
6603 __ cmpb(tmp1, kInternalizedTag | kStringTag); 6595 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
6604 __ j(not_equal, &miss, Label::kNear); 6596 __ j(not_zero, &miss, Label::kNear);
6605
6606 __ and_(tmp2, Immediate(kIsNotStringMask | kIsInternalizedMask));
6607 __ cmpb(tmp2, kInternalizedTag | kStringTag);
6608 __ j(not_equal, &miss, Label::kNear);
6609 6597
6610 // Internalized strings are compared by identity. 6598 // Internalized strings are compared by identity.
6611 Label done; 6599 Label done;
6612 __ cmp(left, right); 6600 __ cmp(left, right);
6613 // Make sure eax is non-zero. At this point input operands are 6601 // Make sure eax is non-zero. At this point input operands are
6614 // guaranteed to be non-zero. 6602 // guaranteed to be non-zero.
6615 ASSERT(right.is(eax)); 6603 ASSERT(right.is(eax));
6616 __ j(not_equal, &done, Label::kNear); 6604 __ j(not_equal, &done, Label::kNear);
6617 STATIC_ASSERT(EQUAL == 0); 6605 STATIC_ASSERT(EQUAL == 0);
6618 STATIC_ASSERT(kSmiTag == 0); 6606 STATIC_ASSERT(kSmiTag == 0);
(...skipping 18 matching lines...) Expand all
6637 6625
6638 // Check that both operands are heap objects. 6626 // Check that both operands are heap objects.
6639 Label miss; 6627 Label miss;
6640 __ mov(tmp1, left); 6628 __ mov(tmp1, left);
6641 STATIC_ASSERT(kSmiTag == 0); 6629 STATIC_ASSERT(kSmiTag == 0);
6642 __ and_(tmp1, right); 6630 __ and_(tmp1, right);
6643 __ JumpIfSmi(tmp1, &miss, Label::kNear); 6631 __ JumpIfSmi(tmp1, &miss, Label::kNear);
6644 6632
6645 // Check that both operands are unique names. This leaves the instance 6633 // Check that both operands are unique names. This leaves the instance
6646 // types loaded in tmp1 and tmp2. 6634 // types loaded in tmp1 and tmp2.
6647 STATIC_ASSERT(kInternalizedTag != 0);
6648 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 6635 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
6649 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 6636 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
6650 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 6637 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
6651 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 6638 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
6652 6639
6653 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); 6640 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
6654 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); 6641 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
6655 6642
6656 // Unique names are compared by identity. 6643 // Unique names are compared by identity.
6657 Label done; 6644 Label done;
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after
6713 6700
6714 // Handle not identical strings. 6701 // Handle not identical strings.
6715 __ bind(&not_same); 6702 __ bind(&not_same);
6716 6703
6717 // Check that both strings are internalized. If they are, we're done 6704 // Check that both strings are internalized. If they are, we're done
6718 // because we already know they are not identical. But in the case of 6705 // because we already know they are not identical. But in the case of
6719 // non-equality compare, we still need to determine the order. We 6706 // non-equality compare, we still need to determine the order. We
6720 // also know they are both strings. 6707 // also know they are both strings.
6721 if (equality) { 6708 if (equality) {
6722 Label do_compare; 6709 Label do_compare;
6723 STATIC_ASSERT(kInternalizedTag != 0); 6710 STATIC_ASSERT(kInternalizedTag == 0);
6724 __ and_(tmp1, tmp2); 6711 __ or_(tmp1, tmp2);
6725 __ test(tmp1, Immediate(kIsInternalizedMask)); 6712 __ test(tmp1, Immediate(kIsNotInternalizedMask));
6726 __ j(zero, &do_compare, Label::kNear); 6713 __ j(not_zero, &do_compare, Label::kNear);
6727 // Make sure eax is non-zero. At this point input operands are 6714 // Make sure eax is non-zero. At this point input operands are
6728 // guaranteed to be non-zero. 6715 // guaranteed to be non-zero.
6729 ASSERT(right.is(eax)); 6716 ASSERT(right.is(eax));
6730 __ ret(0); 6717 __ ret(0);
6731 __ bind(&do_compare); 6718 __ bind(&do_compare);
6732 } 6719 }
6733 6720
6734 // Check that both strings are sequential ASCII. 6721 // Check that both strings are sequential ASCII.
6735 Label runtime; 6722 Label runtime;
6736 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); 6723 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
(...skipping 1033 matching lines...) Expand 10 before | Expand all | Expand 10 after
7770 __ bind(&fast_elements_case); 7757 __ bind(&fast_elements_case);
7771 GenerateCase(masm, FAST_ELEMENTS); 7758 GenerateCase(masm, FAST_ELEMENTS);
7772 } 7759 }
7773 7760
7774 7761
7775 #undef __ 7762 #undef __
7776 7763
7777 } } // namespace v8::internal 7764 } } // namespace v8::internal
7778 7765
7779 #endif // V8_TARGET_ARCH_IA32 7766 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/code-stubs-ia32.h ('k') | src/ia32/codegen-ia32.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698