Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 6696107: Cleanup more isolate usage in ia32 files. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Address comments. Created 9 years, 9 months ago
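The recurring change in this patch replaces uses of the FACTORY macro with a Factory* fetched once from the MacroAssembler's isolate and cached in a local. A minimal before/after sketch of the pattern, lifted from the ToNumberStub hunk below (surrounding stub code elided; the comments are explanatory and not part of the patch):

    // Before: each use goes through the FACTORY macro.
    __ cmp(Operand(ebx), Immediate(FACTORY->heap_number_map()));

    // After: obtain the factory once via the masm's isolate and reuse the local,
    // presumably to avoid repeated isolate/factory lookups inside the generator.
    Factory* factory = masm->isolate()->factory();
    __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));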
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 31 matching lines...)
42 42
43 void ToNumberStub::Generate(MacroAssembler* masm) { 43 void ToNumberStub::Generate(MacroAssembler* masm) {
44 // The ToNumber stub takes one argument in eax. 44 // The ToNumber stub takes one argument in eax.
45 NearLabel check_heap_number, call_builtin; 45 NearLabel check_heap_number, call_builtin;
46 __ test(eax, Immediate(kSmiTagMask)); 46 __ test(eax, Immediate(kSmiTagMask));
47 __ j(not_zero, &check_heap_number); 47 __ j(not_zero, &check_heap_number);
48 __ ret(0); 48 __ ret(0);
49 49
50 __ bind(&check_heap_number); 50 __ bind(&check_heap_number);
51 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 51 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
52 __ cmp(Operand(ebx), Immediate(FACTORY->heap_number_map())); 52 Factory* factory = masm->isolate()->factory();
53 __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
53 __ j(not_equal, &call_builtin); 54 __ j(not_equal, &call_builtin);
54 __ ret(0); 55 __ ret(0);
55 56
56 __ bind(&call_builtin); 57 __ bind(&call_builtin);
57 __ pop(ecx); // Pop return address. 58 __ pop(ecx); // Pop return address.
58 __ push(eax); 59 __ push(eax);
59 __ push(ecx); // Push return address. 60 __ push(ecx); // Push return address.
60 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); 61 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
61 } 62 }
62 63
(...skipping 13 matching lines...)
76 77
77 // Compute the function map in the current global context and set that 78 // Compute the function map in the current global context and set that
78 // as the map of the allocated object. 79 // as the map of the allocated object.
79 __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); 80 __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
80 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset)); 81 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
81 __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index))); 82 __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
82 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx); 83 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
83 84
84 // Initialize the rest of the function. We don't have to update the 85 // Initialize the rest of the function. We don't have to update the
85 // write barrier because the allocated object is in new space. 86 // write barrier because the allocated object is in new space.
86 __ mov(ebx, Immediate(FACTORY->empty_fixed_array())); 87 Factory* factory = masm->isolate()->factory();
88 __ mov(ebx, Immediate(factory->empty_fixed_array()));
87 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx); 89 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
88 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx); 90 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
89 __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset), 91 __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
90 Immediate(FACTORY->the_hole_value())); 92 Immediate(factory->the_hole_value()));
91 __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx); 93 __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
92 __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi); 94 __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
93 __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx); 95 __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
94 __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset), 96 __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
95 Immediate(FACTORY->undefined_value())); 97 Immediate(factory->undefined_value()));
96 98
97 // Initialize the code pointer in the function to be the one 99 // Initialize the code pointer in the function to be the one
98 // found in the shared function info object. 100 // found in the shared function info object.
99 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset)); 101 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
100 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); 102 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
101 __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx); 103 __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);
102 104
103 // Return and remove the on-stack parameter. 105 // Return and remove the on-stack parameter.
104 __ ret(1 * kPointerSize); 106 __ ret(1 * kPointerSize);
105 107
106 // Create a new closure through the slower runtime call. 108 // Create a new closure through the slower runtime call.
107 __ bind(&gc); 109 __ bind(&gc);
108 __ pop(ecx); // Temporarily remove return address. 110 __ pop(ecx); // Temporarily remove return address.
109 __ pop(edx); 111 __ pop(edx);
110 __ push(esi); 112 __ push(esi);
111 __ push(edx); 113 __ push(edx);
112 __ push(Immediate(FACTORY->false_value())); 114 __ push(Immediate(factory->false_value()));
113 __ push(ecx); // Restore return address. 115 __ push(ecx); // Restore return address.
114 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); 116 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
115 } 117 }
116 118
117 119
118 void FastNewContextStub::Generate(MacroAssembler* masm) { 120 void FastNewContextStub::Generate(MacroAssembler* masm) {
119 // Try to allocate the context in new space. 121 // Try to allocate the context in new space.
120 Label gc; 122 Label gc;
121 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 123 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
122 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, 124 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
123 eax, ebx, ecx, &gc, TAG_OBJECT); 125 eax, ebx, ecx, &gc, TAG_OBJECT);
124 126
125 // Get the function from the stack. 127 // Get the function from the stack.
126 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 128 __ mov(ecx, Operand(esp, 1 * kPointerSize));
127 129
128 // Setup the object header. 130 // Setup the object header.
129 __ mov(FieldOperand(eax, HeapObject::kMapOffset), FACTORY->context_map()); 131 Factory* factory = masm->isolate()->factory();
132 __ mov(FieldOperand(eax, HeapObject::kMapOffset), factory->context_map());
130 __ mov(FieldOperand(eax, Context::kLengthOffset), 133 __ mov(FieldOperand(eax, Context::kLengthOffset),
131 Immediate(Smi::FromInt(length))); 134 Immediate(Smi::FromInt(length)));
132 135
133 // Setup the fixed slots. 136 // Setup the fixed slots.
134 __ Set(ebx, Immediate(0)); // Set to NULL. 137 __ Set(ebx, Immediate(0)); // Set to NULL.
135 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); 138 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
136 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); 139 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
137 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); 140 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
138 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); 141 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
139 142
140 // Copy the global object from the surrounding context. We go through the 143 // Copy the global object from the surrounding context. We go through the
141 // context in the function (ecx) to match the allocation behavior we have 144 // context in the function (ecx) to match the allocation behavior we have
142 // in the runtime system (see Heap::AllocateFunctionContext). 145 // in the runtime system (see Heap::AllocateFunctionContext).
143 __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset)); 146 __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
144 __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX))); 147 __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
145 __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx); 148 __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
146 149
147 // Initialize the rest of the slots to undefined. 150 // Initialize the rest of the slots to undefined.
148 __ mov(ebx, FACTORY->undefined_value()); 151 __ mov(ebx, factory->undefined_value());
149 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { 152 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
150 __ mov(Operand(eax, Context::SlotOffset(i)), ebx); 153 __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
151 } 154 }
152 155
153 // Return and remove the on-stack parameter. 156 // Return and remove the on-stack parameter.
154 __ mov(esi, Operand(eax)); 157 __ mov(esi, Operand(eax));
155 __ ret(1 * kPointerSize); 158 __ ret(1 * kPointerSize);
156 159
157 // Need to collect. Call into runtime system. 160 // Need to collect. Call into runtime system.
158 __ bind(&gc); 161 __ bind(&gc);
(...skipping 15 matching lines...)
174 // Load boilerplate object into ecx and check if we need to create a 177 // Load boilerplate object into ecx and check if we need to create a
175 // boilerplate. 178 // boilerplate.
176 Label slow_case; 179 Label slow_case;
177 __ mov(ecx, Operand(esp, 3 * kPointerSize)); 180 __ mov(ecx, Operand(esp, 3 * kPointerSize));
178 __ mov(eax, Operand(esp, 2 * kPointerSize)); 181 __ mov(eax, Operand(esp, 2 * kPointerSize));
179 STATIC_ASSERT(kPointerSize == 4); 182 STATIC_ASSERT(kPointerSize == 4);
180 STATIC_ASSERT(kSmiTagSize == 1); 183 STATIC_ASSERT(kSmiTagSize == 1);
181 STATIC_ASSERT(kSmiTag == 0); 184 STATIC_ASSERT(kSmiTag == 0);
182 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size, 185 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
183 FixedArray::kHeaderSize)); 186 FixedArray::kHeaderSize));
184 __ cmp(ecx, FACTORY->undefined_value()); 187 Factory* factory = masm->isolate()->factory();
188 __ cmp(ecx, factory->undefined_value());
185 __ j(equal, &slow_case); 189 __ j(equal, &slow_case);
186 190
187 if (FLAG_debug_code) { 191 if (FLAG_debug_code) {
188 const char* message; 192 const char* message;
189 Handle<Map> expected_map; 193 Handle<Map> expected_map;
190 if (mode_ == CLONE_ELEMENTS) { 194 if (mode_ == CLONE_ELEMENTS) {
191 message = "Expected (writable) fixed array"; 195 message = "Expected (writable) fixed array";
192 expected_map = FACTORY->fixed_array_map(); 196 expected_map = factory->fixed_array_map();
193 } else { 197 } else {
194 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); 198 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
195 message = "Expected copy-on-write fixed array"; 199 message = "Expected copy-on-write fixed array";
196 expected_map = FACTORY->fixed_cow_array_map(); 200 expected_map = factory->fixed_cow_array_map();
197 } 201 }
198 __ push(ecx); 202 __ push(ecx);
199 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); 203 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
200 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map); 204 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
201 __ Assert(equal, message); 205 __ Assert(equal, message);
202 __ pop(ecx); 206 __ pop(ecx);
203 } 207 }
204 208
205 // Allocate both the JS array and the elements array in one big 209 // Allocate both the JS array and the elements array in one big
206 // allocation. This avoids multiple limit checks. 210 // allocation. This avoids multiple limit checks.
(...skipping 28 matching lines...)
235 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); 239 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
236 } 240 }
237 241
238 242
239 // NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined). 243 // NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
240 void ToBooleanStub::Generate(MacroAssembler* masm) { 244 void ToBooleanStub::Generate(MacroAssembler* masm) {
241 NearLabel false_result, true_result, not_string; 245 NearLabel false_result, true_result, not_string;
242 __ mov(eax, Operand(esp, 1 * kPointerSize)); 246 __ mov(eax, Operand(esp, 1 * kPointerSize));
243 247
244 // 'null' => false. 248 // 'null' => false.
245 __ cmp(eax, FACTORY->null_value()); 249 Factory* factory = masm->isolate()->factory();
250 __ cmp(eax, factory->null_value());
246 __ j(equal, &false_result); 251 __ j(equal, &false_result);
247 252
248 // Get the map and type of the heap object. 253 // Get the map and type of the heap object.
249 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 254 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
250 __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset)); 255 __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));
251 256
252 // Undetectable => false. 257 // Undetectable => false.
253 __ test_b(FieldOperand(edx, Map::kBitFieldOffset), 258 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
254 1 << Map::kIsUndetectable); 259 1 << Map::kIsUndetectable);
255 __ j(not_zero, &false_result); 260 __ j(not_zero, &false_result);
256 261
257 // JavaScript object => true. 262 // JavaScript object => true.
258 __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE); 263 __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
259 __ j(above_equal, &true_result); 264 __ j(above_equal, &true_result);
260 265
261 // String value => false iff empty. 266 // String value => false iff empty.
262 __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE); 267 __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
263 __ j(above_equal, &not_string); 268 __ j(above_equal, &not_string);
264 STATIC_ASSERT(kSmiTag == 0); 269 STATIC_ASSERT(kSmiTag == 0);
265 __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0)); 270 __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
266 __ j(zero, &false_result); 271 __ j(zero, &false_result);
267 __ jmp(&true_result); 272 __ jmp(&true_result);
268 273
269 __ bind(&not_string); 274 __ bind(&not_string);
270 // HeapNumber => false iff +0, -0, or NaN. 275 // HeapNumber => false iff +0, -0, or NaN.
271 __ cmp(edx, FACTORY->heap_number_map()); 276 __ cmp(edx, factory->heap_number_map());
272 __ j(not_equal, &true_result); 277 __ j(not_equal, &true_result);
273 __ fldz(); 278 __ fldz();
274 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); 279 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
275 __ FCmp(); 280 __ FCmp();
276 __ j(zero, &false_result); 281 __ j(zero, &false_result);
277 // Fall through to |true_result|. 282 // Fall through to |true_result|.
278 283
279 // Return 1/0 for true/false in eax. 284 // Return 1/0 for true/false in eax.
280 __ bind(&true_result); 285 __ bind(&true_result);
281 __ mov(eax, 1); 286 __ mov(eax, 1);
(...skipping 2232 matching lines...)
2514 __ sub(Operand(esp), Immediate(2 * kPointerSize)); 2519 __ sub(Operand(esp), Immediate(2 * kPointerSize));
2515 __ mov(Operand(esp, 0), eax); 2520 __ mov(Operand(esp, 0), eax);
2516 __ fild_s(Operand(esp, 0)); 2521 __ fild_s(Operand(esp, 0));
2517 __ fst_d(Operand(esp, 0)); 2522 __ fst_d(Operand(esp, 0));
2518 __ pop(edx); 2523 __ pop(edx);
2519 __ pop(ebx); 2524 __ pop(ebx);
2520 __ jmp(&loaded); 2525 __ jmp(&loaded);
2521 __ bind(&input_not_smi); 2526 __ bind(&input_not_smi);
2522 // Check if input is a HeapNumber. 2527 // Check if input is a HeapNumber.
2523 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 2528 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2524 __ cmp(Operand(ebx), Immediate(FACTORY->heap_number_map())); 2529 Factory* factory = masm->isolate()->factory();
2530 __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
2525 __ j(not_equal, &runtime_call); 2531 __ j(not_equal, &runtime_call);
2526 // Input is a HeapNumber. Push it on the FPU stack and load its 2532 // Input is a HeapNumber. Push it on the FPU stack and load its
2527 // low and high words into ebx, edx. 2533 // low and high words into ebx, edx.
2528 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); 2534 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
2529 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); 2535 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
2530 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset)); 2536 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));
2531 2537
2532 __ bind(&loaded); 2538 __ bind(&loaded);
2533 } else { // UNTAGGED. 2539 } else { // UNTAGGED.
2534 if (masm->isolate()->cpu_features()->IsSupported(SSE4_1)) { 2540 if (masm->isolate()->cpu_features()->IsSupported(SSE4_1)) {
(...skipping 447 matching lines...)
2982 2988
2983 // Test if arg1 is a Smi. 2989 // Test if arg1 is a Smi.
2984 __ test(edx, Immediate(kSmiTagMask)); 2990 __ test(edx, Immediate(kSmiTagMask));
2985 __ j(not_zero, &arg1_is_object); 2991 __ j(not_zero, &arg1_is_object);
2986 2992
2987 __ SmiUntag(edx); 2993 __ SmiUntag(edx);
2988 __ jmp(&load_arg2); 2994 __ jmp(&load_arg2);
2989 2995
2990 // If the argument is undefined it converts to zero (ECMA-262, section 9.5). 2996 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2991 __ bind(&check_undefined_arg1); 2997 __ bind(&check_undefined_arg1);
2992 __ cmp(edx, FACTORY->undefined_value()); 2998 Factory* factory = masm->isolate()->factory();
2999 __ cmp(edx, factory->undefined_value());
2993 __ j(not_equal, conversion_failure); 3000 __ j(not_equal, conversion_failure);
2994 __ mov(edx, Immediate(0)); 3001 __ mov(edx, Immediate(0));
2995 __ jmp(&load_arg2); 3002 __ jmp(&load_arg2);
2996 3003
2997 __ bind(&arg1_is_object); 3004 __ bind(&arg1_is_object);
2998 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); 3005 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
2999 __ cmp(ebx, FACTORY->heap_number_map()); 3006 __ cmp(ebx, factory->heap_number_map());
3000 __ j(not_equal, &check_undefined_arg1); 3007 __ j(not_equal, &check_undefined_arg1);
3001 3008
3002 // Get the untagged integer version of the edx heap number in ecx. 3009 // Get the untagged integer version of the edx heap number in ecx.
3003 IntegerConvert(masm, 3010 IntegerConvert(masm,
3004 edx, 3011 edx,
3005 TypeInfo::Unknown(), 3012 TypeInfo::Unknown(),
3006 use_sse3, 3013 use_sse3,
3007 conversion_failure); 3014 conversion_failure);
3008 __ mov(edx, ecx); 3015 __ mov(edx, ecx);
3009 3016
3010 // Here edx has the untagged integer, eax has a Smi or a heap number. 3017 // Here edx has the untagged integer, eax has a Smi or a heap number.
3011 __ bind(&load_arg2); 3018 __ bind(&load_arg2);
3012 3019
3013 // Test if arg2 is a Smi. 3020 // Test if arg2 is a Smi.
3014 __ test(eax, Immediate(kSmiTagMask)); 3021 __ test(eax, Immediate(kSmiTagMask));
3015 __ j(not_zero, &arg2_is_object); 3022 __ j(not_zero, &arg2_is_object);
3016 3023
3017 __ SmiUntag(eax); 3024 __ SmiUntag(eax);
3018 __ mov(ecx, eax); 3025 __ mov(ecx, eax);
3019 __ jmp(&done); 3026 __ jmp(&done);
3020 3027
3021 // If the argument is undefined it converts to zero (ECMA-262, section 9.5). 3028 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
3022 __ bind(&check_undefined_arg2); 3029 __ bind(&check_undefined_arg2);
3023 __ cmp(eax, FACTORY->undefined_value()); 3030 __ cmp(eax, factory->undefined_value());
3024 __ j(not_equal, conversion_failure); 3031 __ j(not_equal, conversion_failure);
3025 __ mov(ecx, Immediate(0)); 3032 __ mov(ecx, Immediate(0));
3026 __ jmp(&done); 3033 __ jmp(&done);
3027 3034
3028 __ bind(&arg2_is_object); 3035 __ bind(&arg2_is_object);
3029 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 3036 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3030 __ cmp(ebx, FACTORY->heap_number_map()); 3037 __ cmp(ebx, factory->heap_number_map());
3031 __ j(not_equal, &check_undefined_arg2); 3038 __ j(not_equal, &check_undefined_arg2);
3032 3039
3033 // Get the untagged integer version of the eax heap number in ecx. 3040 // Get the untagged integer version of the eax heap number in ecx.
3034 IntegerConvert(masm, 3041 IntegerConvert(masm,
3035 eax, 3042 eax,
3036 TypeInfo::Unknown(), 3043 TypeInfo::Unknown(),
3037 use_sse3, 3044 use_sse3,
3038 conversion_failure); 3045 conversion_failure);
3039 __ bind(&done); 3046 __ bind(&done);
3040 __ mov(eax, edx); 3047 __ mov(eax, edx);
(...skipping 66 matching lines...)
3107 __ bind(&done); 3114 __ bind(&done);
3108 } 3115 }
3109 3116
3110 3117
3111 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm, 3118 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
3112 Label* not_numbers) { 3119 Label* not_numbers) {
3113 NearLabel load_smi_edx, load_eax, load_smi_eax, load_float_eax, done; 3120 NearLabel load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
3114 // Load operand in edx into xmm0, or branch to not_numbers. 3121 // Load operand in edx into xmm0, or branch to not_numbers.
3115 __ test(edx, Immediate(kSmiTagMask)); 3122 __ test(edx, Immediate(kSmiTagMask));
3116 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi. 3123 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
3117 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), FACTORY->heap_number_map()); 3124 Factory* factory = masm->isolate()->factory();
3125 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
3118 __ j(not_equal, not_numbers); // Argument in edx is not a number. 3126 __ j(not_equal, not_numbers); // Argument in edx is not a number.
3119 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 3127 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3120 __ bind(&load_eax); 3128 __ bind(&load_eax);
3121 // Load operand in eax into xmm1, or branch to not_numbers. 3129 // Load operand in eax into xmm1, or branch to not_numbers.
3122 __ test(eax, Immediate(kSmiTagMask)); 3130 __ test(eax, Immediate(kSmiTagMask));
3123 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi. 3131 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
3124 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), FACTORY->heap_number_map()); 3132 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
3125 __ j(equal, &load_float_eax); 3133 __ j(equal, &load_float_eax);
3126 __ jmp(not_numbers); // Argument in eax is not a number. 3134 __ jmp(not_numbers); // Argument in eax is not a number.
3127 __ bind(&load_smi_edx); 3135 __ bind(&load_smi_edx);
3128 __ SmiUntag(edx); // Untag smi before converting to float. 3136 __ SmiUntag(edx); // Untag smi before converting to float.
3129 __ cvtsi2sd(xmm0, Operand(edx)); 3137 __ cvtsi2sd(xmm0, Operand(edx));
3130 __ SmiTag(edx); // Retag smi for heap number overwriting test. 3138 __ SmiTag(edx); // Retag smi for heap number overwriting test.
3131 __ jmp(&load_eax); 3139 __ jmp(&load_eax);
3132 __ bind(&load_smi_eax); 3140 __ bind(&load_smi_eax);
3133 __ SmiUntag(eax); // Untag smi before converting to float. 3141 __ SmiUntag(eax); // Untag smi before converting to float.
3134 __ cvtsi2sd(xmm1, Operand(eax)); 3142 __ cvtsi2sd(xmm1, Operand(eax));
(...skipping 97 matching lines...)
3232 3240
3233 void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm, 3241 void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
3234 Label* non_float, 3242 Label* non_float,
3235 Register scratch) { 3243 Register scratch) {
3236 NearLabel test_other, done; 3244 NearLabel test_other, done;
3237 // Test if both operands are floats or smi -> scratch=k_is_float; 3245 // Test if both operands are floats or smi -> scratch=k_is_float;
3238 // Otherwise scratch = k_not_float. 3246 // Otherwise scratch = k_not_float.
3239 __ test(edx, Immediate(kSmiTagMask)); 3247 __ test(edx, Immediate(kSmiTagMask));
3240 __ j(zero, &test_other, not_taken); // argument in edx is OK 3248 __ j(zero, &test_other, not_taken); // argument in edx is OK
3241 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset)); 3249 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
3242 __ cmp(scratch, FACTORY->heap_number_map()); 3250 Factory* factory = masm->isolate()->factory();
3251 __ cmp(scratch, factory->heap_number_map());
3243 __ j(not_equal, non_float); // argument in edx is not a number -> NaN 3252 __ j(not_equal, non_float); // argument in edx is not a number -> NaN
3244 3253
3245 __ bind(&test_other); 3254 __ bind(&test_other);
3246 __ test(eax, Immediate(kSmiTagMask)); 3255 __ test(eax, Immediate(kSmiTagMask));
3247 __ j(zero, &done); // argument in eax is OK 3256 __ j(zero, &done); // argument in eax is OK
3248 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset)); 3257 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
3249 __ cmp(scratch, FACTORY->heap_number_map()); 3258 __ cmp(scratch, factory->heap_number_map());
3250 __ j(not_equal, non_float); // argument in eax is not a number -> NaN 3259 __ j(not_equal, non_float); // argument in eax is not a number -> NaN
3251 3260
3252 // Fall-through: Both operands are numbers. 3261 // Fall-through: Both operands are numbers.
3253 __ bind(&done); 3262 __ bind(&done);
3254 } 3263 }
3255 3264
3256 3265
3257 void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm, 3266 void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
3258 Label* non_int32) { 3267 Label* non_int32) {
3259 return; 3268 return;
(...skipping 25 matching lines...)
3285 __ j(overflow, &undo, not_taken); 3294 __ j(overflow, &undo, not_taken);
3286 __ StubReturn(1); 3295 __ StubReturn(1);
3287 3296
3288 // Try floating point case. 3297 // Try floating point case.
3289 __ bind(&try_float); 3298 __ bind(&try_float);
3290 } else if (FLAG_debug_code) { 3299 } else if (FLAG_debug_code) {
3291 __ AbortIfSmi(eax); 3300 __ AbortIfSmi(eax);
3292 } 3301 }
3293 3302
3294 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 3303 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3295 __ cmp(edx, FACTORY->heap_number_map()); 3304 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
3296 __ j(not_equal, &slow); 3305 __ j(not_equal, &slow);
3297 if (overwrite_ == UNARY_OVERWRITE) { 3306 if (overwrite_ == UNARY_OVERWRITE) {
3298 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); 3307 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
3299 __ xor_(edx, HeapNumber::kSignMask); // Flip sign. 3308 __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
3300 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx); 3309 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
3301 } else { 3310 } else {
3302 __ mov(edx, Operand(eax)); 3311 __ mov(edx, Operand(eax));
3303 // edx: operand 3312 // edx: operand
3304 __ AllocateHeapNumber(eax, ebx, ecx, &undo); 3313 __ AllocateHeapNumber(eax, ebx, ecx, &undo);
3305 // eax: allocated 'empty' number 3314 // eax: allocated 'empty' number
(...skipping 11 matching lines...)
3317 __ not_(eax); 3326 __ not_(eax);
3318 __ and_(eax, ~kSmiTagMask); // Remove inverted smi-tag. 3327 __ and_(eax, ~kSmiTagMask); // Remove inverted smi-tag.
3319 __ ret(0); 3328 __ ret(0);
3320 __ bind(&non_smi); 3329 __ bind(&non_smi);
3321 } else if (FLAG_debug_code) { 3330 } else if (FLAG_debug_code) {
3322 __ AbortIfSmi(eax); 3331 __ AbortIfSmi(eax);
3323 } 3332 }
3324 3333
3325 // Check if the operand is a heap number. 3334 // Check if the operand is a heap number.
3326 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 3335 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3327 __ cmp(edx, FACTORY->heap_number_map()); 3336 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
3328 __ j(not_equal, &slow, not_taken); 3337 __ j(not_equal, &slow, not_taken);
3329 3338
3330 // Convert the heap number in eax to an untagged integer in ecx. 3339 // Convert the heap number in eax to an untagged integer in ecx.
3331 IntegerConvert(masm, 3340 IntegerConvert(masm,
3332 eax, 3341 eax,
3333 TypeInfo::Unknown(), 3342 TypeInfo::Unknown(),
3334 masm->isolate()->cpu_features()->IsSupported(SSE3), 3343 masm->isolate()->cpu_features()->IsSupported(SSE3),
3335 &slow); 3344 &slow);
3336 3345
3337 // Do the bitwise operation and check if the result fits in a smi. 3346 // Do the bitwise operation and check if the result fits in a smi.
(...skipping 81 matching lines...)
3419 __ test(edx, Immediate(kSmiTagMask)); 3428 __ test(edx, Immediate(kSmiTagMask));
3420 __ j(not_zero, &base_nonsmi); 3429 __ j(not_zero, &base_nonsmi);
3421 3430
3422 // Optimized version when both exponent and base are smis. 3431 // Optimized version when both exponent and base are smis.
3423 Label powi; 3432 Label powi;
3424 __ SmiUntag(edx); 3433 __ SmiUntag(edx);
3425 __ cvtsi2sd(xmm0, Operand(edx)); 3434 __ cvtsi2sd(xmm0, Operand(edx));
3426 __ jmp(&powi); 3435 __ jmp(&powi);
3427 // exponent is smi and base is a heapnumber. 3436 // exponent is smi and base is a heapnumber.
3428 __ bind(&base_nonsmi); 3437 __ bind(&base_nonsmi);
3438 Factory* factory = masm->isolate()->factory();
3429 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 3439 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3430 FACTORY->heap_number_map()); 3440 factory->heap_number_map());
3431 __ j(not_equal, &call_runtime); 3441 __ j(not_equal, &call_runtime);
3432 3442
3433 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 3443 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3434 3444
3435 // Optimized version of pow if exponent is a smi. 3445 // Optimized version of pow if exponent is a smi.
3436 // xmm0 contains the base. 3446 // xmm0 contains the base.
3437 __ bind(&powi); 3447 __ bind(&powi);
3438 __ SmiUntag(eax); 3448 __ SmiUntag(eax);
3439 3449
3440 // Save exponent in base as we need to check if exponent is negative later. 3450 // Save exponent in base as we need to check if exponent is negative later.
(...skipping 31 matching lines...)
3472 __ ucomisd(xmm0, xmm1); 3482 __ ucomisd(xmm0, xmm1);
3473 __ j(equal, &call_runtime); 3483 __ j(equal, &call_runtime);
3474 __ divsd(xmm3, xmm1); 3484 __ divsd(xmm3, xmm1);
3475 __ movsd(xmm1, xmm3); 3485 __ movsd(xmm1, xmm3);
3476 __ jmp(&allocate_return); 3486 __ jmp(&allocate_return);
3477 3487
3478 // exponent (or both) is a heapnumber - no matter what we should now work 3488 // exponent (or both) is a heapnumber - no matter what we should now work
3479 // on doubles. 3489 // on doubles.
3480 __ bind(&exponent_nonsmi); 3490 __ bind(&exponent_nonsmi);
3481 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), 3491 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3482 FACTORY->heap_number_map()); 3492 factory->heap_number_map());
3483 __ j(not_equal, &call_runtime); 3493 __ j(not_equal, &call_runtime);
3484 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); 3494 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3485 // Test if exponent is nan. 3495 // Test if exponent is nan.
3486 __ ucomisd(xmm1, xmm1); 3496 __ ucomisd(xmm1, xmm1);
3487 __ j(parity_even, &call_runtime); 3497 __ j(parity_even, &call_runtime);
3488 3498
3489 NearLabel base_not_smi; 3499 NearLabel base_not_smi;
3490 NearLabel handle_special_cases; 3500 NearLabel handle_special_cases;
3491 __ test(edx, Immediate(kSmiTagMask)); 3501 __ test(edx, Immediate(kSmiTagMask));
3492 __ j(not_zero, &base_not_smi); 3502 __ j(not_zero, &base_not_smi);
3493 __ SmiUntag(edx); 3503 __ SmiUntag(edx);
3494 __ cvtsi2sd(xmm0, Operand(edx)); 3504 __ cvtsi2sd(xmm0, Operand(edx));
3495 __ jmp(&handle_special_cases); 3505 __ jmp(&handle_special_cases);
3496 3506
3497 __ bind(&base_not_smi); 3507 __ bind(&base_not_smi);
3498 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 3508 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3499 FACTORY->heap_number_map()); 3509 factory->heap_number_map());
3500 __ j(not_equal, &call_runtime); 3510 __ j(not_equal, &call_runtime);
3501 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); 3511 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
3502 __ and_(ecx, HeapNumber::kExponentMask); 3512 __ and_(ecx, HeapNumber::kExponentMask);
3503 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask)); 3513 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask));
3504 // base is NaN or +/-Infinity 3514 // base is NaN or +/-Infinity
3505 __ j(greater_equal, &call_runtime); 3515 __ j(greater_equal, &call_runtime);
3506 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 3516 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3507 3517
3508 // base is in xmm0 and exponent is in xmm1. 3518 // base is in xmm0 and exponent is in xmm1.
3509 __ bind(&handle_special_cases); 3519 __ bind(&handle_special_cases);
(...skipping 177 matching lines...)
3687 __ j(zero, &done); 3697 __ j(zero, &done);
3688 3698
3689 // Get the parameters pointer from the stack. 3699 // Get the parameters pointer from the stack.
3690 __ mov(edx, Operand(esp, 2 * kPointerSize)); 3700 __ mov(edx, Operand(esp, 2 * kPointerSize));
3691 3701
3692 // Setup the elements pointer in the allocated arguments object and 3702 // Setup the elements pointer in the allocated arguments object and
3693 // initialize the header in the elements fixed array. 3703 // initialize the header in the elements fixed array.
3694 __ lea(edi, Operand(eax, GetArgumentsObjectSize())); 3704 __ lea(edi, Operand(eax, GetArgumentsObjectSize()));
3695 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); 3705 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3696 __ mov(FieldOperand(edi, FixedArray::kMapOffset), 3706 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3697 Immediate(FACTORY->fixed_array_map())); 3707 Immediate(masm->isolate()->factory()->fixed_array_map()));
3698 3708
3699 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); 3709 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3700 // Untag the length for the loop below. 3710 // Untag the length for the loop below.
3701 __ SmiUntag(ecx); 3711 __ SmiUntag(ecx);
3702 3712
3703 // Copy the fixed array slots. 3713 // Copy the fixed array slots.
3704 NearLabel loop; 3714 NearLabel loop;
3705 __ bind(&loop); 3715 __ bind(&loop);
3706 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. 3716 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
3707 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); 3717 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
(...skipping 108 matching lines...)
3816 // edx: Number of capture registers 3826 // edx: Number of capture registers
3817 // Check that the fourth object is a JSArray object. 3827 // Check that the fourth object is a JSArray object.
3818 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); 3828 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3819 __ test(eax, Immediate(kSmiTagMask)); 3829 __ test(eax, Immediate(kSmiTagMask));
3820 __ j(zero, &runtime); 3830 __ j(zero, &runtime);
3821 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); 3831 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3822 __ j(not_equal, &runtime); 3832 __ j(not_equal, &runtime);
3823 // Check that the JSArray is in fast case. 3833 // Check that the JSArray is in fast case.
3824 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); 3834 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3825 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); 3835 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
3826 __ cmp(eax, FACTORY->fixed_array_map()); 3836 Factory* factory = masm->isolate()->factory();
3837 __ cmp(eax, factory->fixed_array_map());
3827 __ j(not_equal, &runtime); 3838 __ j(not_equal, &runtime);
3828 // Check that the last match info has space for the capture registers and the 3839 // Check that the last match info has space for the capture registers and the
3829 // additional information. 3840 // additional information.
3830 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); 3841 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
3831 __ SmiUntag(eax); 3842 __ SmiUntag(eax);
3832 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); 3843 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
3833 __ cmp(edx, Operand(eax)); 3844 __ cmp(edx, Operand(eax));
3834 __ j(greater, &runtime); 3845 __ j(greater, &runtime);
3835 3846
3836 // ecx: RegExp data (FixedArray) 3847 // ecx: RegExp data (FixedArray)
(...skipping 17 matching lines...)
3854 // string. In that case the subject string is just the first part of the cons 3865 // string. In that case the subject string is just the first part of the cons
3855 // string. Also in this case the first part of the cons string is known to be 3866 // string. Also in this case the first part of the cons string is known to be
3856 // a sequential string or an external string. 3867 // a sequential string or an external string.
3857 STATIC_ASSERT(kExternalStringTag != 0); 3868 STATIC_ASSERT(kExternalStringTag != 0);
3858 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); 3869 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
3859 __ test(Operand(ebx), 3870 __ test(Operand(ebx),
3860 Immediate(kIsNotStringMask | kExternalStringTag)); 3871 Immediate(kIsNotStringMask | kExternalStringTag));
3861 __ j(not_zero, &runtime); 3872 __ j(not_zero, &runtime);
3862 // String is a cons string. 3873 // String is a cons string.
3863 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset)); 3874 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
3864 __ cmp(Operand(edx), FACTORY->empty_string()); 3875 __ cmp(Operand(edx), factory->empty_string());
3865 __ j(not_equal, &runtime); 3876 __ j(not_equal, &runtime);
3866 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset)); 3877 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
3867 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 3878 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3868 // String is a cons string with empty second part. 3879 // String is a cons string with empty second part.
3869 // eax: first part of cons string. 3880 // eax: first part of cons string.
3870 // ebx: map of first part of cons string. 3881 // ebx: map of first part of cons string.
3871 // Is first part a flat two byte string? 3882 // Is first part a flat two byte string?
3872 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset), 3883 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3873 kStringRepresentationMask | kStringEncodingMask); 3884 kStringRepresentationMask | kStringEncodingMask);
3874 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); 3885 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
(...skipping 118 matching lines...)
3993 __ mov(eax, Operand::StaticVariable(pending_exception)); 4004 __ mov(eax, Operand::StaticVariable(pending_exception));
3994 __ cmp(edx, Operand(eax)); 4005 __ cmp(edx, Operand(eax));
3995 __ j(equal, &runtime); 4006 __ j(equal, &runtime);
3996 // For exception, throw the exception again. 4007 // For exception, throw the exception again.
3997 4008
3998 // Clear the pending exception variable. 4009 // Clear the pending exception variable.
3999 __ mov(Operand::StaticVariable(pending_exception), edx); 4010 __ mov(Operand::StaticVariable(pending_exception), edx);
4000 4011
4001 // Special handling of termination exceptions which are uncatchable 4012 // Special handling of termination exceptions which are uncatchable
4002 // by javascript code. 4013 // by javascript code.
4003 __ cmp(eax, FACTORY->termination_exception()); 4014 __ cmp(eax, factory->termination_exception());
4004 Label throw_termination_exception; 4015 Label throw_termination_exception;
4005 __ j(equal, &throw_termination_exception); 4016 __ j(equal, &throw_termination_exception);
4006 4017
4007 // Handle normal exception by following handler chain. 4018 // Handle normal exception by following handler chain.
4008 __ Throw(eax); 4019 __ Throw(eax);
4009 4020
4010 __ bind(&throw_termination_exception); 4021 __ bind(&throw_termination_exception);
4011 __ ThrowUncatchable(TERMINATION, eax); 4022 __ ThrowUncatchable(TERMINATION, eax);
4012 4023
4013 __ bind(&failure); 4024 __ bind(&failure);
4014 // For failure to match, return null. 4025 // For failure to match, return null.
4015 __ mov(Operand(eax), FACTORY->null_value()); 4026 __ mov(Operand(eax), factory->null_value());
4016 __ ret(4 * kPointerSize); 4027 __ ret(4 * kPointerSize);
4017 4028
4018 // Load RegExp data. 4029 // Load RegExp data.
4019 __ bind(&success); 4030 __ bind(&success);
4020 __ mov(eax, Operand(esp, kJSRegExpOffset)); 4031 __ mov(eax, Operand(esp, kJSRegExpOffset));
4021 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); 4032 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
4022 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); 4033 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
4023 // Calculate number of capture registers (number_of_captures + 1) * 2. 4034 // Calculate number of capture registers (number_of_captures + 1) * 2.
4024 STATIC_ASSERT(kSmiTag == 0); 4035 STATIC_ASSERT(kSmiTag == 0);
4025 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); 4036 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
(...skipping 80 matching lines...)
4106 edx, // Scratch register 4117 edx, // Scratch register
4107 &slowcase, 4118 &slowcase,
4108 TAG_OBJECT); 4119 TAG_OBJECT);
4109 // eax: Start of allocated area, object-tagged. 4120 // eax: Start of allocated area, object-tagged.
4110 4121
4111 // Set JSArray map to global.regexp_result_map(). 4122 // Set JSArray map to global.regexp_result_map().
4112 // Set empty properties FixedArray. 4123 // Set empty properties FixedArray.
4113 // Set elements to point to FixedArray allocated right after the JSArray. 4124 // Set elements to point to FixedArray allocated right after the JSArray.
4114 // Interleave operations for better latency. 4125 // Interleave operations for better latency.
4115 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX)); 4126 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
4116 __ mov(ecx, Immediate(FACTORY->empty_fixed_array())); 4127 Factory* factory = masm->isolate()->factory();
4128 __ mov(ecx, Immediate(factory->empty_fixed_array()));
4117 __ lea(ebx, Operand(eax, JSRegExpResult::kSize)); 4129 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
4118 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset)); 4130 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
4119 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx); 4131 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
4120 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx); 4132 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
4121 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX)); 4133 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
4122 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx); 4134 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
4123 4135
4124 // Set input, index and length fields from arguments. 4136 // Set input, index and length fields from arguments.
4125 __ mov(ecx, Operand(esp, kPointerSize * 1)); 4137 __ mov(ecx, Operand(esp, kPointerSize * 1));
4126 __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx); 4138 __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
4127 __ mov(ecx, Operand(esp, kPointerSize * 2)); 4139 __ mov(ecx, Operand(esp, kPointerSize * 2));
4128 __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx); 4140 __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
4129 __ mov(ecx, Operand(esp, kPointerSize * 3)); 4141 __ mov(ecx, Operand(esp, kPointerSize * 3));
4130 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx); 4142 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
4131 4143
4132 // Fill out the elements FixedArray. 4144 // Fill out the elements FixedArray.
4133 // eax: JSArray. 4145 // eax: JSArray.
4134 // ebx: FixedArray. 4146 // ebx: FixedArray.
4135 // ecx: Number of elements in array, as smi. 4147 // ecx: Number of elements in array, as smi.
4136 4148
4137 // Set map. 4149 // Set map.
4138 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), 4150 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
4139 Immediate(FACTORY->fixed_array_map())); 4151 Immediate(factory->fixed_array_map()));
4140 // Set length. 4152 // Set length.
4141 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx); 4153 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
4142 // Fill contents of fixed-array with the-hole. 4154 // Fill contents of fixed-array with the-hole.
4143 __ SmiUntag(ecx); 4155 __ SmiUntag(ecx);
4144 __ mov(edx, Immediate(FACTORY->the_hole_value())); 4156 __ mov(edx, Immediate(factory->the_hole_value()));
4145 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); 4157 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
4146 // Fill fixed array elements with hole. 4158 // Fill fixed array elements with hole.
4147 // eax: JSArray. 4159 // eax: JSArray.
4148 // ecx: Number of elements to fill. 4160 // ecx: Number of elements to fill.
4149 // ebx: Start of elements in FixedArray. 4161 // ebx: Start of elements in FixedArray.
4150 // edx: the hole. 4162 // edx: the hole.
4151 Label loop; 4163 Label loop;
4152 __ test(ecx, Operand(ecx)); 4164 __ test(ecx, Operand(ecx));
4153 __ bind(&loop); 4165 __ bind(&loop);
4154 __ j(less_equal, &done); // Jump if ecx is negative or zero. 4166 __ j(less_equal, &done); // Jump if ecx is negative or zero.
(...skipping 45 matching lines...)
4200 } else { 4212 } else {
4201 NearLabel not_smi, hash_calculated; 4213 NearLabel not_smi, hash_calculated;
4202 STATIC_ASSERT(kSmiTag == 0); 4214 STATIC_ASSERT(kSmiTag == 0);
4203 __ test(object, Immediate(kSmiTagMask)); 4215 __ test(object, Immediate(kSmiTagMask));
4204 __ j(not_zero, &not_smi); 4216 __ j(not_zero, &not_smi);
4205 __ mov(scratch, object); 4217 __ mov(scratch, object);
4206 __ SmiUntag(scratch); 4218 __ SmiUntag(scratch);
4207 __ jmp(&smi_hash_calculated); 4219 __ jmp(&smi_hash_calculated);
4208 __ bind(&not_smi); 4220 __ bind(&not_smi);
4209 __ cmp(FieldOperand(object, HeapObject::kMapOffset), 4221 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
4210 FACTORY->heap_number_map()); 4222 masm->isolate()->factory()->heap_number_map());
4211 __ j(not_equal, not_found); 4223 __ j(not_equal, not_found);
4212 STATIC_ASSERT(8 == kDoubleSize); 4224 STATIC_ASSERT(8 == kDoubleSize);
4213 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset)); 4225 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
4214 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); 4226 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
4215 // Object is heap number and hash is now in scratch. Calculate cache index. 4227 // Object is heap number and hash is now in scratch. Calculate cache index.
4216 __ and_(scratch, Operand(mask)); 4228 __ and_(scratch, Operand(mask));
4217 Register index = scratch; 4229 Register index = scratch;
4218 Register probe = mask; 4230 Register probe = mask;
4219 __ mov(probe, 4231 __ mov(probe,
4220 FieldOperand(number_string_cache, 4232 FieldOperand(number_string_cache,
(...skipping 96 matching lines...)
4317 // for NaN and undefined. 4329 // for NaN and undefined.
4318 { 4330 {
4319 Label not_identical; 4331 Label not_identical;
4320 __ cmp(eax, Operand(edx)); 4332 __ cmp(eax, Operand(edx));
4321 __ j(not_equal, &not_identical); 4333 __ j(not_equal, &not_identical);
4322 4334
4323 if (cc_ != equal) { 4335 if (cc_ != equal) {
4324 // Check for undefined. undefined OP undefined is false even though 4336 // Check for undefined. undefined OP undefined is false even though
4325 // undefined == undefined. 4337 // undefined == undefined.
4326 NearLabel check_for_nan; 4338 NearLabel check_for_nan;
4327 __ cmp(edx, FACTORY->undefined_value()); 4339 __ cmp(edx, masm->isolate()->factory()->undefined_value());
4328 __ j(not_equal, &check_for_nan); 4340 __ j(not_equal, &check_for_nan);
4329 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_)))); 4341 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4330 __ ret(0); 4342 __ ret(0);
4331 __ bind(&check_for_nan); 4343 __ bind(&check_for_nan);
4332 } 4344 }
4333 4345
4334 // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(), 4346 // Test for NaN. Sadly, we can't just compare to factory->nan_value(),
4335 // so we do the second best thing - test it ourselves. 4347 // so we do the second best thing - test it ourselves.
4336 // Note: if cc_ != equal, never_nan_nan_ is not used. 4348 // Note: if cc_ != equal, never_nan_nan_ is not used.
4337 if (never_nan_nan_ && (cc_ == equal)) { 4349 if (never_nan_nan_ && (cc_ == equal)) {
4338 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); 4350 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4339 __ ret(0); 4351 __ ret(0);
4340 } else { 4352 } else {
4341 NearLabel heap_number; 4353 NearLabel heap_number;
4342 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 4354 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
4343 Immediate(FACTORY->heap_number_map())); 4355 Immediate(masm->isolate()->factory()->heap_number_map()));
4344 __ j(equal, &heap_number); 4356 __ j(equal, &heap_number);
4345 if (cc_ != equal) { 4357 if (cc_ != equal) {
4346 // Call runtime on identical JSObjects. Otherwise return equal. 4358 // Call runtime on identical JSObjects. Otherwise return equal.
4347 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx); 4359 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4348 __ j(above_equal, &not_identical); 4360 __ j(above_equal, &not_identical);
4349 } 4361 }
4350 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); 4362 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4351 __ ret(0); 4363 __ ret(0);
4352 4364
4353 __ bind(&heap_number); 4365 __ bind(&heap_number);
(...skipping 56 matching lines...)
4410 // ecx still holds eax & kSmiTag, which is either zero or one. 4422 // ecx still holds eax & kSmiTag, which is either zero or one.
4411 __ sub(Operand(ecx), Immediate(0x01)); 4423 __ sub(Operand(ecx), Immediate(0x01));
4412 __ mov(ebx, edx); 4424 __ mov(ebx, edx);
4413 __ xor_(ebx, Operand(eax)); 4425 __ xor_(ebx, Operand(eax));
4414 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx. 4426 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
4415 __ xor_(ebx, Operand(eax)); 4427 __ xor_(ebx, Operand(eax));
4416 // if eax was smi, ebx is now edx, else eax. 4428 // if eax was smi, ebx is now edx, else eax.
4417 4429
4418 // Check if the non-smi operand is a heap number. 4430 // Check if the non-smi operand is a heap number.
4419 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), 4431 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
4420 Immediate(FACTORY->heap_number_map())); 4432 Immediate(masm->isolate()->factory()->heap_number_map()));
4421 // If heap number, handle it in the slow case. 4433 // If heap number, handle it in the slow case.
4422 __ j(equal, &slow); 4434 __ j(equal, &slow);
4423 // Return non-equal (ebx is not zero) 4435 // Return non-equal (ebx is not zero)
4424 __ mov(eax, ebx); 4436 __ mov(eax, ebx);
4425 __ ret(0); 4437 __ ret(0);
4426 4438
4427 __ bind(&not_smis); 4439 __ bind(&not_smis);
4428 // If either operand is a JSObject or an oddball value, then they are not 4440 // If either operand is a JSObject or an oddball value, then they are not
4429 // equal since their pointers are different 4441 // equal since their pointers are different
4430 // There is no test for undetectability in strict equality. 4442 // There is no test for undetectability in strict equality.
(...skipping 311 matching lines...)
4742 // Result is in eax or edx:eax - do not destroy these registers! 4754 // Result is in eax or edx:eax - do not destroy these registers!
4743 4755
4744 if (always_allocate_scope) { 4756 if (always_allocate_scope) {
4745 __ dec(Operand::StaticVariable(scope_depth)); 4757 __ dec(Operand::StaticVariable(scope_depth));
4746 } 4758 }
4747 4759
4748 // Make sure we're not trying to return 'the hole' from the runtime 4760 // Make sure we're not trying to return 'the hole' from the runtime
4749 // call as this may lead to crashes in the IC code later. 4761 // call as this may lead to crashes in the IC code later.
4750 if (FLAG_debug_code) { 4762 if (FLAG_debug_code) {
4751 NearLabel okay; 4763 NearLabel okay;
4752 __ cmp(eax, FACTORY->the_hole_value()); 4764 __ cmp(eax, masm->isolate()->factory()->the_hole_value());
4753 __ j(not_equal, &okay); 4765 __ j(not_equal, &okay);
4754 __ int3(); 4766 __ int3();
4755 __ bind(&okay); 4767 __ bind(&okay);
4756 } 4768 }
4757 4769
4758 // Check for failure result. 4770 // Check for failure result.
4759 Label failure_returned; 4771 Label failure_returned;
4760 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); 4772 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4761 __ lea(ecx, Operand(eax, 1)); 4773 __ lea(ecx, Operand(eax, 1));
4762 // Lower 2 bits of ecx are 0 iff eax has failure tag. 4774 // Lower 2 bits of ecx are 0 iff eax has failure tag.
(...skipping 37 matching lines...)
4800 4812
4801 // Retrieve the pending exception and clear the variable. 4813 // Retrieve the pending exception and clear the variable.
4802 ExternalReference the_hole_location = 4814 ExternalReference the_hole_location =
4803 ExternalReference::the_hole_value_location(masm->isolate()); 4815 ExternalReference::the_hole_value_location(masm->isolate());
4804 __ mov(eax, Operand::StaticVariable(pending_exception_address)); 4816 __ mov(eax, Operand::StaticVariable(pending_exception_address));
4805 __ mov(edx, Operand::StaticVariable(the_hole_location)); 4817 __ mov(edx, Operand::StaticVariable(the_hole_location));
4806 __ mov(Operand::StaticVariable(pending_exception_address), edx); 4818 __ mov(Operand::StaticVariable(pending_exception_address), edx);
4807 4819
4808 // Special handling of termination exceptions which are uncatchable 4820 // Special handling of termination exceptions which are uncatchable
4809 // by javascript code. 4821 // by javascript code.
4810 __ cmp(eax, FACTORY->termination_exception()); 4822 __ cmp(eax, masm->isolate()->factory()->termination_exception());
4811 __ j(equal, throw_termination_exception); 4823 __ j(equal, throw_termination_exception);
4812 4824
4813 // Handle normal exception. 4825 // Handle normal exception.
4814 __ jmp(throw_normal_exception); 4826 __ jmp(throw_normal_exception);
4815 4827
4816 // Retry. 4828 // Retry.
4817 __ bind(&retry); 4829 __ bind(&retry);
4818 } 4830 }
4819 4831
4820 4832
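Reviewer note: the exception path above reads the pending exception, overwrites the isolate's slot with the hole value so the slot reads as "no exception", and then separates the uncatchable termination exception from ordinary ones. Restated in plain C++ (the parameters stand in for the values the stub reaches through ExternalReference; this is a sketch of the control flow, not V8's API):

  enum ExceptionKind { NORMAL_EXCEPTION, TERMINATION_EXCEPTION };

  // eax <- [pending_exception]; [pending_exception] <- the_hole; then branch.
  ExceptionKind TakePendingException(void** pending_exception_slot,
                                     void* the_hole_value,
                                     void* termination_exception,
                                     void** out_exception) {
    *out_exception = *pending_exception_slot;
    *pending_exception_slot = the_hole_value;  // hole == "no pending exception"
    return (*out_exception == termination_exception) ? TERMINATION_EXCEPTION
                                                     : NORMAL_EXCEPTION;
  }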
(...skipping 280 matching lines...)
5101 __ mov(Operand(scratch, kDeltaToCmpImmediate), map); 5113 __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
5102 } 5114 }
5103 5115
5104 // Loop through the prototype chain of the object looking for the function 5116 // Loop through the prototype chain of the object looking for the function
5105 // prototype. 5117 // prototype.
5106 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); 5118 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
5107 NearLabel loop, is_instance, is_not_instance; 5119 NearLabel loop, is_instance, is_not_instance;
5108 __ bind(&loop); 5120 __ bind(&loop);
5109 __ cmp(scratch, Operand(prototype)); 5121 __ cmp(scratch, Operand(prototype));
5110 __ j(equal, &is_instance); 5122 __ j(equal, &is_instance);
5111 __ cmp(Operand(scratch), Immediate(FACTORY->null_value())); 5123 Factory* factory = masm->isolate()->factory();
5124 __ cmp(Operand(scratch), Immediate(factory->null_value()));
5112 __ j(equal, &is_not_instance); 5125 __ j(equal, &is_not_instance);
5113 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 5126 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
5114 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); 5127 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
5115 __ jmp(&loop); 5128 __ jmp(&loop);
5116 5129
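Reviewer note: the loop above is the core of the instanceof check — start from the object's map's prototype and follow map->prototype links until either the function's prototype (instance) or the null sentinel (not an instance) is reached. The same walk in C++, with stand-in types that only model the two fields the stub reads:

  struct Map;
  struct HeapObj { Map* map; };
  struct Map { HeapObj* prototype; };

  bool WalkPrototypeChain(HeapObj* object, HeapObj* function_prototype,
                          HeapObj* null_value) {
    HeapObj* current = object->map->prototype;         // mov scratch, [map + kPrototypeOffset]
    for (;;) {
      if (current == function_prototype) return true;  // &is_instance
      if (current == null_value) return false;         // &is_not_instance
      current = current->map->prototype;               // two loads, then jmp &loop
    }
  }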
5117 __ bind(&is_instance); 5130 __ bind(&is_instance);
5118 if (!HasCallSiteInlineCheck()) { 5131 if (!HasCallSiteInlineCheck()) {
5119 __ Set(eax, Immediate(0)); 5132 __ Set(eax, Immediate(0));
5120 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); 5133 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5121 __ mov(Operand::StaticArray(scratch, 5134 __ mov(Operand::StaticArray(scratch,
5122 times_pointer_size, roots_address), eax); 5135 times_pointer_size, roots_address), eax);
5123 } else { 5136 } else {
5124 // Get return address and delta to inlined map check. 5137 // Get return address and delta to inlined map check.
5125 __ mov(eax, FACTORY->true_value()); 5138 __ mov(eax, factory->true_value());
5126 __ mov(scratch, Operand(esp, 0 * kPointerSize)); 5139 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5127 __ sub(scratch, Operand(esp, 1 * kPointerSize)); 5140 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5128 if (FLAG_debug_code) { 5141 if (FLAG_debug_code) {
5129 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); 5142 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5130 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); 5143 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5131 } 5144 }
5132 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); 5145 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5133 if (!ReturnTrueFalseObject()) { 5146 if (!ReturnTrueFalseObject()) {
5134 __ Set(eax, Immediate(0)); 5147 __ Set(eax, Immediate(0));
5135 } 5148 }
5136 } 5149 }
5137 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 5150 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5138 5151
5139 __ bind(&is_not_instance); 5152 __ bind(&is_not_instance);
5140 if (!HasCallSiteInlineCheck()) { 5153 if (!HasCallSiteInlineCheck()) {
5141 __ Set(eax, Immediate(Smi::FromInt(1))); 5154 __ Set(eax, Immediate(Smi::FromInt(1)));
5142 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); 5155 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5143 __ mov(Operand::StaticArray( 5156 __ mov(Operand::StaticArray(
5144 scratch, times_pointer_size, roots_address), eax); 5157 scratch, times_pointer_size, roots_address), eax);
5145 } else { 5158 } else {
5146 // Get return address and delta to inlined map check. 5159 // Get return address and delta to inlined map check.
5147 __ mov(eax, FACTORY->false_value()); 5160 __ mov(eax, factory->false_value());
5148 __ mov(scratch, Operand(esp, 0 * kPointerSize)); 5161 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5149 __ sub(scratch, Operand(esp, 1 * kPointerSize)); 5162 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5150 if (FLAG_debug_code) { 5163 if (FLAG_debug_code) {
5151 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); 5164 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5152 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); 5165 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5153 } 5166 }
5154 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); 5167 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5155 if (!ReturnTrueFalseObject()) { 5168 if (!ReturnTrueFalseObject()) {
5156 __ Set(eax, Immediate(Smi::FromInt(1))); 5169 __ Set(eax, Immediate(Smi::FromInt(1)));
5157 } 5170 }
5158 } 5171 }
5159 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 5172 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5160 5173
5161 Label object_not_null, object_not_null_or_smi; 5174 Label object_not_null, object_not_null_or_smi;
5162 __ bind(&not_js_object); 5175 __ bind(&not_js_object);
5163 // Before null, smi and string value checks, check that the rhs is a function 5176 // Before null, smi and string value checks, check that the rhs is a function
5164 // as for a non-function rhs an exception needs to be thrown. 5177 // as for a non-function rhs an exception needs to be thrown.
5165 __ test(function, Immediate(kSmiTagMask)); 5178 __ test(function, Immediate(kSmiTagMask));
5166 __ j(zero, &slow, not_taken); 5179 __ j(zero, &slow, not_taken);
5167 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); 5180 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
5168 __ j(not_equal, &slow, not_taken); 5181 __ j(not_equal, &slow, not_taken);
5169 5182
5170 // Null is not instance of anything. 5183 // Null is not instance of anything.
5171 __ cmp(object, FACTORY->null_value()); 5184 __ cmp(object, factory->null_value());
5172 __ j(not_equal, &object_not_null); 5185 __ j(not_equal, &object_not_null);
5173 __ Set(eax, Immediate(Smi::FromInt(1))); 5186 __ Set(eax, Immediate(Smi::FromInt(1)));
5174 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 5187 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5175 5188
5176 __ bind(&object_not_null); 5189 __ bind(&object_not_null);
5177 // Smi values are not instances of anything. 5190 // Smi values are not instances of anything.
5178 __ test(object, Immediate(kSmiTagMask)); 5191 __ test(object, Immediate(kSmiTagMask));
5179 __ j(not_zero, &object_not_null_or_smi, not_taken); 5192 __ j(not_zero, &object_not_null_or_smi, not_taken);
5180 __ Set(eax, Immediate(Smi::FromInt(1))); 5193 __ Set(eax, Immediate(Smi::FromInt(1)));
5181 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 5194 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
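Reviewer note: for a left-hand side that is not a JS object the stub answers without touching any map — the right-hand side must still be a function (otherwise the slow path throws), and neither null nor a smi can be an instance of anything; Smi::FromInt(1) is the stub's encoding for "not an instance". The same ladder as a sketch (names are stand-ins, and the string case in the elided lines is omitted):

  enum FastAnswer { NOT_AN_INSTANCE, NEEDS_SLOW_PATH, KEEP_CHECKING };

  FastAnswer ClassifyNonJSObject(bool rhs_is_function, bool lhs_is_null,
                                 bool lhs_is_smi) {
    if (!rhs_is_function) return NEEDS_SLOW_PATH;  // &slow: let the builtin throw
    if (lhs_is_null) return NOT_AN_INSTANCE;       // null has no prototype chain
    if (lhs_is_smi) return NOT_AN_INSTANCE;        // neither does a smi
    return KEEP_CHECKING;                          // &object_not_null_or_smi
  }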
(...skipping 20 matching lines...)
5202 } else { 5215 } else {
5203 // Call the builtin and convert 0/1 to true/false. 5216 // Call the builtin and convert 0/1 to true/false.
5204 __ EnterInternalFrame(); 5217 __ EnterInternalFrame();
5205 __ push(object); 5218 __ push(object);
5206 __ push(function); 5219 __ push(function);
5207 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); 5220 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
5208 __ LeaveInternalFrame(); 5221 __ LeaveInternalFrame();
5209 NearLabel true_value, done; 5222 NearLabel true_value, done;
5210 __ test(eax, Operand(eax)); 5223 __ test(eax, Operand(eax));
5211 __ j(zero, &true_value); 5224 __ j(zero, &true_value);
5212 __ mov(eax, FACTORY->false_value()); 5225 __ mov(eax, factory->false_value());
5213 __ jmp(&done); 5226 __ jmp(&done);
5214 __ bind(&true_value); 5227 __ bind(&true_value);
5215 __ mov(eax, FACTORY->true_value()); 5228 __ mov(eax, factory->true_value());
5216 __ bind(&done); 5229 __ bind(&done);
5217 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 5230 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5218 } 5231 }
5219 } 5232 }
5220 5233
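Reviewer note: the tail of the instanceof stub shows its result convention — zero (or the smi 0) means "is an instance" and a non-zero smi means "is not", which is why the builtin's return value is tested against zero before picking true_value or false_value. A one-line restatement of that mapping (sketch only):

  // test eax, eax; j zero -> &true_value, otherwise &false_value.
  bool InstanceofResultToBoolean(int stub_result) {
    return stub_result == 0;  // zero encodes "is an instance"
  }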
5221 5234
5222 Register InstanceofStub::left() { return eax; } 5235 Register InstanceofStub::left() { return eax; }
5223 5236
5224 5237
5225 Register InstanceofStub::right() { return edx; } 5238 Register InstanceofStub::right() { return edx; }
(...skipping 108 matching lines...)
5334 // Handle non-flat strings. 5347 // Handle non-flat strings.
5335 __ test(result_, Immediate(kIsConsStringMask)); 5348 __ test(result_, Immediate(kIsConsStringMask));
5336 __ j(zero, &call_runtime_); 5349 __ j(zero, &call_runtime_);
5337 5350
5338 // ConsString. 5351 // ConsString.
5339 // Check whether the right hand side is the empty string (i.e. if 5352 // Check whether the right hand side is the empty string (i.e. if
5340 // this is really a flat string in a cons string). If that is not 5353 // this is really a flat string in a cons string). If that is not
5341 // the case we would rather go to the runtime system now to flatten 5354 // the case we would rather go to the runtime system now to flatten
5342 // the string. 5355 // the string.
5343 __ cmp(FieldOperand(object_, ConsString::kSecondOffset), 5356 __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
5344 Immediate(FACTORY->empty_string())); 5357 Immediate(masm->isolate()->factory()->empty_string()));
5345 __ j(not_equal, &call_runtime_); 5358 __ j(not_equal, &call_runtime_);
5346 // Get the first of the two strings and load its instance type. 5359 // Get the first of the two strings and load its instance type.
5347 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset)); 5360 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
5348 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); 5361 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
5349 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 5362 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
5350 // If the first cons component is also non-flat, then go to runtime. 5363 // If the first cons component is also non-flat, then go to runtime.
5351 STATIC_ASSERT(kSeqStringTag == 0); 5364 STATIC_ASSERT(kSeqStringTag == 0);
5352 __ test(result_, Immediate(kStringRepresentationMask)); 5365 __ test(result_, Immediate(kStringRepresentationMask));
5353 __ j(not_zero, &call_runtime_); 5366 __ j(not_zero, &call_runtime_);
5354 5367
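Reviewer note: the cons-string handling above only takes the fast path for a degenerate cons whose second part is the empty string; anything else is left for the runtime to flatten, and even then the first part must itself be a sequential string. The decision restated with placeholder types (the string here is already known not to be flat):

  struct Str { bool is_cons; bool is_seq; Str* first; Str* second; };

  bool MustFlattenInRuntime(const Str* s, const Str* empty_string) {
    if (!s->is_cons) return true;                // non-flat, non-cons representation
    if (s->second != empty_string) return true;  // a real cons: flatten in the runtime
    return !s->first->is_seq;                    // usable only if the first part is flat
  }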
(...skipping 24 matching lines...)
5379 } 5392 }
5380 5393
5381 5394
5382 void StringCharCodeAtGenerator::GenerateSlow( 5395 void StringCharCodeAtGenerator::GenerateSlow(
5383 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { 5396 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5384 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); 5397 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
5385 5398
5386 // Index is not a smi. 5399 // Index is not a smi.
5387 __ bind(&index_not_smi_); 5400 __ bind(&index_not_smi_);
5388 // If index is a heap number, try converting it to an integer. 5401 // If index is a heap number, try converting it to an integer.
5389 __ CheckMap(index_, FACTORY->heap_number_map(), index_not_number_, true); 5402 __ CheckMap(index_,
5403 masm->isolate()->factory()->heap_number_map(),
5404 index_not_number_,
5405 true);
5390 call_helper.BeforeCall(masm); 5406 call_helper.BeforeCall(masm);
5391 __ push(object_); 5407 __ push(object_);
5392 __ push(index_); 5408 __ push(index_);
5393 __ push(index_); // Consumed by runtime conversion function. 5409 __ push(index_); // Consumed by runtime conversion function.
5394 if (index_flags_ == STRING_INDEX_IS_NUMBER) { 5410 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
5395 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); 5411 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
5396 } else { 5412 } else {
5397 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); 5413 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
5398 // NumberToSmi discards numbers that are not exact integers. 5414 // NumberToSmi discards numbers that are not exact integers.
5399 __ CallRuntime(Runtime::kNumberToSmi, 1); 5415 __ CallRuntime(Runtime::kNumberToSmi, 1);
(...skipping 40 matching lines...)
5440 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { 5456 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
5441 // Fast case of Heap::LookupSingleCharacterStringFromCode. 5457 // Fast case of Heap::LookupSingleCharacterStringFromCode.
5442 STATIC_ASSERT(kSmiTag == 0); 5458 STATIC_ASSERT(kSmiTag == 0);
5443 STATIC_ASSERT(kSmiShiftSize == 0); 5459 STATIC_ASSERT(kSmiShiftSize == 0);
5444 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1)); 5460 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
5445 __ test(code_, 5461 __ test(code_,
5446 Immediate(kSmiTagMask | 5462 Immediate(kSmiTagMask |
5447 ((~String::kMaxAsciiCharCode) << kSmiTagSize))); 5463 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
5448 __ j(not_zero, &slow_case_, not_taken); 5464 __ j(not_zero, &slow_case_, not_taken);
5449 5465
5450 __ Set(result_, Immediate(FACTORY->single_character_string_cache())); 5466 Factory* factory = masm->isolate()->factory();
5467 __ Set(result_, Immediate(factory->single_character_string_cache()));
5451 STATIC_ASSERT(kSmiTag == 0); 5468 STATIC_ASSERT(kSmiTag == 0);
5452 STATIC_ASSERT(kSmiTagSize == 1); 5469 STATIC_ASSERT(kSmiTagSize == 1);
5453 STATIC_ASSERT(kSmiShiftSize == 0); 5470 STATIC_ASSERT(kSmiShiftSize == 0);
5454 // At this point code register contains smi tagged ascii char code. 5471 // At this point code register contains smi tagged ascii char code.
5455 __ mov(result_, FieldOperand(result_, 5472 __ mov(result_, FieldOperand(result_,
5456 code_, times_half_pointer_size, 5473 code_, times_half_pointer_size,
5457 FixedArray::kHeaderSize)); 5474 FixedArray::kHeaderSize));
5458 __ cmp(result_, FACTORY->undefined_value()); 5475 __ cmp(result_, factory->undefined_value());
5459 __ j(equal, &slow_case_, not_taken); 5476 __ j(equal, &slow_case_, not_taken);
5460 __ bind(&exit_); 5477 __ bind(&exit_);
5461 } 5478 }
5462 5479
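Reviewer note: the single test at the top of GenerateFast checks two things at once — that code_ is a smi and that its payload fits the one-byte character range — which works because String::kMaxAsciiCharCode + 1 is a power of two (hence the ASSERT). After that, the smi-tagged code indexes straight into the single-character string cache, and an undefined entry sends the stub to the slow case. The combined mask check written out, taking the conventional ia32 values as an assumption (smi tag size 1, max ASCII code 127):

  #include <cassert>

  const unsigned kSmiTagSize = 1;
  const unsigned kSmiTagMask = 1;
  const unsigned kMaxAsciiCharCode = 127;

  // Any bit outside the allowed payload range, or a set tag bit, makes the
  // masked value non-zero -- exactly what the stub's single test checks.
  bool IsSmiTaggedAsciiCode(unsigned value) {
    return (value & (kSmiTagMask | (~kMaxAsciiCharCode << kSmiTagSize))) == 0;
  }

  int main() {
    assert(IsSmiTaggedAsciiCode('a' << kSmiTagSize));         // smi-tagged 'a'
    assert(!IsSmiTaggedAsciiCode(('a' << kSmiTagSize) | 1));  // tag bit set: not a smi
    assert(!IsSmiTaggedAsciiCode(300u << kSmiTagSize));       // outside the one-byte range
    return 0;
  }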
5463 5480
5464 void StringCharFromCodeGenerator::GenerateSlow( 5481 void StringCharFromCodeGenerator::GenerateSlow(
5465 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { 5482 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5466 __ Abort("Unexpected fallthrough to CharFromCode slow case"); 5483 __ Abort("Unexpected fallthrough to CharFromCode slow case");
5467 5484
5468 __ bind(&slow_case_); 5485 __ bind(&slow_case_);
(...skipping 507 matching lines...)
5976 // Load the entry from the symbol table. 5993 // Load the entry from the symbol table.
5977 Register candidate = scratch; // Scratch register contains candidate. 5994 Register candidate = scratch; // Scratch register contains candidate.
5978 STATIC_ASSERT(SymbolTable::kEntrySize == 1); 5995 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
5979 __ mov(candidate, 5996 __ mov(candidate,
5980 FieldOperand(symbol_table, 5997 FieldOperand(symbol_table,
5981 scratch, 5998 scratch,
5982 times_pointer_size, 5999 times_pointer_size,
5983 SymbolTable::kElementsStartOffset)); 6000 SymbolTable::kElementsStartOffset));
5984 6001
5985 // If entry is undefined no string with this hash can be found. 6002 // If entry is undefined no string with this hash can be found.
5986 __ cmp(candidate, FACTORY->undefined_value()); 6003 Factory* factory = masm->isolate()->factory();
6004 __ cmp(candidate, factory->undefined_value());
5987 __ j(equal, not_found); 6005 __ j(equal, not_found);
5988 __ cmp(candidate, FACTORY->null_value()); 6006 __ cmp(candidate, factory->null_value());
5989 __ j(equal, &next_probe[i]); 6007 __ j(equal, &next_probe[i]);
5990 6008
5991 // If length is not 2 the string is not a candidate. 6009 // If length is not 2 the string is not a candidate.
5992 __ cmp(FieldOperand(candidate, String::kLengthOffset), 6010 __ cmp(FieldOperand(candidate, String::kLengthOffset),
5993 Immediate(Smi::FromInt(2))); 6011 Immediate(Smi::FromInt(2)));
5994 __ j(not_equal, &next_probe[i]); 6012 __ j(not_equal, &next_probe[i]);
5995 6013
5996 // As we are out of registers save the mask on the stack and use that 6014 // As we are out of registers save the mask on the stack and use that
5997 // register as a temporary. 6015 // register as a temporary.
5998 __ push(mask); 6016 __ push(mask);
(...skipping 490 matching lines...)
6489 // Do a tail call to the rewritten stub. 6507 // Do a tail call to the rewritten stub.
6490 __ jmp(Operand(edi)); 6508 __ jmp(Operand(edi));
6491 } 6509 }
6492 6510
6493 6511
6494 #undef __ 6512 #undef __
6495 6513
6496 } } // namespace v8::internal 6514 } } // namespace v8::internal
6497 6515
6498 #endif // V8_TARGET_ARCH_IA32 6516 #endif // V8_TARGET_ARCH_IA32