Chromium Code Reviews

Unified diff: src/ia32/code-stubs-ia32.cc

Issue 8404030: Version 3.7.1 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 1 month ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 16 matching lines...)
 
 #include "v8.h"
 
 #if defined(V8_TARGET_ARCH_IA32)
 
 #include "bootstrapper.h"
 #include "code-stubs.h"
 #include "isolate.h"
 #include "jsregexp.h"
 #include "regexp-macro-assembler.h"
+#include "stub-cache.h"
 
 namespace v8 {
 namespace internal {
 
 #define __ ACCESS_MASM(masm)
 
 void ToNumberStub::Generate(MacroAssembler* masm) {
   // The ToNumber stub takes one argument in eax.
   Label check_heap_number, call_builtin;
   __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear);
(...skipping 184 matching lines...)
 
 
 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   // Stack layout on entry:
   //
   // [esp + kPointerSize]: constant elements.
   // [esp + (2 * kPointerSize)]: literal index.
   // [esp + (3 * kPointerSize)]: literals array.
 
   // All sizes here are multiples of kPointerSize.
-  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
+  int elements_size = 0;
+  if (length_ > 0) {
+    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length_)
+        : FixedArray::SizeFor(length_);
+  }
   int size = JSArray::kSize + elements_size;
 
   // Load boilerplate object into ecx and check if we need to create a
   // boilerplate.
   Label slow_case;
   __ mov(ecx, Operand(esp, 3 * kPointerSize));
   __ mov(eax, Operand(esp, 2 * kPointerSize));
   STATIC_ASSERT(kPointerSize == 4);
   STATIC_ASSERT(kSmiTagSize == 1);
   STATIC_ASSERT(kSmiTag == 0);
   __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                            FixedArray::kHeaderSize));
   Factory* factory = masm->isolate()->factory();
   __ cmp(ecx, factory->undefined_value());
   __ j(equal, &slow_case);
 
   if (FLAG_debug_code) {
     const char* message;
     Handle<Map> expected_map;
     if (mode_ == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
       expected_map = factory->fixed_array_map();
+    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map = factory->fixed_double_array_map();
     } else {
       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
       expected_map = factory->fixed_cow_array_map();
     }
     __ push(ecx);
     __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
     __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
     __ Assert(equal, message);
     __ pop(ecx);
(...skipping 12 matching lines...)
   }
 
   if (length_ > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
     __ lea(edx, Operand(eax, JSArray::kSize));
     __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
 
     // Copy the elements array.
-    for (int i = 0; i < elements_size; i += kPointerSize) {
-      __ mov(ebx, FieldOperand(ecx, i));
-      __ mov(FieldOperand(edx, i), ebx);
+    if (mode_ == CLONE_ELEMENTS) {
+      for (int i = 0; i < elements_size; i += kPointerSize) {
+        __ mov(ebx, FieldOperand(ecx, i));
+        __ mov(FieldOperand(edx, i), ebx);
+      }
+    } else {
+      ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+      int i;
+      for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
+        __ mov(ebx, FieldOperand(ecx, i));
+        __ mov(FieldOperand(edx, i), ebx);
+      }
+      while (i < elements_size) {
+        __ fld_d(FieldOperand(ecx, i));
+        __ fstp_d(FieldOperand(edx, i));
+        i += kDoubleSize;
+      }
+      ASSERT(i == elements_size);
     }
   }
 
   // Return and remove the on-stack parameters.
   __ ret(3 * kPointerSize);
 
   __ bind(&slow_case);
   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
 }
 
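The CLONE_DOUBLE_ELEMENTS branch added above copies the FixedDoubleArray header one pointer-sized word at a time, then moves the payload in kDoubleSize strides through the x87 stack (the fld_d/fstp_d pair). A minimal C++ sketch of the copy the emitted code performs at runtime, assuming a 32-bit target (kPointerSize == 4, kDoubleSize == 8); the function and parameter names are illustrative, not V8's:

#include <cstdint>
#include <cstring>

// Illustrative sketch of the runtime copy in the double-elements branch:
// header words move pointer-by-pointer, payload moves in 8-byte strides.
void CopyDoubleElements(const uint8_t* src, uint8_t* dst,
                        int header_size, int elements_size) {
  int i = 0;
  for (; i < header_size; i += 4) {    // kPointerSize-wide header copy
    std::memcpy(dst + i, src + i, 4);
  }
  while (i < elements_size) {          // one fld_d/fstp_d pair per element
    std::memcpy(dst + i, src + i, 8);  // copy one double
    i += 8;                            // kDoubleSize
  }
}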
(...skipping 3542 matching lines...)
                                                          Register scratch1,
                                                          Register scratch2,
                                                          bool object_is_smi,
                                                          Label* not_found) {
   // Use of registers. Register result is used as a temporary.
   Register number_string_cache = result;
   Register mask = scratch1;
   Register scratch = scratch2;
 
   // Load the number string cache.
-  ExternalReference roots_address =
-      ExternalReference::roots_address(masm->isolate());
+  ExternalReference roots_array_start =
+      ExternalReference::roots_array_start(masm->isolate());
   __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
   __ mov(number_string_cache,
-         Operand::StaticArray(scratch, times_pointer_size, roots_address));
+         Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
   // Make the hash mask from the length of the number string cache. It
   // contains two elements (number and string) for each cache entry.
   __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
   __ shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
   __ sub(mask, Immediate(1));  // Make mask.
 
   // Calculate the entry in the number string cache. The hash value in the
   // number string cache for smis is just the smi value, and the hash for
   // doubles is the xor of the upper and lower words. See
   // Heap::GetNumberStringCache.
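As the comments above spell out, the cache index is hash & mask, where the mask comes from halving the backing FixedArray's length (two slots per entry) and the hash is either the smi value itself or the xor of the double's two 32-bit halves. A small sketch of that computation (illustrative only; the function name is hypothetical):

#include <cstdint>
#include <cstring>

// Illustrative sketch of the number-string-cache index computation.
uint32_t CacheEntryIndex(bool is_smi, int32_t smi_value, double heap_number,
                         uint32_t cache_length) {
  uint32_t mask = cache_length / 2 - 1;  // two slots (number, string) per entry
  uint32_t hash;
  if (is_smi) {
    hash = static_cast<uint32_t>(smi_value);  // smi hash is the value itself
  } else {
    uint64_t bits;
    std::memcpy(&bits, &heap_number, sizeof(bits));
    hash = static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
  }
  return (hash & mask) * 2;  // number slot; the string sits at index + 1
}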
(...skipping 947 matching lines...)
   Register scratch = ecx;
 
   // Constants describing the call site code to patch.
   static const int kDeltaToCmpImmediate = 2;
   static const int kDeltaToMov = 8;
   static const int kDeltaToMovImmediate = 9;
   static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
   static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
   static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
 
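These constants describe the byte layout of the inlined check being patched: 0x81 0xFF encodes cmp edi, imm32, so its immediate begins 2 bytes in, and 0xB8 encodes mov eax, imm32, whose immediate begins at offset 9. A hedged sketch of the patch these deltas permit; the exact call-site layout (including what fills bytes 6-7, presumably a short jump) is an assumption here, not something this file states:

#include <cstdint>
#include <cstring>

// Illustrative sketch: rewrite the 32-bit immediates at a call site shaped
// as the deltas above assume. 'site' is hypothetical; the real stub works
// relative to a delta computed from the return address.
void PatchInlinedCallSite(uint8_t* site, uint32_t map_bits,
                          uint32_t answer_bits) {
  // site[0..1] == 0x81 0xff: "cmp edi, imm32"
  std::memcpy(site + 2, &map_bits, 4);     // kDeltaToCmpImmediate == 2
  // site[8] == 0xb8: "mov eax, imm32"
  std::memcpy(site + 9, &answer_bits, 4);  // kDeltaToMovImmediate == 9
}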
-  ExternalReference roots_address =
-      ExternalReference::roots_address(masm->isolate());
+  ExternalReference roots_array_start =
+      ExternalReference::roots_array_start(masm->isolate());
 
   ASSERT_EQ(object.code(), InstanceofStub::left().code());
   ASSERT_EQ(function.code(), InstanceofStub::right().code());
 
   // Get the object and function - they are always both needed.
   Label slow, not_js_object;
   if (!HasArgsInRegisters()) {
     __ mov(object, Operand(esp, 2 * kPointerSize));
     __ mov(function, Operand(esp, 1 * kPointerSize));
   }
 
   // Check that the left hand is a JS object.
   __ JumpIfSmi(object, &not_js_object);
   __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
 
   // If there is a call site cache don't look in the global cache, but do the
   // real lookup and update the call site cache.
   if (!HasCallSiteInlineCheck()) {
     // Look up the function and the map in the instanceof cache.
     Label miss;
     __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
-    __ cmp(function,
-           Operand::StaticArray(scratch, times_pointer_size, roots_address));
+    __ cmp(function, Operand::StaticArray(scratch,
+                                          times_pointer_size,
+                                          roots_array_start));
     __ j(not_equal, &miss, Label::kNear);
     __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
     __ cmp(map, Operand::StaticArray(
-        scratch, times_pointer_size, roots_address));
+        scratch, times_pointer_size, roots_array_start));
     __ j(not_equal, &miss, Label::kNear);
     __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
     __ mov(eax, Operand::StaticArray(
-        scratch, times_pointer_size, roots_address));
+        scratch, times_pointer_size, roots_array_start));
     __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
     __ bind(&miss);
   }
 
   // Get the prototype of the function.
-  __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
+  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
 
   // Check that the function prototype is a JS object.
   __ JumpIfSmi(prototype, &slow);
   __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
 
   // Update the global instanceof or call site inlined cache with the current
   // map and function. The cached answer will be set when it is known below.
   if (!HasCallSiteInlineCheck()) {
     __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
-    __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
+    __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
+           map);
     __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
-    __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
+    __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
            function);
   } else {
     // The constants for the code patching are based on no push instructions
     // at the call site.
     ASSERT(HasArgsInRegisters());
     // Get return address and delta to inlined map check.
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
       __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
(...skipping 16 matching lines...)
   __ j(equal, &is_not_instance, Label::kNear);
   __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
   __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
   __ jmp(&loop);
 
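The loop above is the heart of the stub: follow the prototype chain from the object until it either reaches the function's prototype (an instance) or runs off the end (not an instance). A minimal sketch of the same walk; the type and accessor names are hypothetical stand-ins for the map/prototype loads in the emitted code:

// Illustrative sketch of the prototype-chain walk in the loop above.
struct Obj {
  const Obj* map_prototype;  // what the map->kPrototypeOffset load yields
};

bool IsInstanceOf(const Obj* start, const Obj* function_prototype,
                  const Obj* null_value) {
  const Obj* p = start;
  for (;;) {
    if (p == function_prototype) return true;  // branch to &is_instance
    if (p == null_value) return false;         // branch to &is_not_instance
    p = p->map_prototype;                      // the two movs in the loop body
  }
}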
   __ bind(&is_instance);
   if (!HasCallSiteInlineCheck()) {
     __ Set(eax, Immediate(0));
     __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
     __ mov(Operand::StaticArray(scratch,
-                                times_pointer_size, roots_address), eax);
+                                times_pointer_size, roots_array_start), eax);
   } else {
     // Get return address and delta to inlined map check.
     __ mov(eax, factory->true_value());
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
       __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
       __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
     }
     __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
     if (!ReturnTrueFalseObject()) {
       __ Set(eax, Immediate(0));
     }
   }
   __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
 
   __ bind(&is_not_instance);
   if (!HasCallSiteInlineCheck()) {
     __ Set(eax, Immediate(Smi::FromInt(1)));
     __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
     __ mov(Operand::StaticArray(
-        scratch, times_pointer_size, roots_address), eax);
+        scratch, times_pointer_size, roots_array_start), eax);
   } else {
     // Get return address and delta to inlined map check.
     __ mov(eax, factory->false_value());
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
       __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
       __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
     }
     __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
(...skipping 768 matching lines...)
   // Collect the two characters in a register.
   Register chars = c1;
   __ shl(c2, kBitsPerByte);
   __ or_(chars, c2);
 
   // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
   // hash: hash of two character string.
 
   // Load the symbol table.
   Register symbol_table = c2;
-  ExternalReference roots_address =
-      ExternalReference::roots_address(masm->isolate());
+  ExternalReference roots_array_start =
+      ExternalReference::roots_array_start(masm->isolate());
   __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
   __ mov(symbol_table,
-         Operand::StaticArray(scratch, times_pointer_size, roots_address));
+         Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
 
   // Calculate capacity mask from the symbol table capacity.
   Register mask = scratch2;
   __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
   __ SmiUntag(mask);
   __ sub(mask, Immediate(1));
 
   // Registers
   // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
   // hash: hash of two character string
(...skipping 762 matching lines...)
   // Do a tail call to the rewritten stub.
   __ jmp(edi);
 }
 
 
 // Helper function used to check that the dictionary doesn't contain
 // the property. This function may return false negatives, so miss_label
 // must always call a backup property check that is complete.
 // This function is safe to call if the receiver has fast properties.
 // Name must be a symbol and receiver must be a heap object.
-MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
+void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
+                                                        Label* miss,
+                                                        Label* done,
+                                                        Register properties,
+                                                        Handle<String> name,
+                                                        Register r0) {
+  ASSERT(name->IsSymbol());
+
+  // If names of slots in range from 1 to kProbes - 1 for the hash value are
+  // not equal to the name and kProbes-th slot is not used (its name is the
+  // undefined value), it guarantees the hash table doesn't contain the
+  // property. It's true even if some slots represent deleted properties
+  // (their names are the null value).
+  for (int i = 0; i < kInlinedProbes; i++) {
+    // Compute the masked index: (hash + i + i * i) & mask.
+    Register index = r0;
+    // Capacity is smi 2^n.
+    __ mov(index, FieldOperand(properties, kCapacityOffset));
+    __ dec(index);
+    __ and_(index,
+            Immediate(Smi::FromInt(name->Hash() +
+                                   StringDictionary::GetProbeOffset(i))));
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
+    Register entity_name = r0;
+    // Having undefined at this place means the name is not contained.
+    ASSERT_EQ(kSmiTagSize, 1);
+    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
+                                kElementsStartOffset - kHeapObjectTag));
+    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
+    __ j(equal, done);
+
+    // Stop if found the property.
+    __ cmp(entity_name, Handle<String>(name));
+    __ j(equal, miss);
+
+    // Check if the entry name is not a symbol.
+    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
+    __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
+              kIsSymbolMask);
+    __ j(zero, miss);
+  }
+
+  StringDictionaryLookupStub stub(properties,
+                                  r0,
+                                  r0,
+                                  StringDictionaryLookupStub::NEGATIVE_LOOKUP);
+  __ push(Immediate(Handle<Object>(name)));
+  __ push(Immediate(name->Hash()));
+  __ CallStub(&stub);
+  __ test(r0, r0);
+  __ j(not_zero, miss);
+  __ jmp(done);
+}
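The new handlified GenerateNegativeLookup above inlines the first kInlinedProbes steps of the dictionary probe: compute (hash + i + i*i) & mask, scale by the three-slot entry size, then test the key against undefined (provably absent) and against the name (present). A sketch of that logic in plain C++ (illustrative; the signature and names are inventions for the example):

#include <cstdint>

static const int kEntrySize = 3;  // key, value, details slots per entry

// Illustrative negative lookup: true means the name is provably absent,
// false means present or unresolved (the real stub then falls back to the
// full probe loop, as the CallStub at the end of the function does).
bool InlineNegativeLookup(const void* const* keys, uint32_t capacity,
                          uint32_t hash, const void* name,
                          const void* undefined_value, int inlined_probes) {
  uint32_t mask = capacity - 1;  // capacity is a power of two
  for (int i = 0; i < inlined_probes; i++) {
    uint32_t index = (hash + i + i * i) & mask;  // quadratic probe
    const void* key = keys[index * kEntrySize];  // the lea: index *= 3
    if (key == undefined_value) return true;     // free slot: not contained
    if (key == name) return false;               // hit: jump to miss
  }
  return false;  // undecided after the inlined probes
}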
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup(
     MacroAssembler* masm,
     Label* miss,
     Label* done,
     Register properties,
     String* name,
     Register r0) {
   ASSERT(name->IsSymbol());
 
   // If names of slots in range from 1 to kProbes - 1 for the hash value are
   // not equal to the name and kProbes-th slot is not used (its name is the
(...skipping 213 matching lines...)
   { edx, ecx, ebx, EMIT_REMEMBERED_SET },
   // GenerateStoreField calls the stub with two different permutations of
   // registers. This is the second.
   { ebx, ecx, edx, EMIT_REMEMBERED_SET },
   // StoreIC::GenerateNormal via GenerateDictionaryStore
   { ebx, edi, edx, EMIT_REMEMBERED_SET },
   // KeyedStoreIC::GenerateGeneric.
   { ebx, edx, ecx, EMIT_REMEMBERED_SET},
   // KeyedStoreStubCompiler::GenerateStoreFastElement.
   { edi, edx, ecx, EMIT_REMEMBERED_SET},
+  // ElementsTransitionGenerator::GenerateSmiOnlyToObject
+  // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+  // and ElementsTransitionGenerator::GenerateDoubleToObject
+  { edx, ebx, edi, EMIT_REMEMBERED_SET},
+  // ElementsTransitionGenerator::GenerateDoubleToObject
+  { eax, edx, esi, EMIT_REMEMBERED_SET},
+  { edx, eax, edi, EMIT_REMEMBERED_SET},
   // Null termination.
   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
 };
 
 
 bool RecordWriteStub::IsPregenerated() {
   for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
        entry++) {
     if (object_.is(entry->object) &&
(...skipping 222 matching lines...)
   }
 
   __ bind(&need_incremental_pop_object);
   __ pop(regs_.object());
 
   __ bind(&need_incremental);
 
   // Fall through when we need to inform the incremental marker.
 }
 
-
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_IA32
