OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 57 matching lines...)
68 | 68 |
69 // Check that the key in the entry matches the name. | 69 // Check that the key in the entry matches the name. |
70 // Multiply entry offset by 16 to get the entry address. Since the | 70 // Multiply entry offset by 16 to get the entry address. Since the |
71 // offset register already holds the entry offset times four, multiply | 71 // offset register already holds the entry offset times four, multiply |
72 // by a further four. | 72 // by a further four. |
73 __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0)); | 73 __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0)); |
74 __ j(not_equal, &miss); | 74 __ j(not_equal, &miss); |
75 | 75 |
76 // Get the map entry from the cache. | 76 // Get the map entry from the cache. |
77 // Use key_offset + kPointerSize * 2, rather than loading map_offset. | 77 // Use key_offset + kPointerSize * 2, rather than loading map_offset. |
78 __ movq(kScratchRegister, | 78 __ movp(kScratchRegister, |
79 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2)); | 79 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2)); |
80 __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset)); | 80 __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset)); |
81 __ j(not_equal, &miss); | 81 __ j(not_equal, &miss); |
82 | 82 |
83 // Get the code entry from the cache. | 83 // Get the code entry from the cache. |
84 __ LoadAddress(kScratchRegister, value_offset); | 84 __ LoadAddress(kScratchRegister, value_offset); |
85 __ movq(kScratchRegister, | 85 __ movp(kScratchRegister, |
86 Operand(kScratchRegister, offset, scale_factor, 0)); | 86 Operand(kScratchRegister, offset, scale_factor, 0)); |
87 | 87 |
88 // Check that the flags match what we're looking for. | 88 // Check that the flags match what we're looking for. |
89 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset)); | 89 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset)); |
90 __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup)); | 90 __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup)); |
91 __ cmpl(offset, Immediate(flags)); | 91 __ cmpl(offset, Immediate(flags)); |
92 __ j(not_equal, &miss); | 92 __ j(not_equal, &miss); |
93 | 93 |
94 #ifdef DEBUG | 94 #ifdef DEBUG |
95 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | 95 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { |
(...skipping 16 matching lines...)
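A minimal C++ sketch of what this probe checks, assuming the entry layout implied by the comments above: key table and value table addressed from separate bases, with the map slot at key_offset + 2 * kPointerSize. Types and accessors are illustrative, not V8's declarations.

    static bool ProbeHits(char* key_base, char* value_base, int offset_times_4,
                          Name* name, Map* receiver_map, int flags) {
      // The offset register holds the entry offset times four; the times_4
      // scale factor in the Operands above makes the byte offset offset * 16.
      intptr_t byte_offset = static_cast<intptr_t>(offset_times_4) * 4;
      Name* key = *reinterpret_cast<Name**>(key_base + byte_offset);
      if (key != name) return false;                              // -> &miss
      Map* map = *reinterpret_cast<Map**>(
          key_base + byte_offset + 2 * kPointerSize);             // map slot
      if (map != receiver_map) return false;                      // -> &miss
      Code* code = *reinterpret_cast<Code**>(value_base + byte_offset);
      int code_flags = code->flags() & ~Code::kFlagsNotUsedInLookup;
      return code_flags == flags;                                 // else &miss
    }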
112 Register receiver, | 112 Register receiver, |
113 Handle<Name> name, | 113 Handle<Name> name, |
114 Register scratch0, | 114 Register scratch0, |
115 Register scratch1) { | 115 Register scratch1) { |
116 ASSERT(name->IsUniqueName()); | 116 ASSERT(name->IsUniqueName()); |
117 ASSERT(!receiver.is(scratch0)); | 117 ASSERT(!receiver.is(scratch0)); |
118 Counters* counters = masm->isolate()->counters(); | 118 Counters* counters = masm->isolate()->counters(); |
119 __ IncrementCounter(counters->negative_lookups(), 1); | 119 __ IncrementCounter(counters->negative_lookups(), 1); |
120 __ IncrementCounter(counters->negative_lookups_miss(), 1); | 120 __ IncrementCounter(counters->negative_lookups_miss(), 1); |
121 | 121 |
122 __ movq(scratch0, FieldOperand(receiver, HeapObject::kMapOffset)); | 122 __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset)); |
123 | 123 |
124 const int kInterceptorOrAccessCheckNeededMask = | 124 const int kInterceptorOrAccessCheckNeededMask = |
125 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); | 125 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); |
126 | 126 |
127 // Bail out if the receiver has a named interceptor or requires access checks. | 127 // Bail out if the receiver has a named interceptor or requires access checks. |
128 __ testb(FieldOperand(scratch0, Map::kBitFieldOffset), | 128 __ testb(FieldOperand(scratch0, Map::kBitFieldOffset), |
129 Immediate(kInterceptorOrAccessCheckNeededMask)); | 129 Immediate(kInterceptorOrAccessCheckNeededMask)); |
130 __ j(not_zero, miss_label); | 130 __ j(not_zero, miss_label); |
131 | 131 |
132 // Check that receiver is a JSObject. | 132 // Check that receiver is a JSObject. |
133 __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE); | 133 __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE); |
134 __ j(below, miss_label); | 134 __ j(below, miss_label); |
135 | 135 |
136 // Load properties array. | 136 // Load properties array. |
137 Register properties = scratch0; | 137 Register properties = scratch0; |
138 __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset)); | 138 __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset)); |
139 | 139 |
140 // Check that the properties array is a dictionary. | 140 // Check that the properties array is a dictionary. |
141 __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset), | 141 __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset), |
142 Heap::kHashTableMapRootIndex); | 142 Heap::kHashTableMapRootIndex); |
143 __ j(not_equal, miss_label); | 143 __ j(not_equal, miss_label); |
144 | 144 |
145 Label done; | 145 Label done; |
146 NameDictionaryLookupStub::GenerateNegativeLookup(masm, | 146 NameDictionaryLookupStub::GenerateNegativeLookup(masm, |
147 miss_label, | 147 miss_label, |
148 &done, | 148 &done, |
(...skipping 68 matching lines...)
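Before the dictionary probe, the helper above bails out to miss_label unless the receiver is safe to reason about statically; sketched below, with illustrative predicates standing in for the bit tests in the code.

    static bool CanDoNegativeLookup(JSObject* receiver, Heap* heap) {
      Map* map = receiver->map();
      // Interceptors and access checks could make the property appear later.
      if (map->has_named_interceptor() || map->is_access_check_needed())
        return false;
      // Must be a spec object (instance type >= FIRST_SPEC_OBJECT_TYPE).
      if (map->instance_type() < FIRST_SPEC_OBJECT_TYPE) return false;
      // The name's absence is proven by probing the property dictionary, so
      // the properties array must actually be a dictionary (hash table map).
      return receiver->properties()->map() == heap->hash_table_map();
    }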
217 // entering the runtime system. | 217 // entering the runtime system. |
218 __ bind(&miss); | 218 __ bind(&miss); |
219 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); | 219 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); |
220 } | 220 } |
221 | 221 |
222 | 222 |
223 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, | 223 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, |
224 int index, | 224 int index, |
225 Register prototype) { | 225 Register prototype) { |
226 // Load the global or builtins object from the current context. | 226 // Load the global or builtins object from the current context. |
227 __ movq(prototype, | 227 __ movp(prototype, |
228 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 228 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
229 // Load the native context from the global or builtins object. | 229 // Load the native context from the global or builtins object. |
230 __ movq(prototype, | 230 __ movp(prototype, |
231 FieldOperand(prototype, GlobalObject::kNativeContextOffset)); | 231 FieldOperand(prototype, GlobalObject::kNativeContextOffset)); |
232 // Load the function from the native context. | 232 // Load the function from the native context. |
233 __ movq(prototype, Operand(prototype, Context::SlotOffset(index))); | 233 __ movp(prototype, Operand(prototype, Context::SlotOffset(index))); |
234 // Load the initial map. The global functions all have initial maps. | 234 // Load the initial map. The global functions all have initial maps. |
235 __ movq(prototype, | 235 __ movp(prototype, |
236 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); | 236 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); |
237 // Load the prototype from the initial map. | 237 // Load the prototype from the initial map. |
238 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); | 238 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); |
239 } | 239 } |
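The five movp loads above follow this pointer chain; a sketch with illustrative accessors (per the comment at line 234, the slot read from the function is always an initial map here):

    Object* global    = context->get(Context::GLOBAL_OBJECT_INDEX);  // via rsi
    Context* native   = GlobalObject::cast(global)->native_context();
    JSFunction* fn    = JSFunction::cast(native->get(index));
    Map* initial_map  = Map::cast(fn->prototype_or_initial_map());
    Object* prototype = initial_map->prototype();                    // result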
240 | 240 |
241 | 241 |
242 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( | 242 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( |
243 MacroAssembler* masm, | 243 MacroAssembler* masm, |
244 int index, | 244 int index, |
245 Register prototype, | 245 Register prototype, |
246 Label* miss) { | 246 Label* miss) { |
247 Isolate* isolate = masm->isolate(); | 247 Isolate* isolate = masm->isolate(); |
248 // Check we're still in the same context. | 248 // Check we're still in the same context. |
249 __ Move(prototype, isolate->global_object()); | 249 __ Move(prototype, isolate->global_object()); |
250 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), | 250 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), |
251 prototype); | 251 prototype); |
252 __ j(not_equal, miss); | 252 __ j(not_equal, miss); |
253 // Get the global function with the given index. | 253 // Get the global function with the given index. |
254 Handle<JSFunction> function( | 254 Handle<JSFunction> function( |
255 JSFunction::cast(isolate->native_context()->get(index))); | 255 JSFunction::cast(isolate->native_context()->get(index))); |
256 // Load its initial map. The global functions all have initial maps. | 256 // Load its initial map. The global functions all have initial maps. |
257 __ Move(prototype, Handle<Map>(function->initial_map())); | 257 __ Move(prototype, Handle<Map>(function->initial_map())); |
258 // Load the prototype from the initial map. | 258 // Load the prototype from the initial map. |
259 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); | 259 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); |
260 } | 260 } |
261 | 261 |
262 | 262 |
263 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, | 263 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, |
264 Register receiver, | 264 Register receiver, |
265 Register scratch, | 265 Register scratch, |
266 Label* miss_label) { | 266 Label* miss_label) { |
267 // Check that the receiver isn't a smi. | 267 // Check that the receiver isn't a smi. |
268 __ JumpIfSmi(receiver, miss_label); | 268 __ JumpIfSmi(receiver, miss_label); |
269 | 269 |
270 // Check that the object is a JS array. | 270 // Check that the object is a JS array. |
271 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); | 271 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); |
272 __ j(not_equal, miss_label); | 272 __ j(not_equal, miss_label); |
273 | 273 |
274 // Load length directly from the JS array. | 274 // Load length directly from the JS array. |
275 __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset)); | 275 __ movp(rax, FieldOperand(receiver, JSArray::kLengthOffset)); |
276 __ ret(0); | 276 __ ret(0); |
277 } | 277 } |
278 | 278 |
279 | 279 |
280 // Generate code to check if an object is a string. If the object is | 280 // Generate code to check if an object is a string. If the object is |
281 // a string, the map's instance type is left in the scratch register. | 281 // a string, the map's instance type is left in the scratch register. |
282 static void GenerateStringCheck(MacroAssembler* masm, | 282 static void GenerateStringCheck(MacroAssembler* masm, |
283 Register receiver, | 283 Register receiver, |
284 Register scratch, | 284 Register scratch, |
285 Label* smi, | 285 Label* smi, |
286 Label* non_string_object) { | 286 Label* non_string_object) { |
287 // Check that the object isn't a smi. | 287 // Check that the object isn't a smi. |
288 __ JumpIfSmi(receiver, smi); | 288 __ JumpIfSmi(receiver, smi); |
289 | 289 |
290 // Check that the object is a string. | 290 // Check that the object is a string. |
291 __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); | 291 __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); |
292 __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); | 292 __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); |
293 STATIC_ASSERT(kNotStringTag != 0); | 293 STATIC_ASSERT(kNotStringTag != 0); |
294 __ testl(scratch, Immediate(kNotStringTag)); | 294 __ testl(scratch, Immediate(kNotStringTag)); |
295 __ j(not_zero, non_string_object); | 295 __ j(not_zero, non_string_object); |
296 } | 296 } |
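kNotStringTag is a non-zero mask and string instance types have those bits clear, so the testl above is a one-instruction string check; sketched:

    static bool IsStringType(HeapObject* object) {
      int type = object->map()->instance_type();  // left in scratch register
      return (type & kNotStringTag) == 0;  // any set bit -> non_string_object
    }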
297 | 297 |
298 | 298 |
299 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm, | 299 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm, |
300 Register receiver, | 300 Register receiver, |
301 Register scratch1, | 301 Register scratch1, |
302 Register scratch2, | 302 Register scratch2, |
303 Label* miss) { | 303 Label* miss) { |
304 Label check_wrapper; | 304 Label check_wrapper; |
305 | 305 |
306 // Check if the object is a string leaving the instance type in the | 306 // Check if the object is a string leaving the instance type in the |
307 // scratch register. | 307 // scratch register. |
308 GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper); | 308 GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper); |
309 | 309 |
310 // Load length directly from the string. | 310 // Load length directly from the string. |
311 __ movq(rax, FieldOperand(receiver, String::kLengthOffset)); | 311 __ movp(rax, FieldOperand(receiver, String::kLengthOffset)); |
312 __ ret(0); | 312 __ ret(0); |
313 | 313 |
314 // Check if the object is a JSValue wrapper. | 314 // Check if the object is a JSValue wrapper. |
315 __ bind(&check_wrapper); | 315 __ bind(&check_wrapper); |
316 __ cmpl(scratch1, Immediate(JS_VALUE_TYPE)); | 316 __ cmpl(scratch1, Immediate(JS_VALUE_TYPE)); |
317 __ j(not_equal, miss); | 317 __ j(not_equal, miss); |
318 | 318 |
319 // Check if the wrapped value is a string and load the length | 319 // Check if the wrapped value is a string and load the length |
320 // directly if it is. | 320 // directly if it is. |
321 __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset)); | 321 __ movp(scratch2, FieldOperand(receiver, JSValue::kValueOffset)); |
322 GenerateStringCheck(masm, scratch2, scratch1, miss, miss); | 322 GenerateStringCheck(masm, scratch2, scratch1, miss, miss); |
323 __ movq(rax, FieldOperand(scratch2, String::kLengthOffset)); | 323 __ movp(rax, FieldOperand(scratch2, String::kLengthOffset)); |
324 __ ret(0); | 324 __ ret(0); |
325 } | 325 } |
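The two return paths above, sketched at the object level; Miss() is a hypothetical stand-in for a jump to the miss label.

    static Object* LoadStringLength(Object* receiver) {
      if (receiver->IsString()) return String::cast(receiver)->length();
      if (receiver->IsJSValue()) {                  // the check_wrapper path
        Object* value = JSValue::cast(receiver)->value();
        if (value->IsString()) return String::cast(value)->length();
      }
      return Miss();
    }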
326 | 326 |
327 | 327 |
328 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, | 328 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, |
329 Register receiver, | 329 Register receiver, |
330 Register result, | 330 Register result, |
331 Register scratch, | 331 Register scratch, |
332 Label* miss_label) { | 332 Label* miss_label) { |
333 __ TryGetFunctionPrototype(receiver, result, miss_label); | 333 __ TryGetFunctionPrototype(receiver, result, miss_label); |
334 if (!result.is(rax)) __ movq(rax, result); | 334 if (!result.is(rax)) __ movp(rax, result); |
335 __ ret(0); | 335 __ ret(0); |
336 } | 336 } |
337 | 337 |
338 | 338 |
339 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, | 339 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, |
340 Register dst, | 340 Register dst, |
341 Register src, | 341 Register src, |
342 bool inobject, | 342 bool inobject, |
343 int index, | 343 int index, |
344 Representation representation) { | 344 Representation representation) { |
345 ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); | 345 ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); |
346 int offset = index * kPointerSize; | 346 int offset = index * kPointerSize; |
347 if (!inobject) { | 347 if (!inobject) { |
348 // Calculate the offset into the properties array. | 348 // Calculate the offset into the properties array. |
349 offset = offset + FixedArray::kHeaderSize; | 349 offset = offset + FixedArray::kHeaderSize; |
350 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset)); | 350 __ movp(dst, FieldOperand(src, JSObject::kPropertiesOffset)); |
351 src = dst; | 351 src = dst; |
352 } | 352 } |
353 __ movq(dst, FieldOperand(src, offset)); | 353 __ movp(dst, FieldOperand(src, offset)); |
354 } | 354 } |
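A sketch of the two cases the offset arithmetic above distinguishes (ReadField is an illustrative accessor, not V8 API):

    static Object* FastPropertyLoad(JSObject* src, bool inobject, int index) {
      if (inobject) {
        return src->ReadField(index * kPointerSize);     // slot in object body
      }
      FixedArray* props = src->properties();             // out-of-line storage
      return props->ReadField(index * kPointerSize + FixedArray::kHeaderSize);
    }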
355 | 355 |
356 | 356 |
357 static void PushInterceptorArguments(MacroAssembler* masm, | 357 static void PushInterceptorArguments(MacroAssembler* masm, |
358 Register receiver, | 358 Register receiver, |
359 Register holder, | 359 Register holder, |
360 Register name, | 360 Register name, |
361 Handle<JSObject> holder_obj) { | 361 Handle<JSObject> holder_obj) { |
362 STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0); | 362 STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0); |
363 STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1); | 363 STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1); |
(...skipping 37 matching lines...)
401 // -- rsp[0] : return address | 401 // -- rsp[0] : return address |
402 // -- rsp[8] : last argument in the internal frame of the caller | 402 // -- rsp[8] : last argument in the internal frame of the caller |
403 // ----------------------------------- | 403 // ----------------------------------- |
404 __ movq(scratch, StackOperandForReturnAddress(0)); | 404 __ movq(scratch, StackOperandForReturnAddress(0)); |
405 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); | 405 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); |
406 __ movq(StackOperandForReturnAddress(0), scratch); | 406 __ movq(StackOperandForReturnAddress(0), scratch); |
407 __ Move(scratch, Smi::FromInt(0)); | 407 __ Move(scratch, Smi::FromInt(0)); |
408 StackArgumentsAccessor args(rsp, kFastApiCallArguments, | 408 StackArgumentsAccessor args(rsp, kFastApiCallArguments, |
409 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 409 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
410 for (int i = 0; i < kFastApiCallArguments; i++) { | 410 for (int i = 0; i < kFastApiCallArguments; i++) { |
411 __ movq(args.GetArgumentOperand(i), scratch); | 411 __ movp(args.GetArgumentOperand(i), scratch); |
412 } | 412 } |
413 } | 413 } |
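Stack effect of the helper above, sketched as a before/after diagram (kFastApiCallArguments is 7 in this revision, per the STATIC_ASSERT in GenerateFastApiCall):

    // before:                          after:
    //   rsp[0]   return address          rsp[0]    return address
    //   rsp[8]   last caller argument    rsp[8]    Smi 0  \
    //   ...                              ...               7 reserved slots
    //                                    rsp[56]   Smi 0  /
    //                                    rsp[64]   last caller argument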
414 | 414 |
415 | 415 |
416 // Undoes the effects of ReserveSpaceForFastApiCall. | 416 // Undoes the effects of ReserveSpaceForFastApiCall. |
417 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { | 417 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { |
418 // ----------- S t a t e ------------- | 418 // ----------- S t a t e ------------- |
419 // -- rsp[0] : return address. | 419 // -- rsp[0] : return address. |
420 // -- rsp[8] : last fast api call extra argument. | 420 // -- rsp[8] : last fast api call extra argument. |
421 // -- ... | 421 // -- ... |
(...skipping 17 matching lines...)
439 | 439 |
440 // Generates call to API function. | 440 // Generates call to API function. |
441 static void GenerateFastApiCall(MacroAssembler* masm, | 441 static void GenerateFastApiCall(MacroAssembler* masm, |
442 const CallOptimization& optimization, | 442 const CallOptimization& optimization, |
443 int argc) { | 443 int argc) { |
444 typedef FunctionCallbackArguments FCA; | 444 typedef FunctionCallbackArguments FCA; |
445 StackArgumentsAccessor args(rsp, argc + kFastApiCallArguments); | 445 StackArgumentsAccessor args(rsp, argc + kFastApiCallArguments); |
446 | 446 |
447 // Save calling context. | 447 // Save calling context. |
448 int offset = argc + kFastApiCallArguments; | 448 int offset = argc + kFastApiCallArguments; |
449 __ movq(args.GetArgumentOperand(offset - FCA::kContextSaveIndex), rsi); | 449 __ movp(args.GetArgumentOperand(offset - FCA::kContextSaveIndex), rsi); |
450 | 450 |
451 // Get the function and setup the context. | 451 // Get the function and setup the context. |
452 Handle<JSFunction> function = optimization.constant_function(); | 452 Handle<JSFunction> function = optimization.constant_function(); |
453 __ Move(rdi, function); | 453 __ Move(rdi, function); |
454 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 454 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
455 // Construct the FunctionCallbackInfo on the stack. | 455 // Construct the FunctionCallbackInfo on the stack. |
456 __ movq(args.GetArgumentOperand(offset - FCA::kCalleeIndex), rdi); | 456 __ movp(args.GetArgumentOperand(offset - FCA::kCalleeIndex), rdi); |
457 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | 457 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
458 Handle<Object> call_data(api_call_info->data(), masm->isolate()); | 458 Handle<Object> call_data(api_call_info->data(), masm->isolate()); |
459 if (masm->isolate()->heap()->InNewSpace(*call_data)) { | 459 if (masm->isolate()->heap()->InNewSpace(*call_data)) { |
460 __ Move(rcx, api_call_info); | 460 __ Move(rcx, api_call_info); |
461 __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset)); | 461 __ movp(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset)); |
462 __ movq(args.GetArgumentOperand(offset - FCA::kDataIndex), rbx); | 462 __ movp(args.GetArgumentOperand(offset - FCA::kDataIndex), rbx); |
463 } else { | 463 } else { |
464 __ Move(args.GetArgumentOperand(offset - FCA::kDataIndex), call_data); | 464 __ Move(args.GetArgumentOperand(offset - FCA::kDataIndex), call_data); |
465 } | 465 } |
466 __ Move(kScratchRegister, | 466 __ Move(kScratchRegister, |
467 ExternalReference::isolate_address(masm->isolate())); | 467 ExternalReference::isolate_address(masm->isolate())); |
468 __ movq(args.GetArgumentOperand(offset - FCA::kIsolateIndex), | 468 __ movp(args.GetArgumentOperand(offset - FCA::kIsolateIndex), |
469 kScratchRegister); | 469 kScratchRegister); |
470 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); | 470 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); |
471 __ movq(args.GetArgumentOperand(offset - FCA::kReturnValueDefaultValueIndex), | 471 __ movp(args.GetArgumentOperand(offset - FCA::kReturnValueDefaultValueIndex), |
472 kScratchRegister); | 472 kScratchRegister); |
473 __ movq(args.GetArgumentOperand(offset - FCA::kReturnValueOffset), | 473 __ movp(args.GetArgumentOperand(offset - FCA::kReturnValueOffset), |
474 kScratchRegister); | 474 kScratchRegister); |
475 | 475 |
476 // Prepare arguments. | 476 // Prepare arguments. |
477 STATIC_ASSERT(kFastApiCallArguments == 7); | 477 STATIC_ASSERT(kFastApiCallArguments == 7); |
478 __ lea(rax, args.GetArgumentOperand(offset - FCA::kHolderIndex)); | 478 __ lea(rax, args.GetArgumentOperand(offset - FCA::kHolderIndex)); |
479 | 479 |
480 GenerateFastApiCallBody(masm, optimization, argc, false); | 480 GenerateFastApiCallBody(masm, optimization, argc, false); |
481 } | 481 } |
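The implicit FunctionCallbackArguments slots, listed in the order the code above writes them (the holder slot itself was stored earlier by CheckPrototypes via save_at_depth; rax ends up pointing at it):

    //   kContextSaveIndex             <- rsi (calling context)
    //   kCalleeIndex                  <- rdi (the constant function)
    //   kDataIndex                    <- call data, via rbx if in new space
    //   kIsolateIndex                 <- isolate address
    //   kReturnValueDefaultValueIndex <- undefined
    //   kReturnValueOffset            <- undefined
    //   kHolderIndex                  <- filled earlier; rax = &slot (lea)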
482 | 482 |
483 | 483 |
(...skipping 45 matching lines...)
529 __ Move(scratch2, function); | 529 __ Move(scratch2, function); |
530 __ push(scratch2); | 530 __ push(scratch2); |
531 | 531 |
532 Isolate* isolate = masm->isolate(); | 532 Isolate* isolate = masm->isolate(); |
533 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | 533 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
534 Handle<Object> call_data(api_call_info->data(), isolate); | 534 Handle<Object> call_data(api_call_info->data(), isolate); |
535 // Push data from ExecutableAccessorInfo. | 535 // Push data from ExecutableAccessorInfo. |
536 bool call_data_undefined = false; | 536 bool call_data_undefined = false; |
537 if (isolate->heap()->InNewSpace(*call_data)) { | 537 if (isolate->heap()->InNewSpace(*call_data)) { |
538 __ Move(scratch2, api_call_info); | 538 __ Move(scratch2, api_call_info); |
539 __ movq(scratch3, FieldOperand(scratch2, CallHandlerInfo::kDataOffset)); | 539 __ movp(scratch3, FieldOperand(scratch2, CallHandlerInfo::kDataOffset)); |
540 } else if (call_data->IsUndefined()) { | 540 } else if (call_data->IsUndefined()) { |
541 call_data_undefined = true; | 541 call_data_undefined = true; |
542 __ LoadRoot(scratch3, Heap::kUndefinedValueRootIndex); | 542 __ LoadRoot(scratch3, Heap::kUndefinedValueRootIndex); |
543 } else { | 543 } else { |
544 __ Move(scratch3, call_data); | 544 __ Move(scratch3, call_data); |
545 } | 545 } |
546 // call data | 546 // call data |
547 __ push(scratch3); | 547 __ push(scratch3); |
548 if (!call_data_undefined) { | 548 if (!call_data_undefined) { |
549 __ LoadRoot(scratch3, Heap::kUndefinedValueRootIndex); | 549 __ LoadRoot(scratch3, Heap::kUndefinedValueRootIndex); |
550 } | 550 } |
551 // return value | 551 // return value |
552 __ push(scratch3); | 552 __ push(scratch3); |
553 // return value default | 553 // return value default |
554 __ push(scratch3); | 554 __ push(scratch3); |
555 // isolate | 555 // isolate |
556 __ Move(scratch3, | 556 __ Move(scratch3, |
557 ExternalReference::isolate_address(masm->isolate())); | 557 ExternalReference::isolate_address(masm->isolate())); |
558 __ push(scratch3); | 558 __ push(scratch3); |
559 // holder | 559 // holder |
560 __ push(receiver); | 560 __ push(receiver); |
561 | 561 |
562 ASSERT(!scratch1.is(rax)); | 562 ASSERT(!scratch1.is(rax)); |
563 // store receiver address for GenerateFastApiCallBody | 563 // store receiver address for GenerateFastApiCallBody |
564 __ movq(rax, rsp); | 564 __ movp(rax, rsp); |
565 __ PushReturnAddressFrom(scratch1); | 565 __ PushReturnAddressFrom(scratch1); |
566 | 566 |
567 GenerateFastApiCallBody(masm, optimization, argc, true); | 567 GenerateFastApiCallBody(masm, optimization, argc, true); |
568 } | 568 } |
569 | 569 |
570 | 570 |
571 static void GenerateFastApiCallBody(MacroAssembler* masm, | 571 static void GenerateFastApiCallBody(MacroAssembler* masm, |
572 const CallOptimization& optimization, | 572 const CallOptimization& optimization, |
573 int argc, | 573 int argc, |
574 bool restore_context) { | 574 bool restore_context) { |
(...skipping 14 matching lines...)
589 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | 589 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
590 // Function address is a foreign pointer outside V8's heap. | 590 // Function address is a foreign pointer outside V8's heap. |
591 Address function_address = v8::ToCData<Address>(api_call_info->callback()); | 591 Address function_address = v8::ToCData<Address>(api_call_info->callback()); |
592 | 592 |
593 // Allocate the v8::Arguments structure in the arguments' space since | 593 // Allocate the v8::Arguments structure in the arguments' space since |
594 // it's not controlled by GC. | 594 // it's not controlled by GC. |
595 const int kApiStackSpace = 4; | 595 const int kApiStackSpace = 4; |
596 | 596 |
597 __ PrepareCallApiFunction(kApiStackSpace); | 597 __ PrepareCallApiFunction(kApiStackSpace); |
598 | 598 |
599 __ movq(StackSpaceOperand(0), rax); // FunctionCallbackInfo::implicit_args_. | 599 __ movp(StackSpaceOperand(0), rax); // FunctionCallbackInfo::implicit_args_. |
600 __ addq(rax, Immediate((argc + kFastApiCallArguments - 1) * kPointerSize)); | 600 __ addq(rax, Immediate((argc + kFastApiCallArguments - 1) * kPointerSize)); |
601 __ movq(StackSpaceOperand(1), rax); // FunctionCallbackInfo::values_. | 601 __ movp(StackSpaceOperand(1), rax); // FunctionCallbackInfo::values_. |
602 __ Set(StackSpaceOperand(2), argc); // FunctionCallbackInfo::length_. | 602 __ Set(StackSpaceOperand(2), argc); // FunctionCallbackInfo::length_. |
603 // FunctionCallbackInfo::is_construct_call_. | 603 // FunctionCallbackInfo::is_construct_call_. |
604 __ Set(StackSpaceOperand(3), 0); | 604 __ Set(StackSpaceOperand(3), 0); |
605 | 605 |
606 #if defined(__MINGW64__) || defined(_WIN64) | 606 #if defined(__MINGW64__) || defined(_WIN64) |
607 Register arguments_arg = rcx; | 607 Register arguments_arg = rcx; |
608 Register callback_arg = rdx; | 608 Register callback_arg = rdx; |
609 #else | 609 #else |
610 Register arguments_arg = rdi; | 610 Register arguments_arg = rdi; |
611 Register callback_arg = rsi; | 611 Register callback_arg = rsi; |
(...skipping 313 matching lines...)
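The four StackSpaceOperand writes above lay down a structure shaped like this; a sketch of the layout the callback thunk assumes (name and field spelling are illustrative; each field occupies one stack word):

    struct FunctionCallbackInfoLayout {
      void** implicit_args;        // StackSpaceOperand(0): rax, the 7 FCA slots
      void** values;               // StackSpaceOperand(1): rax + (argc + 7 - 1)
                                   //                       * kPointerSize
      intptr_t length;             // StackSpaceOperand(2): argc
      intptr_t is_construct_call;  // StackSpaceOperand(3): 0 on this path
    };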
925 __ TailCallExternalReference( | 925 __ TailCallExternalReference( |
926 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | 926 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
927 masm->isolate()), | 927 masm->isolate()), |
928 3, | 928 3, |
929 1); | 929 1); |
930 return; | 930 return; |
931 } | 931 } |
932 | 932 |
933 // Update the map of the object. | 933 // Update the map of the object. |
934 __ Move(scratch1, transition); | 934 __ Move(scratch1, transition); |
935 __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); | 935 __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); |
936 | 936 |
937 // Update the write barrier for the map field. | 937 // Update the write barrier for the map field. |
938 __ RecordWriteField(receiver_reg, | 938 __ RecordWriteField(receiver_reg, |
939 HeapObject::kMapOffset, | 939 HeapObject::kMapOffset, |
940 scratch1, | 940 scratch1, |
941 scratch2, | 941 scratch2, |
942 kDontSaveFPRegs, | 942 kDontSaveFPRegs, |
943 OMIT_REMEMBERED_SET, | 943 OMIT_REMEMBERED_SET, |
944 OMIT_SMI_CHECK); | 944 OMIT_SMI_CHECK); |
945 | 945 |
(...skipping 11 matching lines...)
957 // object and the number of in-object properties is not going to change. | 957 // object and the number of in-object properties is not going to change. |
958 index -= object->map()->inobject_properties(); | 958 index -= object->map()->inobject_properties(); |
959 | 959 |
960 // TODO(verwaest): Share this code as a code stub. | 960 // TODO(verwaest): Share this code as a code stub. |
961 SmiCheck smi_check = representation.IsTagged() | 961 SmiCheck smi_check = representation.IsTagged() |
962 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 962 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
963 if (index < 0) { | 963 if (index < 0) { |
964 // Set the property straight into the object. | 964 // Set the property straight into the object. |
965 int offset = object->map()->instance_size() + (index * kPointerSize); | 965 int offset = object->map()->instance_size() + (index * kPointerSize); |
966 if (FLAG_track_double_fields && representation.IsDouble()) { | 966 if (FLAG_track_double_fields && representation.IsDouble()) { |
967 __ movq(FieldOperand(receiver_reg, offset), storage_reg); | 967 __ movp(FieldOperand(receiver_reg, offset), storage_reg); |
968 } else { | 968 } else { |
969 __ movq(FieldOperand(receiver_reg, offset), value_reg); | 969 __ movp(FieldOperand(receiver_reg, offset), value_reg); |
970 } | 970 } |
971 | 971 |
972 if (!FLAG_track_fields || !representation.IsSmi()) { | 972 if (!FLAG_track_fields || !representation.IsSmi()) { |
973 // Update the write barrier for the array address. | 973 // Update the write barrier for the array address. |
974 if (!FLAG_track_double_fields || !representation.IsDouble()) { | 974 if (!FLAG_track_double_fields || !representation.IsDouble()) { |
975 __ movq(storage_reg, value_reg); | 975 __ movp(storage_reg, value_reg); |
976 } | 976 } |
977 __ RecordWriteField( | 977 __ RecordWriteField( |
978 receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs, | 978 receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs, |
979 EMIT_REMEMBERED_SET, smi_check); | 979 EMIT_REMEMBERED_SET, smi_check); |
980 } | 980 } |
981 } else { | 981 } else { |
982 // Write to the properties array. | 982 // Write to the properties array. |
983 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 983 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
984 // Get the properties array (optimistically). | 984 // Get the properties array (optimistically). |
985 __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); | 985 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); |
986 if (FLAG_track_double_fields && representation.IsDouble()) { | 986 if (FLAG_track_double_fields && representation.IsDouble()) { |
987 __ movq(FieldOperand(scratch1, offset), storage_reg); | 987 __ movp(FieldOperand(scratch1, offset), storage_reg); |
988 } else { | 988 } else { |
989 __ movq(FieldOperand(scratch1, offset), value_reg); | 989 __ movp(FieldOperand(scratch1, offset), value_reg); |
990 } | 990 } |
991 | 991 |
992 if (!FLAG_track_fields || !representation.IsSmi()) { | 992 if (!FLAG_track_fields || !representation.IsSmi()) { |
993 // Update the write barrier for the array address. | 993 // Update the write barrier for the array address. |
994 if (!FLAG_track_double_fields || !representation.IsDouble()) { | 994 if (!FLAG_track_double_fields || !representation.IsDouble()) { |
995 __ movq(storage_reg, value_reg); | 995 __ movp(storage_reg, value_reg); |
996 } | 996 } |
997 __ RecordWriteField( | 997 __ RecordWriteField( |
998 scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs, | 998 scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs, |
999 EMIT_REMEMBERED_SET, smi_check); | 999 EMIT_REMEMBERED_SET, smi_check); |
1000 } | 1000 } |
1001 } | 1001 } |
1002 | 1002 |
1003 // Return the value (register rax). | 1003 // Return the value (register rax). |
1004 ASSERT(value_reg.is(rax)); | 1004 ASSERT(value_reg.is(rax)); |
1005 __ ret(0); | 1005 __ ret(0); |
(...skipping 25 matching lines...)
1031 Representation representation = lookup->representation(); | 1031 Representation representation = lookup->representation(); |
1032 ASSERT(!representation.IsNone()); | 1032 ASSERT(!representation.IsNone()); |
1033 if (FLAG_track_fields && representation.IsSmi()) { | 1033 if (FLAG_track_fields && representation.IsSmi()) { |
1034 __ JumpIfNotSmi(value_reg, miss_label); | 1034 __ JumpIfNotSmi(value_reg, miss_label); |
1035 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 1035 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { |
1036 __ JumpIfSmi(value_reg, miss_label); | 1036 __ JumpIfSmi(value_reg, miss_label); |
1037 } else if (FLAG_track_double_fields && representation.IsDouble()) { | 1037 } else if (FLAG_track_double_fields && representation.IsDouble()) { |
1038 // Load the double storage. | 1038 // Load the double storage. |
1039 if (index < 0) { | 1039 if (index < 0) { |
1040 int offset = object->map()->instance_size() + (index * kPointerSize); | 1040 int offset = object->map()->instance_size() + (index * kPointerSize); |
1041 __ movq(scratch1, FieldOperand(receiver_reg, offset)); | 1041 __ movp(scratch1, FieldOperand(receiver_reg, offset)); |
1042 } else { | 1042 } else { |
1043 __ movq(scratch1, | 1043 __ movp(scratch1, |
1044 FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); | 1044 FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); |
1045 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 1045 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
1046 __ movq(scratch1, FieldOperand(scratch1, offset)); | 1046 __ movp(scratch1, FieldOperand(scratch1, offset)); |
1047 } | 1047 } |
1048 | 1048 |
1049 // Store the value into the storage. | 1049 // Store the value into the storage. |
1050 Label do_store, heap_number; | 1050 Label do_store, heap_number; |
1051 __ JumpIfNotSmi(value_reg, &heap_number); | 1051 __ JumpIfNotSmi(value_reg, &heap_number); |
1052 __ SmiToInteger32(scratch2, value_reg); | 1052 __ SmiToInteger32(scratch2, value_reg); |
1053 __ Cvtlsi2sd(xmm0, scratch2); | 1053 __ Cvtlsi2sd(xmm0, scratch2); |
1054 __ jmp(&do_store); | 1054 __ jmp(&do_store); |
1055 | 1055 |
1056 __ bind(&heap_number); | 1056 __ bind(&heap_number); |
1057 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), | 1057 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), |
1058 miss_label, DONT_DO_SMI_CHECK); | 1058 miss_label, DONT_DO_SMI_CHECK); |
1059 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); | 1059 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); |
1060 __ bind(&do_store); | 1060 __ bind(&do_store); |
1061 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0); | 1061 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0); |
1062 // Return the value (register rax). | 1062 // Return the value (register rax). |
1063 ASSERT(value_reg.is(rax)); | 1063 ASSERT(value_reg.is(rax)); |
1064 __ ret(0); | 1064 __ ret(0); |
1065 return; | 1065 return; |
1066 } | 1066 } |
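The heap_number/do_store split above reduces to this scalar computation (a sketch; the miss path is implicit in CheckMap):

    static double StoredDouble(Object* value) {          // value_reg == rax
      if (value->IsSmi())
        return static_cast<double>(Smi::cast(value)->value());  // Cvtlsi2sd
      return HeapNumber::cast(value)->value();                  // movsd load
    }
    // The result overwrites the existing storage's HeapNumber::kValueOffset,
    // which is why the double path needs no write barrier.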
1067 | 1067 |
1068 // TODO(verwaest): Share this code as a code stub. | 1068 // TODO(verwaest): Share this code as a code stub. |
1069 SmiCheck smi_check = representation.IsTagged() | 1069 SmiCheck smi_check = representation.IsTagged() |
1070 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 1070 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
1071 if (index < 0) { | 1071 if (index < 0) { |
1072 // Set the property straight into the object. | 1072 // Set the property straight into the object. |
1073 int offset = object->map()->instance_size() + (index * kPointerSize); | 1073 int offset = object->map()->instance_size() + (index * kPointerSize); |
1074 __ movq(FieldOperand(receiver_reg, offset), value_reg); | 1074 __ movp(FieldOperand(receiver_reg, offset), value_reg); |
1075 | 1075 |
1076 if (!FLAG_track_fields || !representation.IsSmi()) { | 1076 if (!FLAG_track_fields || !representation.IsSmi()) { |
1077 // Update the write barrier for the array address. | 1077 // Update the write barrier for the array address. |
1078 // Pass the value being stored in the now unused name_reg. | 1078 // Pass the value being stored in the now unused name_reg. |
1079 __ movq(name_reg, value_reg); | 1079 __ movp(name_reg, value_reg); |
1080 __ RecordWriteField( | 1080 __ RecordWriteField( |
1081 receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs, | 1081 receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs, |
1082 EMIT_REMEMBERED_SET, smi_check); | 1082 EMIT_REMEMBERED_SET, smi_check); |
1083 } | 1083 } |
1084 } else { | 1084 } else { |
1085 // Write to the properties array. | 1085 // Write to the properties array. |
1086 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 1086 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
1087 // Get the properties array (optimistically). | 1087 // Get the properties array (optimistically). |
1088 __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); | 1088 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); |
1089 __ movq(FieldOperand(scratch1, offset), value_reg); | 1089 __ movp(FieldOperand(scratch1, offset), value_reg); |
1090 | 1090 |
1091 if (!FLAG_track_fields || !representation.IsSmi()) { | 1091 if (!FLAG_track_fields || !representation.IsSmi()) { |
1092 // Update the write barrier for the array address. | 1092 // Update the write barrier for the array address. |
1093 // Pass the value being stored in the now unused name_reg. | 1093 // Pass the value being stored in the now unused name_reg. |
1094 __ movq(name_reg, value_reg); | 1094 __ movp(name_reg, value_reg); |
1095 __ RecordWriteField( | 1095 __ RecordWriteField( |
1096 scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs, | 1096 scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs, |
1097 EMIT_REMEMBERED_SET, smi_check); | 1097 EMIT_REMEMBERED_SET, smi_check); |
1098 } | 1098 } |
1099 } | 1099 } |
1100 | 1100 |
1101 // Return the value (register rax). | 1101 // Return the value (register rax). |
1102 ASSERT(value_reg.is(rax)); | 1102 ASSERT(value_reg.is(rax)); |
1103 __ ret(0); | 1103 __ ret(0); |
1104 } | 1104 } |
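Both store helpers above share one barrier policy; sketched as a predicate, mirroring the FLAG checks in the code (illustrative helper, not V8 API):

    static bool NeedsWriteBarrier(Representation r) {
      if (FLAG_track_fields && r.IsSmi()) return false;  // no pointer stored
      if (FLAG_track_double_fields && r.IsDouble()) return false;  // in-place
      return true;  // pointer store: RecordWriteField, with an inline smi
                    // check only for tagged representations (INLINE_SMI_CHECK)
    }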
(...skipping 33 matching lines...)
1138 // it is an alias for holder_reg. | 1138 // it is an alias for holder_reg. |
1139 Register reg = object_reg; | 1139 Register reg = object_reg; |
1140 int depth = 0; | 1140 int depth = 0; |
1141 | 1141 |
1142 StackArgumentsAccessor args(rsp, kFastApiCallArguments, | 1142 StackArgumentsAccessor args(rsp, kFastApiCallArguments, |
1143 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1143 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1144 const int kHolderIndex = kFastApiCallArguments - 1 - | 1144 const int kHolderIndex = kFastApiCallArguments - 1 - |
1145 FunctionCallbackArguments::kHolderIndex; | 1145 FunctionCallbackArguments::kHolderIndex; |
1146 | 1146 |
1147 if (save_at_depth == depth) { | 1147 if (save_at_depth == depth) { |
1148 __ movq(args.GetArgumentOperand(kHolderIndex), object_reg); | 1148 __ movp(args.GetArgumentOperand(kHolderIndex), object_reg); |
1149 } | 1149 } |
1150 | 1150 |
1151 Handle<JSObject> current = Handle<JSObject>::null(); | 1151 Handle<JSObject> current = Handle<JSObject>::null(); |
1152 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); | 1152 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); |
1153 Handle<JSObject> prototype = Handle<JSObject>::null(); | 1153 Handle<JSObject> prototype = Handle<JSObject>::null(); |
1154 Handle<Map> current_map = receiver_map; | 1154 Handle<Map> current_map = receiver_map; |
1155 Handle<Map> holder_map(holder->map()); | 1155 Handle<Map> holder_map(holder->map()); |
1156 // Traverse the prototype chain and check the maps in the prototype chain for | 1156 // Traverse the prototype chain and check the maps in the prototype chain for |
1157 // fast and global objects or do negative lookup for normal objects. | 1157 // fast and global objects or do negative lookup for normal objects. |
1158 while (!current_map.is_identical_to(holder_map)) { | 1158 while (!current_map.is_identical_to(holder_map)) { |
(...skipping 12 matching lines...)
1171 ASSERT(name->IsString()); | 1171 ASSERT(name->IsString()); |
1172 name = factory()->InternalizeString(Handle<String>::cast(name)); | 1172 name = factory()->InternalizeString(Handle<String>::cast(name)); |
1173 } | 1173 } |
1174 ASSERT(current.is_null() || | 1174 ASSERT(current.is_null() || |
1175 current->property_dictionary()->FindEntry(*name) == | 1175 current->property_dictionary()->FindEntry(*name) == |
1176 NameDictionary::kNotFound); | 1176 NameDictionary::kNotFound); |
1177 | 1177 |
1178 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, | 1178 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, |
1179 scratch1, scratch2); | 1179 scratch1, scratch2); |
1180 | 1180 |
1181 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); | 1181 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); |
1182 reg = holder_reg; // From now on the object will be in holder_reg. | 1182 reg = holder_reg; // From now on the object will be in holder_reg. |
1183 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); | 1183 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); |
1184 } else { | 1184 } else { |
1185 bool in_new_space = heap()->InNewSpace(*prototype); | 1185 bool in_new_space = heap()->InNewSpace(*prototype); |
1186 if (in_new_space) { | 1186 if (in_new_space) { |
1187 // Save the map in scratch1 for later. | 1187 // Save the map in scratch1 for later. |
1188 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); | 1188 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); |
1189 } | 1189 } |
1190 if (depth != 1 || check == CHECK_ALL_MAPS) { | 1190 if (depth != 1 || check == CHECK_ALL_MAPS) { |
1191 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK); | 1191 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK); |
1192 } | 1192 } |
1193 | 1193 |
1194 // Check access rights to the global object. This has to happen after | 1194 // Check access rights to the global object. This has to happen after |
1195 // the map check so that we know that the object is actually a global | 1195 // the map check so that we know that the object is actually a global |
1196 // object. | 1196 // object. |
1197 if (current_map->IsJSGlobalProxyMap()) { | 1197 if (current_map->IsJSGlobalProxyMap()) { |
1198 __ CheckAccessGlobalProxy(reg, scratch2, miss); | 1198 __ CheckAccessGlobalProxy(reg, scratch2, miss); |
1199 } else if (current_map->IsJSGlobalObjectMap()) { | 1199 } else if (current_map->IsJSGlobalObjectMap()) { |
1200 GenerateCheckPropertyCell( | 1200 GenerateCheckPropertyCell( |
1201 masm(), Handle<JSGlobalObject>::cast(current), name, | 1201 masm(), Handle<JSGlobalObject>::cast(current), name, |
1202 scratch2, miss); | 1202 scratch2, miss); |
1203 } | 1203 } |
1204 reg = holder_reg; // From now on the object will be in holder_reg. | 1204 reg = holder_reg; // From now on the object will be in holder_reg. |
1205 | 1205 |
1206 if (in_new_space) { | 1206 if (in_new_space) { |
1207 // The prototype is in new space; we cannot store a reference to it | 1207 // The prototype is in new space; we cannot store a reference to it |
1208 // in the code. Load it from the map. | 1208 // in the code. Load it from the map. |
1209 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); | 1209 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); |
1210 } else { | 1210 } else { |
1211 // The prototype is in old space; load it directly. | 1211 // The prototype is in old space; load it directly. |
1212 __ Move(reg, prototype); | 1212 __ Move(reg, prototype); |
1213 } | 1213 } |
1214 } | 1214 } |
1215 | 1215 |
1216 if (save_at_depth == depth) { | 1216 if (save_at_depth == depth) { |
1217 __ movq(args.GetArgumentOperand(kHolderIndex), reg); | 1217 __ movp(args.GetArgumentOperand(kHolderIndex), reg); |
1218 } | 1218 } |
1219 | 1219 |
1220 // Go to the next object in the prototype chain. | 1220 // Go to the next object in the prototype chain. |
1221 current = prototype; | 1221 current = prototype; |
1222 current_map = handle(current->map()); | 1222 current_map = handle(current->map()); |
1223 } | 1223 } |
1224 | 1224 |
1225 // Log the check depth. | 1225 // Log the check depth. |
1226 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); | 1226 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); |
1227 | 1227 |
(...skipping 46 matching lines...)
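The prototype-chain loop above in outline (commented pseudocode; handle plumbing elided):

    // reg = object_reg;
    // while (current_map != holder_map) {
    //   if current_map is a dictionary map:
    //     prove `name` is absent via GenerateDictionaryNegativeLookup, then
    //     advance through the map's prototype slot.
    //   else:
    //     CheckMap (skipped at depth 1 unless CHECK_ALL_MAPS), then an access
    //     check for global proxies or a property-cell check for global
    //     objects; the prototype is loaded from the map if it is in new
    //     space, otherwise embedded in the code directly.
    //   current = prototype; current_map = current->map();
    // }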
1274 | 1274 |
1275 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss); | 1275 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss); |
1276 | 1276 |
1277 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) { | 1277 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) { |
1278 ASSERT(!reg.is(scratch2())); | 1278 ASSERT(!reg.is(scratch2())); |
1279 ASSERT(!reg.is(scratch3())); | 1279 ASSERT(!reg.is(scratch3())); |
1280 ASSERT(!reg.is(scratch4())); | 1280 ASSERT(!reg.is(scratch4())); |
1281 | 1281 |
1282 // Load the properties dictionary. | 1282 // Load the properties dictionary. |
1283 Register dictionary = scratch4(); | 1283 Register dictionary = scratch4(); |
1284 __ movq(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset)); | 1284 __ movp(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset)); |
1285 | 1285 |
1286 // Probe the dictionary. | 1286 // Probe the dictionary. |
1287 Label probe_done; | 1287 Label probe_done; |
1288 NameDictionaryLookupStub::GeneratePositiveLookup(masm(), | 1288 NameDictionaryLookupStub::GeneratePositiveLookup(masm(), |
1289 &miss, | 1289 &miss, |
1290 &probe_done, | 1290 &probe_done, |
1291 dictionary, | 1291 dictionary, |
1292 this->name(), | 1292 this->name(), |
1293 scratch2(), | 1293 scratch2(), |
1294 scratch3()); | 1294 scratch3()); |
1295 __ bind(&probe_done); | 1295 __ bind(&probe_done); |
1296 | 1296 |
1297 // If probing finds an entry in the dictionary, scratch3 contains the | 1297 // If probing finds an entry in the dictionary, scratch3 contains the |
1298 // index into the dictionary. Check that the value is the callback. | 1298 // index into the dictionary. Check that the value is the callback. |
1299 Register index = scratch3(); | 1299 Register index = scratch3(); |
1300 const int kElementsStartOffset = | 1300 const int kElementsStartOffset = |
1301 NameDictionary::kHeaderSize + | 1301 NameDictionary::kHeaderSize + |
1302 NameDictionary::kElementsStartIndex * kPointerSize; | 1302 NameDictionary::kElementsStartIndex * kPointerSize; |
1303 const int kValueOffset = kElementsStartOffset + kPointerSize; | 1303 const int kValueOffset = kElementsStartOffset + kPointerSize; |
1304 __ movq(scratch2(), | 1304 __ movp(scratch2(), |
1305 Operand(dictionary, index, times_pointer_size, | 1305 Operand(dictionary, index, times_pointer_size, |
1306 kValueOffset - kHeapObjectTag)); | 1306 kValueOffset - kHeapObjectTag)); |
1307 __ Move(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT); | 1307 __ Move(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT); |
1308 __ cmpq(scratch2(), scratch3()); | 1308 __ cmpq(scratch2(), scratch3()); |
1309 __ j(not_equal, &miss); | 1309 __ j(not_equal, &miss); |
1310 } | 1310 } |
1311 | 1311 |
1312 HandlerFrontendFooter(name, &miss); | 1312 HandlerFrontendFooter(name, &miss); |
1313 return reg; | 1313 return reg; |
1314 } | 1314 } |
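The kValueOffset arithmetic above: NameDictionary entries are (key, value, details) triples, so the probed entry's value sits one pointer past the elements start; sketched:

    // value address = dictionary
    //               + NameDictionary::kHeaderSize
    //               + NameDictionary::kElementsStartIndex * kPointerSize
    //               + index * kPointerSize  // left by the positive lookup
    //               + kPointerSize          // skip the entry's key slot
    //               - kHeapObjectTag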
1315 | 1315 |
1316 | 1316 |
1317 void LoadStubCompiler::GenerateLoadField(Register reg, | 1317 void LoadStubCompiler::GenerateLoadField(Register reg, |
1318 Handle<JSObject> holder, | 1318 Handle<JSObject> holder, |
1319 PropertyIndex field, | 1319 PropertyIndex field, |
1320 Representation representation) { | 1320 Representation representation) { |
1321 if (!reg.is(receiver())) __ movq(receiver(), reg); | 1321 if (!reg.is(receiver())) __ movp(receiver(), reg); |
1322 if (kind() == Code::LOAD_IC) { | 1322 if (kind() == Code::LOAD_IC) { |
1323 LoadFieldStub stub(field.is_inobject(holder), | 1323 LoadFieldStub stub(field.is_inobject(holder), |
1324 field.translate(holder), | 1324 field.translate(holder), |
1325 representation); | 1325 representation); |
1326 GenerateTailCall(masm(), stub.GetCode(isolate())); | 1326 GenerateTailCall(masm(), stub.GetCode(isolate())); |
1327 } else { | 1327 } else { |
1328 KeyedLoadFieldStub stub(field.is_inobject(holder), | 1328 KeyedLoadFieldStub stub(field.is_inobject(holder), |
1329 field.translate(holder), | 1329 field.translate(holder), |
1330 representation); | 1330 representation); |
1331 GenerateTailCall(masm(), stub.GetCode(isolate())); | 1331 GenerateTailCall(masm(), stub.GetCode(isolate())); |
(...skipping 48 matching lines...)
1380 Register getter_arg = r8; | 1380 Register getter_arg = r8; |
1381 Register accessor_info_arg = rdx; | 1381 Register accessor_info_arg = rdx; |
1382 Register name_arg = rcx; | 1382 Register name_arg = rcx; |
1383 #else | 1383 #else |
1384 Register getter_arg = rdx; | 1384 Register getter_arg = rdx; |
1385 Register accessor_info_arg = rsi; | 1385 Register accessor_info_arg = rsi; |
1386 Register name_arg = rdi; | 1386 Register name_arg = rdi; |
1387 #endif | 1387 #endif |
1388 | 1388 |
1389 ASSERT(!name_arg.is(scratch4())); | 1389 ASSERT(!name_arg.is(scratch4())); |
1390 __ movq(name_arg, rsp); | 1390 __ movp(name_arg, rsp); |
1391 __ PushReturnAddressFrom(scratch4()); | 1391 __ PushReturnAddressFrom(scratch4()); |
1392 | 1392 |
1393 // v8::Arguments::values_ and handler for name. | 1393 // v8::Arguments::values_ and handler for name. |
1394 const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1; | 1394 const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1; |
1395 | 1395 |
1396 // Allocate v8::AccessorInfo in non-GCed stack space. | 1396 // Allocate v8::AccessorInfo in non-GCed stack space. |
1397 const int kArgStackSpace = 1; | 1397 const int kArgStackSpace = 1; |
1398 | 1398 |
1399 __ PrepareCallApiFunction(kArgStackSpace); | 1399 __ PrepareCallApiFunction(kArgStackSpace); |
1400 __ lea(rax, Operand(name_arg, 1 * kPointerSize)); | 1400 __ lea(rax, Operand(name_arg, 1 * kPointerSize)); |
1401 | 1401 |
1402 // v8::PropertyAccessorInfo::args_. | 1402 // v8::PropertyAccessorInfo::args_. |
1403 __ movq(StackSpaceOperand(0), rax); | 1403 __ movp(StackSpaceOperand(0), rax); |
1404 | 1404 |
1405 // The context register (rsi) has been saved in PrepareCallApiFunction and | 1405 // The context register (rsi) has been saved in PrepareCallApiFunction and |
1406 // could be used to pass arguments. | 1406 // could be used to pass arguments. |
1407 __ lea(accessor_info_arg, StackSpaceOperand(0)); | 1407 __ lea(accessor_info_arg, StackSpaceOperand(0)); |
1408 | 1408 |
1409 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); | 1409 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); |
1410 | 1410 |
1411 // The name handler is counted as an argument. | 1411 // The name handler is counted as an argument. |
1412 StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength); | 1412 StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength); |
1413 Operand return_value_operand = args.GetArgumentOperand( | 1413 Operand return_value_operand = args.GetArgumentOperand( |
(...skipping 122 matching lines...)
1536 __ j(not_equal, miss); | 1536 __ j(not_equal, miss); |
1537 } | 1537 } |
1538 | 1538 |
1539 | 1539 |
1540 void CallStubCompiler::GenerateLoadFunctionFromCell( | 1540 void CallStubCompiler::GenerateLoadFunctionFromCell( |
1541 Handle<Cell> cell, | 1541 Handle<Cell> cell, |
1542 Handle<JSFunction> function, | 1542 Handle<JSFunction> function, |
1543 Label* miss) { | 1543 Label* miss) { |
1544 // Get the value from the cell. | 1544 // Get the value from the cell. |
1545 __ Move(rdi, cell); | 1545 __ Move(rdi, cell); |
1546 __ movq(rdi, FieldOperand(rdi, Cell::kValueOffset)); | 1546 __ movp(rdi, FieldOperand(rdi, Cell::kValueOffset)); |
1547 | 1547 |
1548 // Check that the cell contains the same function. | 1548 // Check that the cell contains the same function. |
1549 if (heap()->InNewSpace(*function)) { | 1549 if (heap()->InNewSpace(*function)) { |
1550 // We can't embed a pointer to a function in new space so we have | 1550 // We can't embed a pointer to a function in new space so we have |
1551 // to verify that the shared function info is unchanged. This has | 1551 // to verify that the shared function info is unchanged. This has |
1552 // the nice side effect that multiple closures based on the same | 1552 // the nice side effect that multiple closures based on the same |
1553 // function can all use this call IC. Before we load through the | 1553 // function can all use this call IC. Before we load through the |
1554 // function, we have to verify that it still is a function. | 1554 // function, we have to verify that it still is a function. |
1555 GenerateFunctionCheck(rdi, rax, miss); | 1555 GenerateFunctionCheck(rdi, rax, miss); |
1556 | 1556 |
(...skipping 53 matching lines...)
1610 } | 1610 } |
1611 | 1611 |
1612 Label miss; | 1612 Label miss; |
1613 | 1613 |
1614 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | 1614 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); |
1615 | 1615 |
1616 const int argc = arguments().immediate(); | 1616 const int argc = arguments().immediate(); |
1617 StackArgumentsAccessor args(rsp, argc); | 1617 StackArgumentsAccessor args(rsp, argc); |
1618 if (argc == 0) { | 1618 if (argc == 0) { |
1619 // Noop, return the length. | 1619 // Noop, return the length. |
1620 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); | 1620 __ movp(rax, FieldOperand(rdx, JSArray::kLengthOffset)); |
1621 __ ret((argc + 1) * kPointerSize); | 1621 __ ret((argc + 1) * kPointerSize); |
1622 } else { | 1622 } else { |
1623 Label call_builtin; | 1623 Label call_builtin; |
1624 | 1624 |
1625 if (argc == 1) { // Otherwise fall through to call builtin. | 1625 if (argc == 1) { // Otherwise fall through to call builtin. |
1626 Label attempt_to_grow_elements, with_write_barrier, check_double; | 1626 Label attempt_to_grow_elements, with_write_barrier, check_double; |
1627 | 1627 |
1628 // Get the elements array of the object. | 1628 // Get the elements array of the object. |
1629 __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); | 1629 __ movp(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); |
1630 | 1630 |
1631 // Check that the elements are in fast mode and writable. | 1631 // Check that the elements are in fast mode and writable. |
1632 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), | 1632 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), |
1633 factory()->fixed_array_map()); | 1633 factory()->fixed_array_map()); |
1634 __ j(not_equal, &check_double); | 1634 __ j(not_equal, &check_double); |
1635 | 1635 |
1636 // Get the array's length into rax and calculate new length. | 1636 // Get the array's length into rax and calculate new length. |
1637 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset)); | 1637 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset)); |
1638 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); | 1638 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); |
1639 __ addl(rax, Immediate(argc)); | 1639 __ addl(rax, Immediate(argc)); |
1640 | 1640 |
1641 // Get the elements' length into rcx. | 1641 // Get the elements' length into rcx. |
1642 __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset)); | 1642 __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset)); |
1643 | 1643 |
1644 // Check whether the new length still fits without allocating. | 1644 // Check whether the new length still fits without allocating. |
1645 __ cmpl(rax, rcx); | 1645 __ cmpl(rax, rcx); |
1646 __ j(greater, &attempt_to_grow_elements); | 1646 __ j(greater, &attempt_to_grow_elements); |
1647 | 1647 |
1648 // Check if value is a smi. | 1648 // Check if value is a smi. |
1649 __ movq(rcx, args.GetArgumentOperand(1)); | 1649 __ movp(rcx, args.GetArgumentOperand(1)); |
1650 __ JumpIfNotSmi(rcx, &with_write_barrier); | 1650 __ JumpIfNotSmi(rcx, &with_write_barrier); |
1651 | 1651 |
1652 // Save new length. | 1652 // Save new length. |
1653 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1653 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
1654 | 1654 |
1655 // Store the value. | 1655 // Store the value. |
1656 __ movq(FieldOperand(rdi, | 1656 __ movp(FieldOperand(rdi, |
1657 rax, | 1657 rax, |
1658 times_pointer_size, | 1658 times_pointer_size, |
1659 FixedArray::kHeaderSize - argc * kPointerSize), | 1659 FixedArray::kHeaderSize - argc * kPointerSize), |
1660 rcx); | 1660 rcx); |
1661 | 1661 |
1662 __ Integer32ToSmi(rax, rax); // Return new length as smi. | 1662 __ Integer32ToSmi(rax, rax); // Return new length as smi. |
1663 __ ret((argc + 1) * kPointerSize); | 1663 __ ret((argc + 1) * kPointerSize); |
1664 | 1664 |
1665 __ bind(&check_double); | 1665 __ bind(&check_double); |
1666 | 1666 |
1667 // Check that the elements are in double mode. | 1667 // Check that the elements are in double mode. |
1668 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), | 1668 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), |
1669 factory()->fixed_double_array_map()); | 1669 factory()->fixed_double_array_map()); |
1670 __ j(not_equal, &call_builtin); | 1670 __ j(not_equal, &call_builtin); |
1671 | 1671 |
1672 // Get the array's length into rax and calculate new length. | 1672 // Get the array's length into rax and calculate new length. |
1673 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset)); | 1673 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset)); |
1674 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); | 1674 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); |
1675 __ addl(rax, Immediate(argc)); | 1675 __ addl(rax, Immediate(argc)); |
1676 | 1676 |
1677 // Get the elements' length into rcx. | 1677 // Get the elements' length into rcx. |
1678 __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset)); | 1678 __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset)); |
1679 | 1679 |
1680 // Check whether the new length still fits without allocating. | 1680 // Check whether the new length still fits without allocating. |
1681 __ cmpl(rax, rcx); | 1681 __ cmpl(rax, rcx); |
1682 __ j(greater, &call_builtin); | 1682 __ j(greater, &call_builtin); |
1683 | 1683 |
1684 __ movq(rcx, args.GetArgumentOperand(1)); | 1684 __ movp(rcx, args.GetArgumentOperand(1)); |
1685 __ StoreNumberToDoubleElements( | 1685 __ StoreNumberToDoubleElements( |
1686 rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize); | 1686 rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize); |
1687 | 1687 |
1688 // Save new length. | 1688 // Save new length. |
1689 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1689 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
1690 __ Integer32ToSmi(rax, rax); // Return new length as smi. | 1690 __ Integer32ToSmi(rax, rax); // Return new length as smi. |
1691 __ ret((argc + 1) * kPointerSize); | 1691 __ ret((argc + 1) * kPointerSize); |
1692 | 1692 |
1693 __ bind(&with_write_barrier); | 1693 __ bind(&with_write_barrier); |
1694 | 1694 |
1695 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); | 1695 __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); |
1696 | 1696 |
1697 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { | 1697 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { |
1698 Label fast_object, not_fast_object; | 1698 Label fast_object, not_fast_object; |
1699 __ CheckFastObjectElements(rbx, ¬_fast_object, Label::kNear); | 1699 __ CheckFastObjectElements(rbx, ¬_fast_object, Label::kNear); |
1700 __ jmp(&fast_object); | 1700 __ jmp(&fast_object); |
1701 // In case of fast smi-only, convert to fast object; otherwise bail out. | 1701 // In case of fast smi-only, convert to fast object; otherwise bail out. |
1702 __ bind(¬_fast_object); | 1702 __ bind(¬_fast_object); |
1703 __ CheckFastSmiElements(rbx, &call_builtin); | 1703 __ CheckFastSmiElements(rbx, &call_builtin); |
1704 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), | 1704 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), |
1705 factory()->heap_number_map()); | 1705 factory()->heap_number_map()); |
1706 __ j(equal, &call_builtin); | 1706 __ j(equal, &call_builtin); |
1707 // rdx: receiver | 1707 // rdx: receiver |
1708 // rbx: map | 1708 // rbx: map |
1709 | 1709 |
1710 Label try_holey_map; | 1710 Label try_holey_map; |
1711 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | 1711 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
1712 FAST_ELEMENTS, | 1712 FAST_ELEMENTS, |
1713 rbx, | 1713 rbx, |
1714 rdi, | 1714 rdi, |
1715 &try_holey_map); | 1715 &try_holey_map); |
1716 | 1716 |
1717 ElementsTransitionGenerator:: | 1717 ElementsTransitionGenerator:: |
1718 GenerateMapChangeElementsTransition(masm(), | 1718 GenerateMapChangeElementsTransition(masm(), |
1719 DONT_TRACK_ALLOCATION_SITE, | 1719 DONT_TRACK_ALLOCATION_SITE, |
1720 NULL); | 1720 NULL); |
1721 // Restore rdi. | 1721 // Restore rdi. |
1722 __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); | 1722 __ movp(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); |
1723 __ jmp(&fast_object); | 1723 __ jmp(&fast_object); |
1724 | 1724 |
1725 __ bind(&try_holey_map); | 1725 __ bind(&try_holey_map); |
1726 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS, | 1726 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS, |
1727 FAST_HOLEY_ELEMENTS, | 1727 FAST_HOLEY_ELEMENTS, |
1728 rbx, | 1728 rbx, |
1729 rdi, | 1729 rdi, |
1730 &call_builtin); | 1730 &call_builtin); |
1731 ElementsTransitionGenerator:: | 1731 ElementsTransitionGenerator:: |
1732 GenerateMapChangeElementsTransition(masm(), | 1732 GenerateMapChangeElementsTransition(masm(), |
1733 DONT_TRACK_ALLOCATION_SITE, | 1733 DONT_TRACK_ALLOCATION_SITE, |
1734 NULL); | 1734 NULL); |
1735 __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); | 1735 __ movp(rdi, FieldOperand(rdx, JSArray::kElementsOffset)); |
1736 __ bind(&fast_object); | 1736 __ bind(&fast_object); |
1737 } else { | 1737 } else { |
1738 __ CheckFastObjectElements(rbx, &call_builtin); | 1738 __ CheckFastObjectElements(rbx, &call_builtin); |
1739 } | 1739 } |
1740 | 1740 |
1741 // Save new length. | 1741 // Save new length. |
1742 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1742 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
1743 | 1743 |
1744 // Store the value. | 1744 // Store the value. |
1745 __ lea(rdx, FieldOperand(rdi, | 1745 __ lea(rdx, FieldOperand(rdi, |
1746 rax, times_pointer_size, | 1746 rax, times_pointer_size, |
1747 FixedArray::kHeaderSize - argc * kPointerSize)); | 1747 FixedArray::kHeaderSize - argc * kPointerSize)); |
1748 __ movq(Operand(rdx, 0), rcx); | 1748 __ movp(Operand(rdx, 0), rcx); |
1749 | 1749 |
1750 __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | 1750 __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, |
1751 OMIT_SMI_CHECK); | 1751 OMIT_SMI_CHECK); |
1752 | 1752 |
1753 __ Integer32ToSmi(rax, rax); // Return new length as smi. | 1753 __ Integer32ToSmi(rax, rax); // Return new length as smi. |
1754 __ ret((argc + 1) * kPointerSize); | 1754 __ ret((argc + 1) * kPointerSize); |
1755 | 1755 |
1756 __ bind(&attempt_to_grow_elements); | 1756 __ bind(&attempt_to_grow_elements); |
1757 if (!FLAG_inline_new) { | 1757 if (!FLAG_inline_new) { |
1758 __ jmp(&call_builtin); | 1758 __ jmp(&call_builtin); |
1759 } | 1759 } |
1760 | 1760 |
1761 __ movq(rbx, args.GetArgumentOperand(1)); | 1761 __ movp(rbx, args.GetArgumentOperand(1)); |
1762 // Growing elements that are SMI-only requires special handling in case | 1762 // Growing elements that are SMI-only requires special handling in case |
1763 // the new element is non-Smi. For now, delegate to the builtin. | 1763 // the new element is non-Smi. For now, delegate to the builtin. |
1764 Label no_fast_elements_check; | 1764 Label no_fast_elements_check; |
1765 __ JumpIfSmi(rbx, &no_fast_elements_check); | 1765 __ JumpIfSmi(rbx, &no_fast_elements_check); |
1766 __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); | 1766 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); |
1767 __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar); | 1767 __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar); |
1768 __ bind(&no_fast_elements_check); | 1768 __ bind(&no_fast_elements_check); |
1769 | 1769 |
1770 ExternalReference new_space_allocation_top = | 1770 ExternalReference new_space_allocation_top = |
1771 ExternalReference::new_space_allocation_top_address(isolate()); | 1771 ExternalReference::new_space_allocation_top_address(isolate()); |
1772 ExternalReference new_space_allocation_limit = | 1772 ExternalReference new_space_allocation_limit = |
1773 ExternalReference::new_space_allocation_limit_address(isolate()); | 1773 ExternalReference::new_space_allocation_limit_address(isolate()); |
1774 | 1774 |
1775 const int kAllocationDelta = 4; | 1775 const int kAllocationDelta = 4; |
1776 // Load top. | 1776 // Load top. |
1777 __ Load(rcx, new_space_allocation_top); | 1777 __ Load(rcx, new_space_allocation_top); |
1778 | 1778 |
1779 // Check if it's the end of elements. | 1779 // Check if it's the end of elements. |
1780 __ lea(rdx, FieldOperand(rdi, | 1780 __ lea(rdx, FieldOperand(rdi, |
1781 rax, times_pointer_size, | 1781 rax, times_pointer_size, |
1782 FixedArray::kHeaderSize - argc * kPointerSize)); | 1782 FixedArray::kHeaderSize - argc * kPointerSize)); |
1783 __ cmpq(rdx, rcx); | 1783 __ cmpq(rdx, rcx); |
1784 __ j(not_equal, &call_builtin); | 1784 __ j(not_equal, &call_builtin); |
1785 __ addq(rcx, Immediate(kAllocationDelta * kPointerSize)); | 1785 __ addq(rcx, Immediate(kAllocationDelta * kPointerSize)); |
1786 Operand limit_operand = | 1786 Operand limit_operand = |
1787 masm()->ExternalOperand(new_space_allocation_limit); | 1787 masm()->ExternalOperand(new_space_allocation_limit); |
1788 __ cmpq(rcx, limit_operand); | 1788 __ cmpq(rcx, limit_operand); |
1789 __ j(above, &call_builtin); | 1789 __ j(above, &call_builtin); |
1790 | 1790 |
1791 // We fit, so grow the elements array in place. | 1791 // We fit, so grow the elements array in place. |
1792 __ Store(new_space_allocation_top, rcx); | 1792 __ Store(new_space_allocation_top, rcx); |
1793 | 1793 |
1794 // Push the argument... | 1794 // Push the argument... |
1795 __ movq(Operand(rdx, 0), rbx); | 1795 __ movp(Operand(rdx, 0), rbx); |
1796 // ... and fill the rest with holes. | 1796 // ... and fill the rest with holes. |
1797 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 1797 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
1798 for (int i = 1; i < kAllocationDelta; i++) { | 1798 for (int i = 1; i < kAllocationDelta; i++) { |
1799 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); | 1799 __ movp(Operand(rdx, i * kPointerSize), kScratchRegister); |
1800 } | 1800 } |
1801 | 1801 |
1802 // We know the elements array is in new space so we don't need the | 1802 // We know the elements array is in new space so we don't need the |
1803 // remembered set, but we just pushed a value onto the array so we may have to | 1803 // remembered set, but we just pushed a value onto the array so we may have to |
1804 // tell the incremental marker to rescan the object that we just grew. We | 1804 // tell the incremental marker to rescan the object that we just grew. We |
1805 // don't need to worry about the holes because they are in old space and | 1805 // don't need to worry about the holes because they are in old space and |
1806 // already marked black. | 1806 // already marked black. |
1807 __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET); | 1807 __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET); |
1808 | 1808 |
1809 // Restore the receiver to rdx as the finish sequence assumes it's there. | 1809 // Restore the receiver to rdx as the finish sequence assumes it's there. |
1810 __ movq(rdx, args.GetReceiverOperand()); | 1810 __ movp(rdx, args.GetReceiverOperand()); |
1811 | 1811 |
1812 // Increment the lengths of the elements array and the array itself. | 1812 // Increment the lengths of the elements array and the array itself. |
1813 __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset), | 1813 __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset), |
1814 Smi::FromInt(kAllocationDelta)); | 1814 Smi::FromInt(kAllocationDelta)); |
1815 | 1815 |
1816 // Make new length a smi before returning it. | 1816 // Make new length a smi before returning it. |
1817 __ Integer32ToSmi(rax, rax); | 1817 __ Integer32ToSmi(rax, rax); |
1818 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1818 __ movp(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
1819 | 1819 |
1820 __ ret((argc + 1) * kPointerSize); | 1820 __ ret((argc + 1) * kPointerSize); |
1821 } | 1821 } |
1822 | 1822 |
1823 __ bind(&call_builtin); | 1823 __ bind(&call_builtin); |
1824 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush, | 1824 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush, |
1825 isolate()), | 1825 isolate()), |
1826 argc + 1, | 1826 argc + 1, |
1827 1); | 1827 1); |
1828 } | 1828 } |
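For reference, the grow-in-place fast path above only succeeds when the elements array ends exactly at the new-space allocation top and the bumped top stays under the limit. A minimal C++ sketch of that check, with all names hypothetical:

  #include <cstddef>
  #include <cstdint>

  // Hypothetical bump-pointer state for the new space.
  struct NewSpace {
    uintptr_t top;    // current allocation pointer
    uintptr_t limit;  // end of the usable region
  };

  // Grow an array whose backing store ends exactly at the allocation top
  // by delta_slots slots, mirroring the stub's fast path: bail out if
  // anything was allocated after the array or the region is exhausted.
  bool TryGrowInPlace(NewSpace* space, uintptr_t elements_end,
                      size_t delta_slots, size_t slot_size) {
    if (elements_end != space->top) return false;
    uintptr_t new_top = space->top + delta_slots * slot_size;
    if (new_top > space->limit) return false;
    space->top = new_top;  // claim the extra slots for the array
    return true;
  }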
(...skipping 19 matching lines...)
1848 Handle<JSArray>::cast(object)->map()->is_observed() || | 1848 Handle<JSArray>::cast(object)->map()->is_observed() || |
1849 !Handle<JSArray>::cast(object)->map()->is_extensible()) { | 1849 !Handle<JSArray>::cast(object)->map()->is_extensible()) { |
1850 return Handle<Code>::null(); | 1850 return Handle<Code>::null(); |
1851 } | 1851 } |
1852 | 1852 |
1853 Label miss, return_undefined, call_builtin; | 1853 Label miss, return_undefined, call_builtin; |
1854 | 1854 |
1855 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | 1855 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); |
1856 | 1856 |
1857 // Get the elements array of the object. | 1857 // Get the elements array of the object. |
1858 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); | 1858 __ movp(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); |
1859 | 1859 |
1860 // Check that the elements are in fast mode and writable. | 1860 // Check that the elements are in fast mode and writable. |
1861 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), | 1861 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), |
1862 Heap::kFixedArrayMapRootIndex); | 1862 Heap::kFixedArrayMapRootIndex); |
1863 __ j(not_equal, &call_builtin); | 1863 __ j(not_equal, &call_builtin); |
1864 | 1864 |
1865 // Get the array's length into rcx and calculate new length. | 1865 // Get the array's length into rcx and calculate new length. |
1866 __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset)); | 1866 __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset)); |
1867 __ subl(rcx, Immediate(1)); | 1867 __ subl(rcx, Immediate(1)); |
1868 __ j(negative, &return_undefined); | 1868 __ j(negative, &return_undefined); |
1869 | 1869 |
1870 // Get the last element. | 1870 // Get the last element. |
1871 __ LoadRoot(r9, Heap::kTheHoleValueRootIndex); | 1871 __ LoadRoot(r9, Heap::kTheHoleValueRootIndex); |
1872 __ movq(rax, FieldOperand(rbx, | 1872 __ movp(rax, FieldOperand(rbx, |
1873 rcx, times_pointer_size, | 1873 rcx, times_pointer_size, |
1874 FixedArray::kHeaderSize)); | 1874 FixedArray::kHeaderSize)); |
1875 // Check if element is already the hole. | 1875 // Check if element is already the hole. |
1876 __ cmpq(rax, r9); | 1876 __ cmpq(rax, r9); |
1877 // If so, call slow-case to also check prototypes for value. | 1877 // If so, call slow-case to also check prototypes for value. |
1878 __ j(equal, &call_builtin); | 1878 __ j(equal, &call_builtin); |
1879 | 1879 |
1880 // Set the array's length. | 1880 // Set the array's length. |
1881 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx); | 1881 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx); |
1882 | 1882 |
1883 // Fill with the hole and return original value. | 1883 // Fill with the hole and return original value. |
1884 __ movq(FieldOperand(rbx, | 1884 __ movp(FieldOperand(rbx, |
1885 rcx, times_pointer_size, | 1885 rcx, times_pointer_size, |
1886 FixedArray::kHeaderSize), | 1886 FixedArray::kHeaderSize), |
1887 r9); | 1887 r9); |
1888 const int argc = arguments().immediate(); | 1888 const int argc = arguments().immediate(); |
1889 __ ret((argc + 1) * kPointerSize); | 1889 __ ret((argc + 1) * kPointerSize); |
1890 | 1890 |
1891 __ bind(&return_undefined); | 1891 __ bind(&return_undefined); |
1892 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); | 1892 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
1893 __ ret((argc + 1) * kPointerSize); | 1893 __ ret((argc + 1) * kPointerSize); |
1894 | 1894 |
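A hedged sketch of the pop fast path above, over a plain array rather than V8's tagged heap (names hypothetical): read the last slot, bail out on the hole, otherwise shrink and vacate.

  // Returns false when the stub would fall back to the builtin or
  // to the return_undefined path.
  template <typename T>
  bool FastPop(T* elements, int* length, T hole, T* out) {
    int new_length = *length - 1;
    if (new_length < 0) return false;   // empty: caller returns undefined
    T value = elements[new_length];
    if (value == hole) return false;    // hole: slow path checks prototypes
    *length = new_length;
    elements[new_length] = hole;        // fill the vacated slot with the hole
    *out = value;
    return true;
  }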
(...skipping 25 matching lines...)
1920 if (!object->IsJSObject()) return Handle<Code>::null(); | 1920 if (!object->IsJSObject()) return Handle<Code>::null(); |
1921 int depth = optimization.GetPrototypeDepthOfExpectedType( | 1921 int depth = optimization.GetPrototypeDepthOfExpectedType( |
1922 Handle<JSObject>::cast(object), holder); | 1922 Handle<JSObject>::cast(object), holder); |
1923 if (depth == kInvalidProtoDepth) return Handle<Code>::null(); | 1923 if (depth == kInvalidProtoDepth) return Handle<Code>::null(); |
1924 | 1924 |
1925 Label miss, miss_before_stack_reserved; | 1925 Label miss, miss_before_stack_reserved; |
1926 GenerateNameCheck(name, &miss_before_stack_reserved); | 1926 GenerateNameCheck(name, &miss_before_stack_reserved); |
1927 | 1927 |
1928 const int argc = arguments().immediate(); | 1928 const int argc = arguments().immediate(); |
1929 StackArgumentsAccessor args(rsp, argc); | 1929 StackArgumentsAccessor args(rsp, argc); |
1930 __ movq(rdx, args.GetReceiverOperand()); | 1930 __ movp(rdx, args.GetReceiverOperand()); |
1931 | 1931 |
1932 // Check that the receiver isn't a smi. | 1932 // Check that the receiver isn't a smi. |
1933 __ JumpIfSmi(rdx, &miss_before_stack_reserved); | 1933 __ JumpIfSmi(rdx, &miss_before_stack_reserved); |
1934 | 1934 |
1935 Counters* counters = isolate()->counters(); | 1935 Counters* counters = isolate()->counters(); |
1936 __ IncrementCounter(counters->call_const(), 1); | 1936 __ IncrementCounter(counters->call_const(), 1); |
1937 __ IncrementCounter(counters->call_const_fast_api(), 1); | 1937 __ IncrementCounter(counters->call_const_fast_api(), 1); |
1938 | 1938 |
1939 // Allocate space for v8::Arguments implicit values. Must be initialized | 1939 // Allocate space for v8::Arguments implicit values. Must be initialized |
1940 // before calling any runtime function. | 1940 // before calling any runtime function. |
(...skipping 28 matching lines...)
1969 __ CompareRoot(object, Heap::kFalseValueRootIndex); | 1969 __ CompareRoot(object, Heap::kFalseValueRootIndex); |
1970 __ j(not_equal, miss); | 1970 __ j(not_equal, miss); |
1971 __ bind(&success); | 1971 __ bind(&success); |
1972 } | 1972 } |
1973 | 1973 |
1974 | 1974 |
1975 void CallStubCompiler::PatchImplicitReceiver(Handle<Object> object) { | 1975 void CallStubCompiler::PatchImplicitReceiver(Handle<Object> object) { |
1976 if (object->IsGlobalObject()) { | 1976 if (object->IsGlobalObject()) { |
1977 StackArgumentsAccessor args(rsp, arguments()); | 1977 StackArgumentsAccessor args(rsp, arguments()); |
1978 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); | 1978 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); |
1979 __ movq(args.GetReceiverOperand(), rdx); | 1979 __ movp(args.GetReceiverOperand(), rdx); |
1980 } | 1980 } |
1981 } | 1981 } |
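A hedged reading of PatchImplicitReceiver above, with hypothetical types: a global-object receiver on the stack is swapped for undefined, and receiver normalization is left to the callee.

  // Hypothetical stand-ins; the point is the convention, not the types.
  struct Object { bool is_global_object; };

  // If the receiver slot on the stack holds the global object,
  // overwrite it with undefined before jumping to the function.
  void PatchImplicitReceiver(Object** receiver_slot, Object* undefined_value) {
    if ((*receiver_slot)->is_global_object) *receiver_slot = undefined_value;
  }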
1982 | 1982 |
1983 | 1983 |
1984 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object, | 1984 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object, |
1985 Handle<JSObject> holder, | 1985 Handle<JSObject> holder, |
1986 Handle<Name> name, | 1986 Handle<Name> name, |
1987 CheckType check, | 1987 CheckType check, |
1988 Label* miss) { | 1988 Label* miss) { |
1989 GenerateNameCheck(name, miss); | 1989 GenerateNameCheck(name, miss); |
1990 | 1990 |
1991 Register reg = rdx; | 1991 Register reg = rdx; |
1992 | 1992 |
1993 StackArgumentsAccessor args(rsp, arguments()); | 1993 StackArgumentsAccessor args(rsp, arguments()); |
1994 __ movq(reg, args.GetReceiverOperand()); | 1994 __ movp(reg, args.GetReceiverOperand()); |
1995 | 1995 |
1996 // Check that the receiver isn't a smi. | 1996 // Check that the receiver isn't a smi. |
1997 if (check != NUMBER_CHECK) { | 1997 if (check != NUMBER_CHECK) { |
1998 __ JumpIfSmi(reg, miss); | 1998 __ JumpIfSmi(reg, miss); |
1999 } | 1999 } |
2000 | 2000 |
2001 // Make sure that it's okay not to patch the on-stack receiver | 2001 // Make sure that it's okay not to patch the on-stack receiver |
2002 // unless we're doing a receiver map check. | 2002 // unless we're doing a receiver map check. |
2003 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); | 2003 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); |
2004 | 2004 |
(...skipping 56 matching lines...)
2061 return reg; | 2061 return reg; |
2062 } | 2062 } |
2063 | 2063 |
2064 | 2064 |
2065 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object, | 2065 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object, |
2066 Register function, | 2066 Register function, |
2067 Label* miss) { | 2067 Label* miss) { |
2068 // Check that the function really is a function. | 2068 // Check that the function really is a function. |
2069 GenerateFunctionCheck(function, rbx, miss); | 2069 GenerateFunctionCheck(function, rbx, miss); |
2070 | 2070 |
2071 if (!function.is(rdi)) __ movq(rdi, function); | 2071 if (!function.is(rdi)) __ movp(rdi, function); |
2072 PatchImplicitReceiver(object); | 2072 PatchImplicitReceiver(object); |
2073 | 2073 |
2074 // Invoke the function. | 2074 // Invoke the function. |
2075 __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION, NullCallWrapper()); | 2075 __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION, NullCallWrapper()); |
2076 } | 2076 } |
2077 | 2077 |
2078 | 2078 |
2079 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, | 2079 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, |
2080 Handle<JSObject> holder, | 2080 Handle<JSObject> holder, |
2081 Handle<Name> name) { | 2081 Handle<Name> name) { |
2082 Label miss; | 2082 Label miss; |
2083 GenerateNameCheck(name, &miss); | 2083 GenerateNameCheck(name, &miss); |
2084 | 2084 |
2085 LookupResult lookup(isolate()); | 2085 LookupResult lookup(isolate()); |
2086 LookupPostInterceptor(holder, name, &lookup); | 2086 LookupPostInterceptor(holder, name, &lookup); |
2087 | 2087 |
2088 // Get the receiver from the stack. | 2088 // Get the receiver from the stack. |
2089 StackArgumentsAccessor args(rsp, arguments()); | 2089 StackArgumentsAccessor args(rsp, arguments()); |
2090 __ movq(rdx, args.GetReceiverOperand()); | 2090 __ movp(rdx, args.GetReceiverOperand()); |
2091 | 2091 |
2092 CallInterceptorCompiler compiler(this, arguments(), rcx); | 2092 CallInterceptorCompiler compiler(this, arguments(), rcx); |
2093 compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax, | 2093 compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax, |
2094 &miss); | 2094 &miss); |
2095 | 2095 |
2096 // Restore receiver. | 2096 // Restore receiver. |
2097 __ movq(rdx, args.GetReceiverOperand()); | 2097 __ movp(rdx, args.GetReceiverOperand()); |
2098 | 2098 |
2099 GenerateJumpFunction(object, rax, &miss); | 2099 GenerateJumpFunction(object, rax, &miss); |
2100 | 2100 |
2101 HandlerFrontendFooter(&miss); | 2101 HandlerFrontendFooter(&miss); |
2102 | 2102 |
2103 // Return the generated code. | 2103 // Return the generated code. |
2104 return GetCode(Code::FAST, name); | 2104 return GetCode(Code::FAST, name); |
2105 } | 2105 } |
2106 | 2106 |
2107 | 2107 |
(...skipping 100 matching lines...)
2208 } else { | 2208 } else { |
2209 // If we generate a global code snippet for deoptimization only, remember | 2209 // If we generate a global code snippet for deoptimization only, remember |
2210 // the place to continue after deoptimization. | 2210 // the place to continue after deoptimization. |
2211 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); | 2211 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); |
2212 } | 2212 } |
2213 | 2213 |
2214 // We have to return the passed value, not the return value of the setter. | 2214 // We have to return the passed value, not the return value of the setter. |
2215 __ pop(rax); | 2215 __ pop(rax); |
2216 | 2216 |
2217 // Restore context register. | 2217 // Restore context register. |
2218 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2218 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2219 } | 2219 } |
2220 __ ret(0); | 2220 __ ret(0); |
2221 } | 2221 } |
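The pop/ret dance above preserves the stored value because an assignment expression must evaluate to the assigned value, not to whatever the setter returns. A minimal sketch of that contract (hypothetical wrapper, not V8 API):

  #include <functional>

  // An assignment through an accessor must yield the assigned value;
  // the setter's own return value is deliberately discarded.
  template <typename V>
  V StoreViaSetter(const std::function<void(V)>& setter, V value) {
    setter(value);
    return value;  // matches: (receiver.p = value) evaluates to value
  }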
2222 | 2222 |
2223 | 2223 |
2224 #undef __ | 2224 #undef __ |
2225 #define __ ACCESS_MASM(masm()) | 2225 #define __ ACCESS_MASM(masm()) |
2226 | 2226 |
2227 | 2227 |
2228 Handle<Code> StoreStubCompiler::CompileStoreInterceptor( | 2228 Handle<Code> StoreStubCompiler::CompileStoreInterceptor( |
(...skipping 15 matching lines...)
2244 } | 2244 } |
2245 | 2245 |
2246 | 2246 |
2247 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( | 2247 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( |
2248 MapHandleList* receiver_maps, | 2248 MapHandleList* receiver_maps, |
2249 CodeHandleList* handler_stubs, | 2249 CodeHandleList* handler_stubs, |
2250 MapHandleList* transitioned_maps) { | 2250 MapHandleList* transitioned_maps) { |
2251 Label miss; | 2251 Label miss; |
2252 __ JumpIfSmi(receiver(), &miss, Label::kNear); | 2252 __ JumpIfSmi(receiver(), &miss, Label::kNear); |
2253 | 2253 |
2254 __ movq(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset)); | 2254 __ movp(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset)); |
2255 int receiver_count = receiver_maps->length(); | 2255 int receiver_count = receiver_maps->length(); |
2256 for (int i = 0; i < receiver_count; ++i) { | 2256 for (int i = 0; i < receiver_count; ++i) { |
2257 // Check map and tail-call if there's a match. | 2257 // Check map and tail-call if there's a match. |
2258 __ Cmp(scratch1(), receiver_maps->at(i)); | 2258 __ Cmp(scratch1(), receiver_maps->at(i)); |
2259 if (transitioned_maps->at(i).is_null()) { | 2259 if (transitioned_maps->at(i).is_null()) { |
2260 __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET); | 2260 __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET); |
2261 } else { | 2261 } else { |
2262 Label next_map; | 2262 Label next_map; |
2263 __ j(not_equal, &next_map, Label::kNear); | 2263 __ j(not_equal, &next_map, Label::kNear); |
2264 __ Move(transition_map(), | 2264 __ Move(transition_map(), |
(...skipping 95 matching lines...)
2360 ParameterCount expected(getter); | 2360 ParameterCount expected(getter); |
2361 __ InvokeFunction(getter, expected, actual, | 2361 __ InvokeFunction(getter, expected, actual, |
2362 CALL_FUNCTION, NullCallWrapper()); | 2362 CALL_FUNCTION, NullCallWrapper()); |
2363 } else { | 2363 } else { |
2364 // If we generate a global code snippet for deoptimization only, remember | 2364 // If we generate a global code snippet for deoptimization only, remember |
2365 // the place to continue after deoptimization. | 2365 // the place to continue after deoptimization. |
2366 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); | 2366 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); |
2367 } | 2367 } |
2368 | 2368 |
2369 // Restore context register. | 2369 // Restore context register. |
2370 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2370 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2371 } | 2371 } |
2372 __ ret(0); | 2372 __ ret(0); |
2373 } | 2373 } |
2374 | 2374 |
2375 | 2375 |
2376 #undef __ | 2376 #undef __ |
2377 #define __ ACCESS_MASM(masm()) | 2377 #define __ ACCESS_MASM(masm()) |
2378 | 2378 |
2379 | 2379 |
2380 Handle<Code> LoadStubCompiler::CompileLoadGlobal( | 2380 Handle<Code> LoadStubCompiler::CompileLoadGlobal( |
2381 Handle<Type> type, | 2381 Handle<Type> type, |
2382 Handle<GlobalObject> global, | 2382 Handle<GlobalObject> global, |
2383 Handle<PropertyCell> cell, | 2383 Handle<PropertyCell> cell, |
2384 Handle<Name> name, | 2384 Handle<Name> name, |
2385 bool is_dont_delete) { | 2385 bool is_dont_delete) { |
2386 Label miss; | 2386 Label miss; |
2387 // TODO(verwaest): Directly store to rax. Currently we cannot do this, since | 2387 // TODO(verwaest): Directly store to rax. Currently we cannot do this, since |
2388 // rax is used as receiver(), which we would otherwise clobber before a | 2388 // rax is used as receiver(), which we would otherwise clobber before a |
2389 // potential miss. | 2389 // potential miss. |
2390 HandlerFrontendHeader(type, receiver(), global, name, &miss); | 2390 HandlerFrontendHeader(type, receiver(), global, name, &miss); |
2391 | 2391 |
2392 // Get the value from the cell. | 2392 // Get the value from the cell. |
2393 __ Move(rbx, cell); | 2393 __ Move(rbx, cell); |
2394 __ movq(rbx, FieldOperand(rbx, PropertyCell::kValueOffset)); | 2394 __ movp(rbx, FieldOperand(rbx, PropertyCell::kValueOffset)); |
2395 | 2395 |
2396 // Check for deleted property if property can actually be deleted. | 2396 // Check for deleted property if property can actually be deleted. |
2397 if (!is_dont_delete) { | 2397 if (!is_dont_delete) { |
2398 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); | 2398 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); |
2399 __ j(equal, &miss); | 2399 __ j(equal, &miss); |
2400 } else if (FLAG_debug_code) { | 2400 } else if (FLAG_debug_code) { |
2401 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); | 2401 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); |
2402 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole); | 2402 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole); |
2403 } | 2403 } |
2404 | 2404 |
2405 HandlerFrontendFooter(name, &miss); | 2405 HandlerFrontendFooter(name, &miss); |
2406 | 2406 |
2407 Counters* counters = isolate()->counters(); | 2407 Counters* counters = isolate()->counters(); |
2408 __ IncrementCounter(counters->named_load_global_stub(), 1); | 2408 __ IncrementCounter(counters->named_load_global_stub(), 1); |
2409 __ movq(rax, rbx); | 2409 __ movp(rax, rbx); |
2410 __ ret(0); | 2410 __ ret(0); |
2411 | 2411 |
2412 // Return the generated code. | 2412 // Return the generated code. |
2413 return GetCode(kind(), Code::NORMAL, name); | 2413 return GetCode(kind(), Code::NORMAL, name); |
2414 } | 2414 } |
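Global loads go through a property cell, where deletion leaves the hole sentinel behind. A hedged sketch of the check above (hypothetical types, not V8's Cell API):

  // Hypothetical cell: a deleted global leaves the hole in the cell.
  struct Cell { const void* value; };

  // Returns false on a miss: a deletable property whose cell holds the
  // hole must fall back to the generic IC machinery.
  bool LoadGlobalFromCell(const Cell& cell, const void* the_hole,
                          bool is_dont_delete, const void** out) {
    if (!is_dont_delete && cell.value == the_hole) return false;
    *out = cell.value;
    return true;
  }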
2415 | 2415 |
2416 | 2416 |
2417 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC( | 2417 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC( |
2418 TypeHandleList* types, | 2418 TypeHandleList* types, |
2419 CodeHandleList* handlers, | 2419 CodeHandleList* handlers, |
2420 Handle<Name> name, | 2420 Handle<Name> name, |
2421 Code::StubType type, | 2421 Code::StubType type, |
2422 IcCheckType check) { | 2422 IcCheckType check) { |
2423 Label miss; | 2423 Label miss; |
2424 | 2424 |
2425 if (check == PROPERTY) { | 2425 if (check == PROPERTY) { |
2426 GenerateNameCheck(name, this->name(), &miss); | 2426 GenerateNameCheck(name, this->name(), &miss); |
2427 } | 2427 } |
2428 | 2428 |
2429 Label number_case; | 2429 Label number_case; |
2430 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; | 2430 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; |
2431 __ JumpIfSmi(receiver(), smi_target); | 2431 __ JumpIfSmi(receiver(), smi_target); |
2432 | 2432 |
2433 Register map_reg = scratch1(); | 2433 Register map_reg = scratch1(); |
2434 __ movq(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset)); | 2434 __ movp(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset)); |
2435 int receiver_count = types->length(); | 2435 int receiver_count = types->length(); |
2436 int number_of_handled_maps = 0; | 2436 int number_of_handled_maps = 0; |
2437 for (int current = 0; current < receiver_count; ++current) { | 2437 for (int current = 0; current < receiver_count; ++current) { |
2438 Handle<Type> type = types->at(current); | 2438 Handle<Type> type = types->at(current); |
2439 Handle<Map> map = IC::TypeToMap(*type, isolate()); | 2439 Handle<Map> map = IC::TypeToMap(*type, isolate()); |
2440 if (!map->is_deprecated()) { | 2440 if (!map->is_deprecated()) { |
2441 number_of_handled_maps++; | 2441 number_of_handled_maps++; |
2442 // Check map and tail-call if there's a match. | 2442 // Check map and tail-call if there's a match. |
2443 __ Cmp(map_reg, map); | 2443 __ Cmp(map_reg, map); |
2444 if (type->Is(Type::Number())) { | 2444 if (type->Is(Type::Number())) { |
(...skipping 26 matching lines...)
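CompilePolymorphicIC above is a linear dispatch: load the receiver's map once, compare it against each non-deprecated handled map, and tail-call the matching handler, falling through to the miss label. A hedged sketch of the control flow (hypothetical types):

  #include <cstddef>
  #include <vector>

  struct Map {};
  using Handler = void (*)();

  // First matching map wins; no match falls through to the miss handler.
  void PolymorphicDispatch(const Map* receiver_map,
                           const std::vector<const Map*>& maps,
                           const std::vector<Handler>& handlers,
                           Handler miss) {
    for (size_t i = 0; i < maps.size(); ++i) {
      if (receiver_map == maps[i]) return handlers[i]();
    }
    miss();
  }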
2471 // -- rdx : receiver | 2471 // -- rdx : receiver |
2472 // -- rsp[0] : return address | 2472 // -- rsp[0] : return address |
2473 // ----------------------------------- | 2473 // ----------------------------------- |
2474 Label slow, miss; | 2474 Label slow, miss; |
2475 | 2475 |
2476 // This stub is meant to be tail-jumped to; the receiver must already | 2476 // This stub is meant to be tail-jumped to; the receiver must already |
2477 // have been verified by the caller not to be a smi. | 2477 // have been verified by the caller not to be a smi. |
2478 | 2478 |
2479 __ JumpIfNotSmi(rax, &miss); | 2479 __ JumpIfNotSmi(rax, &miss); |
2480 __ SmiToInteger32(rbx, rax); | 2480 __ SmiToInteger32(rbx, rax); |
2481 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); | 2481 __ movp(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); |
2482 | 2482 |
2483 // Check whether the elements array is a number dictionary. | 2483 // Check whether the elements array is a number dictionary. |
2484 // rdx: receiver | 2484 // rdx: receiver |
2485 // rax: key | 2485 // rax: key |
2486 // rbx: key as untagged int32 | 2486 // rbx: key as untagged int32 |
2487 // rcx: elements | 2487 // rcx: elements |
2488 __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax); | 2488 __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax); |
2489 __ ret(0); | 2489 __ ret(0); |
2490 | 2490 |
2491 __ bind(&slow); | 2491 __ bind(&slow); |
(...skipping 12 matching lines...)
2504 // ----------------------------------- | 2504 // ----------------------------------- |
2505 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 2505 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
2506 } | 2506 } |
2507 | 2507 |
2508 | 2508 |
2509 #undef __ | 2509 #undef __ |
2510 | 2510 |
2511 } } // namespace v8::internal | 2511 } } // namespace v8::internal |
2512 | 2512 |
2513 #endif // V8_TARGET_ARCH_X64 | 2513 #endif // V8_TARGET_ARCH_X64 |