Chromium Code Reviews

Unified Diff: src/mips/stub-cache-mips.cc

Issue 196133017: Experimental parser: merge r19949 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 295 matching lines...)
   __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
 }


 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                             Register dst,
                                             Register src,
                                             bool inobject,
                                             int index,
                                             Representation representation) {
-  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
+  ASSERT(!representation.IsDouble());
   int offset = index * kPointerSize;
   if (!inobject) {
     // Calculate the offset into the properties array.
     offset = offset + FixedArray::kHeaderSize;
     __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
     src = dst;
   }
   __ lw(dst, FieldMemOperand(src, offset));
 }


 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch,
                                            Label* miss_label) {
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, miss_label);

   // Check that the object is a JS array.
   __ GetObjectType(receiver, scratch, scratch);
   __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

   // Load length directly from the JS array.
   __ Ret(USE_DELAY_SLOT);
   __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
 }


-// Generate code to check if an object is a string. If the object is a
-// heap object, its map's instance type is left in the scratch1 register.
-// If this is not needed, scratch1 and scratch2 may be the same register.
-static void GenerateStringCheck(MacroAssembler* masm,
-                                Register receiver,
-                                Register scratch1,
-                                Register scratch2,
-                                Label* smi,
-                                Label* non_string_object) {
-  // Check that the receiver isn't a smi.
-  __ JumpIfSmi(receiver, smi, t0);
-
-  // Check that the object is a string.
-  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
-  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
-  // The cast is to resolve the overload for the argument of 0x0.
-  __ Branch(non_string_object,
-            ne,
-            scratch2,
-            Operand(static_cast<int32_t>(kStringTag)));
-}
-
-
-// Generate code to load the length from a string object and return the length.
-// If the receiver object is not a string or a wrapped string object the
-// execution continues at the miss label. The register containing the
-// receiver is potentially clobbered.
-void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
-                                            Register receiver,
-                                            Register scratch1,
-                                            Register scratch2,
-                                            Label* miss) {
-  Label check_wrapper;
-
-  // Check if the object is a string leaving the instance type in the
-  // scratch1 register.
-  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);
-
-  // Load length directly from the string.
-  __ Ret(USE_DELAY_SLOT);
-  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
-
-  // Check if the object is a JSValue wrapper.
-  __ bind(&check_wrapper);
-  __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));
-
-  // Unwrap the value and check if the wrapped value is a string.
-  __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
-  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
-  __ Ret(USE_DELAY_SLOT);
-  __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
-}
-
-
 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* miss_label) {
   __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
   __ Ret(USE_DELAY_SLOT);
   __ mov(v0, scratch1);
 }

(...skipping 50 matching lines...)
   int descriptor = transition->LastAdded();
   DescriptorArray* descriptors = transition->instance_descriptors();
   PropertyDetails details = descriptors->GetDetails(descriptor);
   Representation representation = details.representation();
   ASSERT(!representation.IsNone());

   if (details.type() == CONSTANT) {
     Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
     __ li(scratch1, constant);
     __ Branch(miss_label, ne, value_reg, Operand(scratch1));
-  } else if (FLAG_track_fields && representation.IsSmi()) {
+  } else if (representation.IsSmi()) {
     __ JumpIfNotSmi(value_reg, miss_label);
-  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
+  } else if (representation.IsHeapObject()) {
     __ JumpIfSmi(value_reg, miss_label);
-  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+  } else if (representation.IsDouble()) {
     Label do_store, heap_number;
     __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
     __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

     __ JumpIfNotSmi(value_reg, &heap_number);
     __ SmiUntag(scratch1, value_reg);
     __ mtc1(scratch1, f6);
     __ cvt_d_w(f4, f6);
     __ jmp(&do_store);

(...skipping 53 matching lines...)
   // face of a transition we can use the old map here because the size of the
   // object and the number of in-object properties is not going to change.
   index -= object->map()->inobject_properties();

   // TODO(verwaest): Share this code as a code stub.
   SmiCheck smi_check = representation.IsTagged()
       ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    if (FLAG_track_double_fields && representation.IsDouble()) {
+    if (representation.IsDouble()) {
       __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
     } else {
       __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
     }

-    if (!FLAG_track_fields || !representation.IsSmi()) {
+    if (!representation.IsSmi()) {
       // Update the write barrier for the array address.
-      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+      if (!representation.IsDouble()) {
         __ mov(storage_reg, value_reg);
       }
       __ RecordWriteField(receiver_reg,
                           offset,
                           storage_reg,
                           scratch1,
                           kRAHasNotBeenSaved,
                           kDontSaveFPRegs,
                           EMIT_REMEMBERED_SET,
                           smi_check);
     }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array
     __ lw(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
-    if (FLAG_track_double_fields && representation.IsDouble()) {
+    if (representation.IsDouble()) {
       __ sw(storage_reg, FieldMemOperand(scratch1, offset));
     } else {
       __ sw(value_reg, FieldMemOperand(scratch1, offset));
     }

-    if (!FLAG_track_fields || !representation.IsSmi()) {
+    if (!representation.IsSmi()) {
       // Update the write barrier for the array address.
-      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+      if (!representation.IsDouble()) {
         __ mov(storage_reg, value_reg);
       }
       __ RecordWriteField(scratch1,
                           offset,
                           storage_reg,
                           receiver_reg,
                           kRAHasNotBeenSaved,
                           kDontSaveFPRegs,
                           EMIT_REMEMBERED_SET,
                           smi_check);
(...skipping 30 matching lines...)

   int index = lookup->GetFieldIndex().field_index();

   // Adjust for the number of properties stored in the object. Even in the
   // face of a transition we can use the old map here because the size of the
   // object and the number of in-object properties is not going to change.
   index -= object->map()->inobject_properties();

   Representation representation = lookup->representation();
   ASSERT(!representation.IsNone());
-  if (FLAG_track_fields && representation.IsSmi()) {
+  if (representation.IsSmi()) {
     __ JumpIfNotSmi(value_reg, miss_label);
-  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
+  } else if (representation.IsHeapObject()) {
     __ JumpIfSmi(value_reg, miss_label);
-  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+  } else if (representation.IsDouble()) {
     // Load the double storage.
     if (index < 0) {
       int offset = object->map()->instance_size() + (index * kPointerSize);
       __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
     } else {
       __ lw(scratch1,
             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
       int offset = index * kPointerSize + FixedArray::kHeaderSize;
       __ lw(scratch1, FieldMemOperand(scratch1, offset));
     }
(...skipping 21 matching lines...)
   }

   // TODO(verwaest): Share this code as a code stub.
   SmiCheck smi_check = representation.IsTagged()
       ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
     __ sw(value_reg, FieldMemOperand(receiver_reg, offset));

-    if (!FLAG_track_fields || !representation.IsSmi()) {
+    if (!representation.IsSmi()) {
       // Skip updating write barrier if storing a smi.
       __ JumpIfSmi(value_reg, &exit);

       // Update the write barrier for the array address.
       // Pass the now unused name_reg as a scratch register.
       __ mov(name_reg, value_reg);
       __ RecordWriteField(receiver_reg,
                           offset,
                           name_reg,
                           scratch1,
                           kRAHasNotBeenSaved,
                           kDontSaveFPRegs,
                           EMIT_REMEMBERED_SET,
                           smi_check);
     }
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array.
     __ lw(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
     __ sw(value_reg, FieldMemOperand(scratch1, offset));

-    if (!FLAG_track_fields || !representation.IsSmi()) {
+    if (!representation.IsSmi()) {
       // Skip updating write barrier if storing a smi.
       __ JumpIfSmi(value_reg, &exit);

       // Update the write barrier for the array address.
       // Ok to clobber receiver_reg and name_reg, since we return.
       __ mov(name_reg, value_reg);
       __ RecordWriteField(scratch1,
                           offset,
                           name_reg,
                           receiver_reg,
(...skipping 521 matching lines...)
 }


 #undef __
 #define __ ACCESS_MASM(masm)


 void StoreStubCompiler::GenerateStoreViaSetter(
     MacroAssembler* masm,
     Handle<HeapType> type,
+    Register receiver,
     Handle<JSFunction> setter) {
   // ----------- S t a t e -------------
-  //  -- a0    : value
-  //  -- a1    : receiver
-  //  -- a2    : name
   //  -- ra    : return address
   // -----------------------------------
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    Register receiver = a1;
-    Register value = a0;

     // Save value register, so we can restore it later.
-    __ push(value);
+    __ push(value());

     if (!setter.is_null()) {
       // Call the JavaScript setter with receiver and value on the stack.
       if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
         // Swap in the global receiver.
         __ lw(receiver,
               FieldMemOperand(
                   receiver, JSGlobalObject::kGlobalReceiverOffset));
       }
-      __ Push(receiver, value);
+      __ Push(receiver, value());
       ParameterCount actual(1);
       ParameterCount expected(setter);
       __ InvokeFunction(setter, expected, actual,
                         CALL_FUNCTION, NullCallWrapper());
     } else {
       // If we generate a global code snippet for deoptimization only, remember
       // the place to continue after deoptimization.
       masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
     }

     // We have to return the passed value, not the return value of the setter.
     __ pop(v0);

     // Restore context register.
     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   }
   __ Ret();
 }


 #undef __
 #define __ ACCESS_MASM(masm())


 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
     Handle<JSObject> object,
     Handle<Name> name) {
-  Label miss;
-
-  // Check that the map of the object hasn't changed.
-  __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
-              DO_SMI_CHECK);
-
-  // Perform global security token check if needed.
-  if (object->IsJSGlobalProxy()) {
-    __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
-  }
-
-  // Stub is never generated for non-global objects that require access
-  // checks.
-  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
-
   __ Push(receiver(), this->name(), value());

   // Do tail-call to the runtime system.
   ExternalReference store_ic_property =
       ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
   __ TailCallExternalReference(store_ic_property, 3, 1);

-  // Handle store cache miss.
-  __ bind(&miss);
-  TailCallBuiltin(masm(), MissBuiltin(kind()));
-
   // Return the generated code.
   return GetCode(kind(), Code::FAST, name);
 }


 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                       Handle<JSObject> last,
                                                       Handle<Name> name) {
   NonexistentHandlerFrontend(type, last, name);

(...skipping 13 matching lines...)
 }


 Register* KeyedLoadStubCompiler::registers() {
   // receiver, name, scratch1, scratch2, scratch3, scratch4.
   static Register registers[] = { a1, a0, a2, a3, t0, t1 };
   return registers;
 }


+Register StoreStubCompiler::value() {
+  return a0;
+}
+
+
 Register* StoreStubCompiler::registers() {
-  // receiver, name, value, scratch1, scratch2, scratch3.
-  static Register registers[] = { a1, a2, a0, a3, t0, t1 };
+  // receiver, name, scratch1, scratch2, scratch3.
+  static Register registers[] = { a1, a2, a3, t0, t1 };
   return registers;
 }


 Register* KeyedStoreStubCompiler::registers() {
-  // receiver, name, value, scratch1, scratch2, scratch3.
-  static Register registers[] = { a2, a1, a0, a3, t0, t1 };
+  // receiver, name, scratch1, scratch2, scratch3.
+  static Register registers[] = { a2, a1, a3, t0, t1 };
   return registers;
 }


 #undef __
 #define __ ACCESS_MASM(masm)


 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                              Handle<HeapType> type,
(...skipping 196 matching lines...)
   // -----------------------------------
   TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS