| OLD | NEW |
| (Empty) | |
| 1 #include "v8.h" |
| 2 |
| 3 #include "ic-inl.h" |
| 4 #include "codegen-inl.h" |
| 5 #include "stub-cache.h" |
| 6 |
| 7 namespace v8 { |
| 8 namespace internal { |
| 9 |
| 10 #define __ ACCESS_MASM(masm) |
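| // The "__" shorthand above follows the usual v8 convention: ACCESS_MASM(masm) |
| // resolves to the MacroAssembler in use, so each "__ insn(...)" below emits |
| // MIPS code through it. |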
| 11 |
| 12 |
| 13 //static void ProbeTable(MacroAssembler* masm, |
| 14 // Code::Flags flags, |
| 15 // StubCache::Table table, |
| 16 // Register name, |
| 17 // Register offset) { |
| 18 // UNIMPLEMENTED_(); |
| 19 // ExternalReference key_offset(SCTableReference::keyReference(table)); |
| 20 // ExternalReference value_offset(SCTableReference::valueReference(table)); |
| 21 // |
| 22 // Label miss; |
| 23 // |
| 24 // // Save the offset on the stack. |
| 25 // __ push(offset); |
| 26 // |
| 27 // // Check that the key in the entry matches the name. |
| 28 //// __ mov(ip, Operand(key_offset)); |
| 29 //// __ ldr(ip, MemOperand(ip, offset, LSL, 1)); |
| 30 //// __ cmp(name, Operand(ip)); |
| 31 //// __ b(ne, &miss); |
| 32 // __ li(ip, Operand(key_offset)); |
| 33 // __ sll(t0, offset, 1); // Don't clobber t0 we use it again below. |
| 34 // __ addu(t1, ip, t0); |
| 35 // __ lw(ip, MemOperand(t1)); |
| 36 // __ bcond(ne, &miss, name, Operand(ip)); |
| 37 // __ nop(); // NOP_ADDED |
| 38 // |
| 39 // // Get the code entry from the cache. |
| 40 //// __ mov(ip, Operand(value_offset)); |
| 41 //// __ ldr(offset, MemOperand(ip, offset, LSL, 1)); |
| 42 // __ li(ip, Operand(value_offset)); |
| 43 //// __ sll(t0, offset, 1); // Done a few instructions above. |
| 44 // __ addu(t1, ip, t0); |
| 45 // __ lw(offset, MemOperand(t1)); |
| 46 // |
| 47 // // Check that the flags match what we're looking for. |
| 48 //// __ ldr(offset, FieldMemOperand(offset, Code::kFlagsOffset)); |
| 49 //// __ and_(offset, offset, Operand(~Code::kFlagsNotUsedInLookup)); |
| 50 //// __ cmp(offset, Operand(flags)); |
| 51 //// __ b(ne, &miss); |
| 52 // __ lw(offset, FieldMemOperand(offset, Code::kFlagsOffset)); |
| 53 // __ and_(offset, offset, Operand(~Code::kFlagsNotUsedInLookup)); |
| 54 // __ bcond(ne, &miss, offset, Operand(flags)); |
| 55 // __ nop(); // NOP_ADDED |
| 56 // |
| 57 // // Restore offset and re-load code entry from cache. |
| 58 // __ pop(offset); |
| 59 //// __ mov(ip, Operand(value_offset)); |
| 60 //// __ ldr(offset, MemOperand(ip, offset, LSL, 1)); |
| 61 // __ li(ip, Operand(value_offset)); |
| 62 //// __ sll(t0, offset, 1); // Done a few instructions above. |
| 63 // __ addu(t1, ip, t0); |
| 64 // __ lw(ip, MemOperand(t1)); |
| 65 // |
| 66 // // Jump to the first instruction in the code stub. |
| 67 //// __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 68 // __ addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 69 // __ Jump(offset); |
| 70 // __ nop(); // NOP_ADDED |
| 71 // |
| 72 // // Miss: Restore offset and fall through. |
| 73 // __ bind(&miss); |
| 74 // __ pop(offset); |
| 75 //} |
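| // Porting notes on the ARM reference code kept above: MIPS has no scaled |
| // register addressing, so ARM's "ldr rd, [base, index, LSL #1]" becomes an |
| // explicit sll/addu/lw sequence; conditional branches go through bcond(), |
| // which in this port appears to take the condition plus the two registers to |
| // compare; and every branch is followed by a nop() (tagged NOP_ADDED), |
| // presumably to fill the MIPS branch delay slot. |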
| 76 |
| 77 |
| 78 void StubCache::GenerateProbe(MacroAssembler* masm, |
| 79 Code::Flags flags, |
| 80 Register receiver, |
| 81 Register name, |
| 82 Register scratch, |
| 83 Register extra) { |
| 84 UNIMPLEMENTED_(); |
| 85 // Label miss; |
| 86 // |
| 87 // // Make sure that code is valid. The shifting code relies on the |
| 88 // // entry size being 8. |
| 89 // ASSERT(sizeof(Entry) == 8); |
| 90 // |
| 92 // // Make sure the flags do not name a specific type. |
| 92 // ASSERT(Code::ExtractTypeFromFlags(flags) == 0); |
| 93 //// |
| 94 // // Make sure that there are no register conflicts. |
| 95 // ASSERT(!scratch.is(receiver)); |
| 96 // ASSERT(!scratch.is(name)); |
| 97 // |
| 98 // // Check that the receiver isn't a smi. |
| 99 //// __ tst(receiver, Operand(kSmiTagMask)); |
| 100 //// __ b(eq, &miss); |
| 101 // __ andi(scratch, receiver, Operand(kSmiTagMask)); |
| 102 // __ bcond(eq, &miss, scratch, Operand(zero_reg)); |
| 103 // __ nop(); // NOP_ADDED |
| 104 // |
| 105 // // Get the map of the receiver and compute the hash. |
| 106 //// __ ldr(scratch, FieldMemOperand(name, String::kLengthOffset)); |
| 107 //// __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 108 //// __ add(scratch, scratch, Operand(ip)); |
| 109 //// __ eor(scratch, scratch, Operand(flags)); |
| 110 //// __ and_(scratch, |
| 111 //// scratch, |
| 112 //// Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); |
| 113 // __ lw(scratch, FieldMemOperand(name, String::kLengthOffset)); |
| 114 // __ lw(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 115 // __ addu(scratch, scratch, Operand(ip)); |
| 116 // __ xor_(scratch, scratch, Operand(flags)); |
| 117 // __ and_(scratch, |
| 118 // scratch, |
| 119 // Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); |
| 120 // |
| 121 // // Probe the primary table. |
| 122 // ProbeTable(masm, flags, kPrimary, name, scratch); |
| 123 // |
| 124 // // Primary miss: Compute hash for secondary probe. |
| 125 //// __ sub(scratch, scratch, Operand(name)); |
| 126 //// __ add(scratch, scratch, Operand(flags)); |
| 127 //// __ and_(scratch, |
| 128 //// scratch, |
| 129 //// Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); |
| 130 // __ subu(scratch, scratch, Operand(name)); |
| 131 // __ addu(scratch, scratch, Operand(flags)); |
| 132 // __ and_(scratch, |
| 133 // scratch, |
| 134 // Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); |
| 135 // |
| 136 // // Probe the secondary table. |
| 137 // ProbeTable(masm, flags, kSecondary, name, scratch); |
| 138 // |
| 139 // // Cache miss: Fall-through and let caller handle the miss by |
| 140 // // entering the runtime system. |
| 141 // __ bind(&miss); |
| 142 } |
| 143 |
| 144 |
| 145 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, |
| 146 int index, |
| 147 Register prototype) { |
| 148 UNIMPLEMENTED_(); |
| 149 // // Load the global or builtins object from the current context. |
| 150 //// __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 151 // __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 152 // // Load the global context from the global or builtins object. |
| 153 //// __ ldr(prototype, |
| 154 //// FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset)); |
| 155 // __ lw(prototype, |
| 156 // FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset)); |
| 157 // // Load the function from the global context. |
| 158 //// __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index))); |
| 159 // __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index))); |
| 160 // // Load the initial map. The global functions all have initial maps. |
| 161 //// __ ldr(prototype, |
| 162 //// FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); |
| 163 // __ lw(prototype, |
| 164 // FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); |
| 165 // // Load the prototype from the initial map. |
| 166 // __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 167 } |
| 168 |
| 169 |
| 170 // Load a fast property out of a holder object (src). In-object properties |
| 171 // are loaded directly; otherwise the property is loaded from the properties |
| 172 // fixed array. |
| 173 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, |
| 174 Register dst, Register src, |
| 175 JSObject* holder, int index) { |
| 176 UNIMPLEMENTED_(); |
| 177 // // Adjust for the number of properties stored in the holder. |
| 178 // index -= holder->map()->inobject_properties(); |
| 179 // if (index < 0) { |
| 180 // // Get the property straight out of the holder. |
| 181 // int offset = holder->map()->instance_size() + (index * kPointerSize); |
| 182 //// __ ldr(dst, FieldMemOperand(src, offset)); |
| 183 // __ lw(dst, FieldMemOperand(src, offset)); |
| 184 // } else { |
| 185 // // Calculate the offset into the properties array. |
| 186 // int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 187 //// __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); |
| 188 //// __ ldr(dst, FieldMemOperand(dst, offset)); |
| 189 // __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); |
| 190 // __ lw(dst, FieldMemOperand(dst, offset)); |
| 191 // } |
| 192 } |
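| // Offset arithmetic, as a worked example of the code above: with, say, two |
| // in-object properties, index 1 becomes -1 after the adjustment and the value |
| // is read straight from the object at instance_size() - kPointerSize, while |
| // index 3 becomes 1 and is read from the properties FixedArray at |
| // FixedArray::kHeaderSize + kPointerSize. |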
| 193 |
| 194 |
| 195 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, |
| 196 Register receiver, |
| 197 Register scratch, |
| 198 Label* miss_label) { |
| 199 UNIMPLEMENTED_(); |
| 200 // // Check that the receiver isn't a smi. |
| 201 //// __ tst(receiver, Operand(kSmiTagMask)); |
| 202 //// __ b(eq, miss_label); |
| 203 // __ andi(scratch, receiver, Operand(kSmiTagMask)); |
| 204 // __ bcond(eq, miss_label, scratch, Operand(zero_reg)); |
| 205 // __ nop(); // NOP_ADDED |
| 206 // |
| 207 // // Check that the object is a JS array. |
| 208 //// __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE); |
| 209 //// __ b(ne, miss_label); |
| 210 // __ GetObjectType(receiver, scratch, scratch); |
| 211 // __ bcond(ne, miss_label, scratch, Operand(JS_ARRAY_TYPE)); |
| 212 // __ nop(); // NOP_ADDED |
| 213 // |
| 214 // // Load length directly from the JS array. |
| 215 //// __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 216 // __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 217 // __ Ret(); |
| 218 // __ nop(); // NOP_ADDED |
| 219 } |
| 220 |
| 221 |
| 222 // Generate code to check if an object is a string. If the object is |
| 223 // a string, the map's instance type is left in the scratch1 register. |
| 224 //static void GenerateStringCheck(MacroAssembler* masm, |
| 225 // Register receiver, |
| 226 // Register scratch1, |
| 227 // Register scratch2, |
| 228 // Label* smi, |
| 229 // Label* non_string_object) { |
| 230 // // Check that the receiver isn't a smi. |
| 231 // __ tst(receiver, Operand(kSmiTagMask)); |
| 232 // __ b(eq, smi); |
| 233 // |
| 234 // // Check that the object is a string. |
| 235 // __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 236 // __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
| 237 // __ and_(scratch2, scratch1, Operand(kIsNotStringMask)); |
| 238 // // The cast is to resolve the overload for the argument of 0x0. |
| 239 // __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag))); |
| 240 // __ b(ne, non_string_object); |
| 241 //} |
| 242 |
| 243 |
| 244 // Generate code to load the length from a string object and return the length. |
| 245 // If the receiver object is not a string or a wrapped string object, the |
| 246 // execution continues at the miss label. The register containing the |
| 247 // receiver is potentially clobbered. |
| 248 void StubCompiler::GenerateLoadStringLength2(MacroAssembler* masm, |
| 249 Register receiver, |
| 250 Register scratch1, |
| 251 Register scratch2, |
| 252 Label* miss) { |
| 253 UNIMPLEMENTED_(); |
| 254 __ break_(0x249); |
| 255 // Label check_string, check_wrapper; |
| 256 // |
| 257 // __ bind(&check_string); |
| 258 // // Check if the object is a string leaving the instance type in the |
| 259 // // scratch1 register. |
| 260 // GenerateStringCheck(masm, receiver, scratch1, scratch2, |
| 261 // miss, &check_wrapper); |
| 262 // |
| 263 // // Load length directly from the string. |
| 264 // __ and_(scratch1, scratch1, Operand(kStringSizeMask)); |
| 265 // __ add(scratch1, scratch1, Operand(String::kHashShift)); |
| 266 // __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset)); |
| 267 // __ mov(r0, Operand(r0, LSR, scratch1)); |
| 268 // __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
| 269 // __ Ret(); |
| 270 // |
| 271 // // Check if the object is a JSValue wrapper. |
| 272 // __ bind(&check_wrapper); |
| 273 // __ cmp(scratch1, Operand(JS_VALUE_TYPE)); |
| 274 // __ b(ne, miss); |
| 275 // |
| 276 // // Unwrap the value in place and check if the wrapped value is a string. |
| 277 // __ ldr(receiver, FieldMemOperand(receiver, JSValue::kValueOffset)); |
| 278 // __ b(&check_string); |
| 279 } |
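| // The __ break_(0xNNN) calls in this file emit MIPS "break" instructions; the |
| // immediates look like ad-hoc markers so that hitting an unported stub traps |
| // at an identifiable spot in the simulator or debugger. |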
| 280 |
| 281 |
| 282 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, |
| 283 Register receiver, |
| 284 Register scratch1, |
| 285 Register scratch2, |
| 286 Label* miss_label) { |
| 287 UNIMPLEMENTED_(); |
| 288 //// __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); |
| 289 //// __ mov(r0, scratch1); |
| 290 //// __ Ret(); |
| 291 // __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); |
| 292 // __ mov(v0, scratch1); |
| 293 // __ Ret(); |
| 294 // __ nop(); // NOP_ADDED |
| 295 } |
| 296 |
| 297 |
| 298 // Generate StoreField code; the value is passed in the a0 register. |
| 299 // After executing generated code, the receiver_reg and name_reg |
| 300 // may be clobbered. |
| 301 void StubCompiler::GenerateStoreField(MacroAssembler* masm, |
| 302 Builtins::Name storage_extend, |
| 303 JSObject* object, |
| 304 int index, |
| 305 Map* transition, |
| 306 Register receiver_reg, |
| 307 Register name_reg, |
| 308 Register scratch, |
| 309 Label* miss_label) { |
| 310 UNIMPLEMENTED_(); |
| 311 // // a0 : value |
| 312 // Label exit; |
| 313 // |
| 314 // // Check that the receiver isn't a smi. |
| 315 //// __ tst(receiver_reg, Operand(kSmiTagMask)); |
| 316 //// __ b(eq, miss_label); |
| 317 // __ andi(t0, receiver_reg, Operand(kSmiTagMask)); |
| 318 // __ bcond(eq, miss_label, t0, Operand(zero_reg)); |
| 319 // __ nop(); // NOP_ADDED |
| 320 // |
| 321 // // Check that the map of the receiver hasn't changed. |
| 322 //// __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
| 323 //// __ cmp(scratch, Operand(Handle<Map>(object->map()))); |
| 324 //// __ b(ne, miss_label); |
| 325 // __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
| 326 // __ bcond(ne, miss_label, scratch, Operand(Handle<Map>(object->map()))); |
| 327 // __ nop(); // NOP_ADDED |
| 328 // |
| 329 // // Perform global security token check if needed. |
| 330 // if (object->IsJSGlobalProxy()) { |
| 331 // __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label); |
| 332 // } |
| 333 // |
| 334 // // Stub never generated for non-global objects that require access |
| 335 // // checks. |
| 336 // ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); |
| 337 // |
| 338 // // Perform map transition for the receiver if necessary. |
| 339 // if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) { |
| 340 // // The properties must be extended before we can store the value. |
| 341 // // We jump to a runtime call that extends the properties array. |
| 342 //// __ mov(r2, Operand(Handle<Map>(transition))); |
| 343 // __ li(a2, Operand(Handle<Map>(transition))); |
| 344 // // Please note, if we implement keyed store for arm we need |
| 345 // // to call the Builtins::KeyedStoreIC_ExtendStorage. |
| 346 // Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_ExtendStorage)); |
| 347 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 348 // __ nop(); // NOP_ADDED |
| 349 // return; |
| 350 // } |
| 351 // |
| 352 // if (transition != NULL) { |
| 353 // // Update the map of the object; no write barrier updating is |
| 354 // // needed because the map is never in new space. |
| 355 //// __ mov(ip, Operand(Handle<Map>(transition))); |
| 356 //// __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
| 357 // __ li(ip, Operand(Handle<Map>(transition))); |
| 358 // __ sw(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
| 359 // } |
| 360 // |
| 361 // // Adjust for the number of properties stored in the object. Even in the |
| 362 // // face of a transition we can use the old map here because the size of the |
| 363 // // object and the number of in-object properties is not going to change. |
| 364 // index -= object->map()->inobject_properties(); |
| 365 // |
| 366 // if (index < 0) { |
| 367 // // Set the property straight into the object. |
| 368 // int offset = object->map()->instance_size() + (index * kPointerSize); |
| 369 //// __ str(r0, FieldMemOperand(receiver_reg, offset)); |
| 370 // __ sw(a0, FieldMemOperand(receiver_reg, offset)); |
| 371 // |
| 372 // // Skip updating write barrier if storing a smi. |
| 373 //// __ tst(r0, Operand(kSmiTagMask)); |
| 374 //// __ b(eq, &exit); |
| 375 // __ andi(t0, a0, Operand(kSmiTagMask)); |
| 376 // __ bcond(eq, &exit, t0, Operand(zero_reg)); |
| 377 // __ nop(); // NOP_ADDED |
| 378 // |
| 379 // // Update the write barrier for the array address. |
| 380 // // Pass the value being stored in the now unused name_reg. |
| 381 //// __ mov(name_reg, Operand(offset)); |
| 382 // __ li(name_reg, Operand(offset)); |
| 383 // __ RecordWrite(receiver_reg, name_reg, scratch); |
| 384 // } else { |
| 385 // // Write to the properties array. |
| 386 // int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 387 // // Get the properties array |
| 388 //// __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 389 //// __ str(r0, FieldMemOperand(scratch, offset)); |
| 390 // __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 391 // __ sw(a0, FieldMemOperand(scratch, offset)); |
| 392 // |
| 393 // // Skip updating write barrier if storing a smi. |
| 394 //// __ tst(r0, Operand(kSmiTagMask)); |
| 395 //// __ b(eq, &exit); |
| 396 // __ andi(t0, a0, Operand(kSmiTagMask)); |
| 397 // __ bcond(eq, &exit, t0, Operand(zero_reg)); |
| 398 // __ nop(); // NOP_ADDED |
| 399 // |
| 400 // // Update the write barrier for the array address. |
| 401 // // Ok to clobber receiver_reg and name_reg, since we return. |
| 402 //// __ mov(name_reg, Operand(offset)); |
| 403 // __ li(name_reg, Operand(offset)); |
| 404 // __ RecordWrite(scratch, name_reg, receiver_reg); |
| 405 // } |
| 406 // |
| 407 // // Return the value (register v0). |
| 408 // __ bind(&exit); |
| 409 // __ mov(v0, a0); |
| 410 // __ Ret(); |
| 411 // __ nop(); // NOP_ADDED |
| 412 } |
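| // On the write barrier above: the smi check is the usual shortcut. Smis are |
| // encoded in the pointer itself rather than on the heap, so storing one can |
| // never create a pointer that the remembered set needs to track, and |
| // RecordWrite can be skipped. |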
| 413 |
| 414 |
| 415 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { |
| 416 UNIMPLEMENTED_(); |
| 417 // ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); |
| 418 // Code* code = NULL; |
| 419 // if (kind == Code::LOAD_IC) { |
| 420 // code = Builtins::builtin(Builtins::LoadIC_Miss); |
| 421 // } else { |
| 422 // code = Builtins::builtin(Builtins::KeyedLoadIC_Miss); |
| 423 // } |
| 424 // |
| 425 // Handle<Code> ic(code); |
| 426 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 427 // __ nop(); |
| 428 } |
| 429 |
| 430 |
| 431 #undef __ |
| 432 #define __ ACCESS_MASM(masm()) |
| 433 |
| 434 |
| 435 Register StubCompiler::CheckPrototypes(JSObject* object, |
| 436 Register object_reg, |
| 437 JSObject* holder, |
| 438 Register holder_reg, |
| 439 Register scratch, |
| 440 String* name, |
| 441 Label* miss) { |
| 442 UNIMPLEMENTED_(); |
| 443 // // Check that the maps haven't changed. |
| 444 // Register result = |
| 445 // masm()->CheckMaps(object, object_reg, holder, holder_reg, scratch, miss); |
| 446 // |
| 447 // // If we've skipped any global objects, it's not enough to verify |
| 448 // // that their maps haven't changed. |
| 449 // while (object != holder) { |
| 450 // if (object->IsGlobalObject()) { |
| 451 // GlobalObject* global = GlobalObject::cast(object); |
| 452 // Object* probe = global->EnsurePropertyCell(name); |
| 453 // if (probe->IsFailure()) { |
| 454 // set_failure(Failure::cast(probe)); |
| 455 // return result; |
| 456 // } |
| 457 // JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); |
| 458 // ASSERT(cell->value()->IsTheHole()); |
| 459 //// __ mov(scratch, Operand(Handle<Object>(cell))); |
| 460 //// __ ldr(scratch, |
| 461 //// FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); |
| 462 //// __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 463 //// __ cmp(scratch, ip); |
| 464 //// __ b(ne, miss); |
| 465 // __ li(scratch, Operand(Handle<Object>(cell))); |
| 466 // __ lw(scratch, |
| 467 // FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); |
| 468 // __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 469 // __ bcond(ne, miss, scratch, Operand(ip)); |
| 470 // __ nop(); // NOP_ADDED |
| 471 // } |
| 472 // object = JSObject::cast(object->GetPrototype()); |
| 473 // } |
| 474 // |
| 475 // // Return the register containing the holder. |
| 476 // return result; |
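| // Note that "at" is the MIPS assembler-temporary register; returning it below |
| // is only a placeholder so the stubbed-out function compiles until the real |
| // CheckPrototypes port is in place. |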
| 477 return at; // UNIMPLEMENTED RETURN |
| 478 } |
| 479 |
| 480 |
| 481 void StubCompiler::GenerateLoadField(JSObject* object, |
| 482 JSObject* holder, |
| 483 Register receiver, |
| 484 Register scratch1, |
| 485 Register scratch2, |
| 486 int index, |
| 487 String* name, |
| 488 Label* miss) { |
| 489 UNIMPLEMENTED_(); |
| 490 // // Check that the receiver isn't a smi. |
| 491 //// __ tst(receiver, Operand(kSmiTagMask)); |
| 492 //// __ b(eq, miss); |
| 493 // __ andi(t0, receiver, Operand(kSmiTagMask)); |
| 494 // __ bcond(eq, miss, t0, Operand(zero_reg)); |
| 495 // __ nop(); // NOP_ADDED |
| 496 // |
| 497 // // Check that the maps haven't changed. |
| 498 // Register reg = |
| 499 // CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); |
| 500 // GenerateFastPropertyLoad(masm(), v0, reg, holder, index); |
| 501 // __ Ret(); |
| 502 // __ nop(); // NOP_ADDED |
| 503 } |
| 504 |
| 505 |
| 506 void StubCompiler::GenerateLoadConstant(JSObject* object, |
| 507 JSObject* holder, |
| 508 Register receiver, |
| 509 Register scratch1, |
| 510 Register scratch2, |
| 511 Object* value, |
| 512 String* name, |
| 513 Label* miss) { |
| 514 UNIMPLEMENTED_(); |
| 515 // // Check that the receiver isn't a smi. |
| 516 //// __ tst(receiver, Operand(kSmiTagMask)); |
| 517 //// __ b(eq, miss); |
| 518 // __ andi(t0, receiver, Operand(kSmiTagMask)); |
| 519 // __ bcond(eq, miss, t0, Operand(zero_reg)); |
| 520 // __ nop(); // NOP_ADDED |
| 521 // |
| 522 // // Check that the maps haven't changed. |
| 523 // Register reg = |
| 524 // CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); |
| 525 // |
| 526 // // Return the constant value. |
| 527 //// __ mov(r0, Operand(Handle<Object>(value))); |
| 528 // __ li(v0, Operand(Handle<Object>(value))); |
| 529 // __ Ret(); |
| 530 // __ nop(); // NOP_ADDED |
| 531 } |
| 532 |
| 533 |
| 534 void StubCompiler::GenerateLoadCallback(JSObject* object, |
| 535 JSObject* holder, |
| 536 Register receiver, |
| 537 Register name_reg, |
| 538 Register scratch1, |
| 539 Register scratch2, |
| 540 AccessorInfo* callback, |
| 541 String* name, |
| 542 Label* miss) { |
| 543 UNIMPLEMENTED(); |
| 544 __ break_(0x470); |
| 545 // // Check that the receiver isn't a smi. |
| 546 // __ tst(receiver, Operand(kSmiTagMask)); |
| 547 // __ b(eq, miss); |
| 548 // |
| 549 // // Check that the maps haven't changed. |
| 550 // Register reg = |
| 551 // CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); |
| 552 // |
| 553 // // Push the arguments on the JS stack of the caller. |
| 554 // __ push(receiver); // receiver |
| 555 // __ push(reg); // holder |
| 556 // __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback data |
| 557 // __ push(ip); |
| 558 // __ ldr(reg, FieldMemOperand(ip, AccessorInfo::kDataOffset)); |
| 559 // __ push(reg); |
| 560 // __ push(name_reg); // name |
| 561 // |
| 562 // // Do tail-call to the runtime system. |
| 563 // ExternalReference load_callback_property = |
| 564 // ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); |
| 565 // __ TailCallRuntime(load_callback_property, 5, 1); |
| 566 } |
| 567 |
| 568 |
| 569 void StubCompiler::GenerateLoadInterceptor(JSObject* object, |
| 570 JSObject* holder, |
| 571 LookupResult* lookup, |
| 572 Register receiver, |
| 573 Register name_reg, |
| 574 Register scratch1, |
| 575 Register scratch2, |
| 576 String* name, |
| 577 Label* miss) { |
| 578 UNIMPLEMENTED(); |
| 579 __ break_(0x505); |
| 580 // // Check that the receiver isn't a smi. |
| 581 // __ tst(receiver, Operand(kSmiTagMask)); |
| 582 // __ b(eq, miss); |
| 583 // |
| 584 // // Check that the maps haven't changed. |
| 585 // Register reg = |
| 586 // CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); |
| 587 // |
| 588 // // Push the arguments on the JS stack of the caller. |
| 589 // __ push(receiver); // receiver |
| 590 // __ push(reg); // holder |
| 591 // __ push(name_reg); // name |
| 592 // |
| 593 // InterceptorInfo* interceptor = holder->GetNamedInterceptor(); |
| 594 // ASSERT(!Heap::InNewSpace(interceptor)); |
| 595 // __ mov(scratch1, Operand(Handle<Object>(interceptor))); |
| 596 // __ push(scratch1); |
| 597 // __ ldr(scratch2, FieldMemOperand(scratch1, InterceptorInfo::kDataOffset)); |
| 598 // __ push(scratch2); |
| 599 // |
| 600 // // Do tail-call to the runtime system. |
| 601 // ExternalReference load_ic_property = |
| 602 // ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad)); |
| 603 // __ TailCallRuntime(load_ic_property, 5, 1); |
| 604 } |
| 605 |
| 606 |
| 607 Object* StubCompiler::CompileLazyCompile(Code::Flags flags) { |
| 608 UNIMPLEMENTED_(); |
| 609 // // ----------- S t a t e ------------- |
| 610 // // -- r1->a1: function |
| 611 // // -- ls->ra: return address |
| 612 // // ----------------------------------- |
| 613 // |
| 614 // // Enter an internal frame. |
| 615 // __ EnterInternalFrame(); |
| 616 // |
| 617 // // Preserve the function. |
| 618 // __ push(a1); |
| 619 // |
| 620 // // Setup aligned call. arg_count=1 as we will only push the function (a1). |
| 621 // __ SetupAlignedCall(t0, 1); |
| 622 // |
| 623 // // Push the function on the stack as the argument to the runtime function. |
| 624 // __ push(a1); |
| 625 // |
| 626 // // Call the runtime function |
| 627 // __ CallRuntime(Runtime::kLazyCompile, 1); |
| 628 // __ nop(); // NOP_ADDED |
| 629 // |
| 630 // // Calculate the entry point. |
| 631 // __ addiu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 632 // |
| 633 // __ ReturnFromAlignedCall(); |
| 634 // |
| 635 // // Restore saved function. |
| 636 // __ pop(a1); |
| 637 // |
| 638 // // Tear down temporary frame. |
| 639 // __ LeaveInternalFrame(); |
| 640 // |
| 641 // // Do a tail-call of the compiled function. |
| 642 // __ Jump(t9); |
| 643 //// __ addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize); |
| 644 //// __ addiu(sp, sp, StandardFrameConstants::kRArgsSlotsSize); |
| 645 // __ nop(); |
| 646 // |
| 647 // return GetCodeWithFlags(flags, "LazyCompileStub"); |
| 648 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 649 } |
| 650 |
| 651 |
| 652 Object* CallStubCompiler::CompileCallField(Object* object, |
| 653 JSObject* holder, |
| 654 int index, |
| 655 String* name) { |
| 656 UNIMPLEMENTED_(); |
| 657 // // ----------- S t a t e ------------- |
| 658 // // -- ra: return address |
| 659 // // ----------------------------------- |
| 660 // Label miss; |
| 661 // |
| 662 // const int argc = arguments().immediate(); |
| 663 // |
| 664 // __ break_(0x649); |
| 665 // |
| 666 // // Get the receiver of the function from the stack into a0. |
| 667 //// __ ldr(r0, MemOperand(sp, argc * kPointerSize)); |
| 668 // __ lw(a0, MemOperand(sp, argc * kPointerSize |
| 669 // + StandardFrameConstants::kRArgsSlotsSize)); |
| 670 // // Check that the receiver isn't a smi. |
| 671 //// __ tst(r0, Operand(kSmiTagMask)); |
| 672 //// __ b(eq, &miss); |
| 673 // __ andi(t0, a0, Operand(kSmiTagMask)); |
| 674 // __ bcond(eq, &miss, t0, Operand(zero_reg)); |
| 675 // __ nop(); // NOP_ADDED |
| 676 // |
| 677 // // Do the right check and compute the holder register. |
| 678 // Register reg = |
| 679 // CheckPrototypes(JSObject::cast(object), a0, holder, a3, a2, name, &miss); |
| 680 // GenerateFastPropertyLoad(masm(), a1, reg, holder, index); |
| 681 // |
| 682 // // Check that the function really is a function. |
| 683 //// __ tst(r1, Operand(kSmiTagMask)); |
| 684 //// __ b(eq, &miss); |
| 685 // __ andi(t1, a1, Operand(kSmiTagMask)); |
| 686 // __ bcond(eq, &miss, t1, Operand(zero_reg)); |
| 687 // __ nop(); // NOP_ADDED |
| 688 // // Get the map. |
| 689 //// __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); |
| 690 //// __ b(ne, &miss); |
| 691 // __ GetObjectType(a1, a2, a2); |
| 692 // __ bcond(ne, &miss, a2, Operand(JS_FUNCTION_TYPE)); |
| 693 // __ nop(); // NOP_ADDED |
| 694 // |
| 695 // // Patch the receiver on the stack with the global proxy if |
| 696 // // necessary. |
| 697 // if (object->IsGlobalObject()) { |
| 698 //// __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); |
| 699 //// __ str(r3, MemOperand(sp, argc * kPointerSize)); |
| 700 // __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); |
| 701 // __ break_(0x645); |
| 702 // __ sw(a3, MemOperand(sp, argc * kPointerSize |
| 703 // + StandardFrameConstants::kRArgsSlotsSize)); |
| 704 // } |
| 705 // |
| 706 // __ break_(0x688); |
| 707 // // Invoke the function. |
| 708 // __ InvokeFunction(a1, arguments(), JUMP_FUNCTION); |
| 709 // __ nop(); // NOP_ADDED |
| 710 // |
| 711 // // Handle call cache miss. |
| 712 // __ bind(&miss); |
| 713 // Handle<Code> ic = ComputeCallMiss(arguments().immediate()); |
| 714 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 715 // __ nop(); // NOP_ADDED |
| 716 // |
| 717 // // Return the generated code. |
| 718 // return GetCode(FIELD, name); |
| 719 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 720 } |
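| // The StandardFrameConstants::kRArgsSlotsSize term in the sp offsets above |
| // appears to account for the four register-argument slots the MIPS o32 |
| // convention reserves at the bottom of a frame, which sit between sp and the |
| // pushed JS arguments in this port. |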
| 721 |
| 722 |
| 723 Object* CallStubCompiler::CompileCallConstant(Object* object, |
| 724 JSObject* holder, |
| 725 JSFunction* function, |
| 726 String* name, |
| 727 CheckType check) { |
| 728 UNIMPLEMENTED_(); |
| 729 // // ----------- S t a t e ------------- |
| 730 // // -- ra: return address |
| 731 // // ----------------------------------- |
| 732 // Label miss; |
| 733 // |
| 734 // // Get the receiver from the stack |
| 735 // const int argc = arguments().immediate(); |
| 736 // __ break_(0x719); |
| 737 //// __ ldr(r1, MemOperand(sp, argc * kPointerSize)); |
| 738 // __ lw(a1, MemOperand(sp, argc * kPointerSize |
| 739 // + StandardFrameConstants::kRArgsSlotsSize)); |
| 740 // |
| 741 // // Check that the receiver isn't a smi. |
| 742 // if (check != NUMBER_CHECK) { |
| 743 //// __ tst(r1, Operand(kSmiTagMask)); |
| 744 //// __ b(eq, &miss); |
| 745 // __ andi(t1, a1, Operand(kSmiTagMask)); |
| 746 // __ bcond(eq, &miss, t1, Operand(zero_reg)); |
| 747 // __ nop(); // NOP_ADDED |
| 748 // } |
| 749 // |
| 750 // // Make sure that it's okay not to patch the on stack receiver |
| 751 // // unless we're doing a receiver map check. |
| 752 // ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); |
| 753 // |
| 754 // switch (check) { |
| 755 // case RECEIVER_MAP_CHECK: |
| 756 // // Check that the maps haven't changed. |
| 757 // CheckPrototypes(JSObject::cast(object), a1, holder, a3, a2, name, &miss); |
| 758 // |
| 759 // // Patch the receiver on the stack with the global proxy if |
| 760 // // necessary. |
| 761 // if (object->IsGlobalObject()) { |
| 762 // __ break_(0x745); |
| 763 //// __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); |
| 764 //// __ str(r3, MemOperand(sp, argc * kPointerSize)); |
| 765 // __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); |
| 766 // __ sw(a3, MemOperand(sp, argc * kPointerSize |
| 767 // + StandardFrameConstants::kRArgsSlotsSize)); |
| 768 // } |
| 769 // break; |
| 770 // |
| 771 // case STRING_CHECK: |
| 772 // // Check that the object is a two-byte string or a symbol. |
| 773 //// __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE); |
| 774 //// __ b(hs, &miss); |
| 775 // __ GetObjectType(a1, a2, a2); |
| 776 // __ bcond(Ugreater_equal, &miss, a2, Operand(FIRST_NONSTRING_TYPE)); |
| 777 // __ nop(); // NOP_ADDED |
| 778 // // Check that the maps starting from the prototype haven't changed. |
| 779 // GenerateLoadGlobalFunctionPrototype(masm(), |
| 780 // Context::STRING_FUNCTION_INDEX, |
| 781 // a2); |
| 782 // CheckPrototypes(JSObject::cast(object->GetPrototype()), a2, holder, a3, |
| 783 // a1, name, &miss); |
| 784 // break; |
| 785 // |
| 786 // case NUMBER_CHECK: { |
| 787 // Label fast; |
| 788 // // Check that the object is a smi or a heap number. |
| 789 //// __ tst(r1, Operand(kSmiTagMask)); |
| 790 //// __ b(eq, &fast); |
| 791 //// __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE); |
| 792 //// __ b(ne, &miss); |
| 793 //// __ bind(&fast); |
| 794 // __ andi(t1, a1, Operand(kSmiTagMask)); |
| 795 // __ bcond(eq, &fast, t1, Operand(zero_reg)); |
| 796 // __ nop(); // NOP_ADDED |
| 797 // __ GetObjectType(a1, a2, a2); |
| 798 // __ bcond(ne, &miss, a2, Operand(HEAP_NUMBER_TYPE)); |
| 799 // __ nop(); // NOP_ADDED |
| 800 // __ bind(&fast); |
| 801 // // Check that the maps starting from the prototype haven't changed. |
| 802 // GenerateLoadGlobalFunctionPrototype(masm(), |
| 803 // Context::NUMBER_FUNCTION_INDEX, |
| 804 // a2); |
| 805 // CheckPrototypes(JSObject::cast(object->GetPrototype()), a2, holder, a3, |
| 806 // a1, name, &miss); |
| 807 // break; |
| 808 // } |
| 809 //// |
| 810 // case BOOLEAN_CHECK: { |
| 811 // Label fast; |
| 812 // // Check that the object is a boolean. |
| 813 //// __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 814 //// __ cmp(r1, ip); |
| 815 //// __ b(eq, &fast); |
| 816 //// __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 817 //// __ cmp(r1, ip); |
| 818 //// __ b(ne, &miss); |
| 819 //// __ bind(&fast); |
| 820 // __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 821 // __ bcond(eq, &fast, a1, Operand(ip)); |
| 822 // __ nop(); // NOP_ADDED |
| 823 // __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 824 // __ bcond(ne, &miss, a1, Operand(ip)); |
| 825 // __ nop(); // NOP_ADDED |
| 826 // __ bind(&fast); |
| 827 // // Check that the maps starting from the prototype haven't changed. |
| 828 // GenerateLoadGlobalFunctionPrototype(masm(), |
| 829 // Context::BOOLEAN_FUNCTION_INDEX, |
| 830 // a2); |
| 831 // CheckPrototypes(JSObject::cast(object->GetPrototype()), a2, holder, a3, |
| 832 // a1, name, &miss); |
| 833 // break; |
| 834 // } |
| 835 // |
| 836 // case JSARRAY_HAS_FAST_ELEMENTS_CHECK: |
| 837 // CheckPrototypes(JSObject::cast(object), a1, holder, a3, a2, name, &miss); |
| 838 // // Make sure object->HasFastElements(). |
| 839 // // Get the elements array of the object. |
| 840 //// __ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset)); |
| 841 // __ lw(a3, FieldMemOperand(a1, JSObject::kElementsOffset)); |
| 842 // // Check that the object is in fast mode (not dictionary). |
| 843 //// __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 844 //// __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |
| 845 //// __ cmp(r2, ip); |
| 846 //// __ b(ne, &miss); |
| 847 // __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); |
| 848 // __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |
| 849 // __ bcond(ne, &miss, a2, Operand(ip)); |
| 850 // __ nop(); // NOP_ADDED |
| 851 // break; |
| 852 //// |
| 853 // default: |
| 854 // UNREACHABLE(); |
| 855 // } |
| 856 // |
| 857 // // Get the function and setup the context. |
| 858 //// __ mov(r1, Operand(Handle<JSFunction>(function))); |
| 859 //// __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 860 // __ li(a1, Operand(Handle<JSFunction>(function))); |
| 861 // __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 862 // |
| 863 // // Jump to the cached code (tail call). |
| 864 // ASSERT(function->is_compiled()); |
| 865 // Handle<Code> code(function->code()); |
| 866 // ParameterCount expected(function->shared()->formal_parameter_count()); |
| 867 // __ InvokeCode(code, expected, arguments(), |
| 868 // RelocInfo::CODE_TARGET, JUMP_FUNCTION); |
| 869 // __ nop(); // NOP_ADDED |
| 870 // |
| 871 // // Handle call cache miss. |
| 872 // __ bind(&miss); |
| 873 // Handle<Code> ic = ComputeCallMiss(arguments().immediate()); |
| 874 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 875 // __ nop(); // NOP_ADDED |
| 876 // |
| 877 // // Return the generated code. |
| 878 // String* function_name = NULL; |
| 879 // if (function->shared()->name()->IsString()) { |
| 880 // function_name = String::cast(function->shared()->name()); |
| 881 // } |
| 882 // return GetCode(CONSTANT_FUNCTION, function_name); |
| 883 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 884 } |
| 885 |
| 886 |
| 887 Object* CallStubCompiler::CompileCallInterceptor(Object* object, |
| 888 JSObject* holder, |
| 889 String* name) { |
| 890 UNIMPLEMENTED(); |
| 891 __ break_(0x782); |
| 892 // // ----------- S t a t e ------------- |
| 893 // // -- lr: return address |
| 894 // // ----------------------------------- |
| 895 // Label miss; |
| 896 // |
| 897 // // TODO(1224669): Implement. |
| 898 // |
| 899 // // Handle call cache miss. |
| 900 // __ bind(&miss); |
| 901 // Handle<Code> ic = ComputeCallMiss(arguments().immediate()); |
| 902 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 903 // __ nop(); // NOP_ADDED |
| 904 // |
| 905 // // Return the generated code. |
| 906 return GetCode(INTERCEPTOR, name); |
| 907 } |
| 908 |
| 909 |
| 910 Object* CallStubCompiler::CompileCallGlobal(JSObject* object, |
| 911 GlobalObject* holder, |
| 912 JSGlobalPropertyCell* cell, |
| 913 JSFunction* function, |
| 914 String* name) { |
| 915 UNIMPLEMENTED_(); |
| 916 // // ----------- S t a t e ------------- |
| 917 // // -- ra: return address |
| 918 // // ----------------------------------- |
| 919 // Label miss; |
| 920 // |
| 921 // // Get the number of arguments. |
| 922 // const int argc = arguments().immediate(); |
| 923 // |
| 924 // // Get the receiver from the stack. |
| 925 //// __ ldr(r0, MemOperand(sp, argc * kPointerSize)); |
| 926 //// __ lw(a0, MemOperand(sp, argc * kPointerSize)); |
| 927 // __ teq(fp, zero_reg, 0x111); |
| 928 // __ lw(a0, MemOperand(sp, argc * kPointerSize)); |
| 929 //// + StandardFrameConstants::kRArgsSlotsSize)); |
| 930 // |
| 931 // // If the object is the holder then we know that it's a global |
| 932 // // object which can only happen for contextual calls. In this case, |
| 933 // // the receiver cannot be a smi. |
| 934 // if (object != holder) { |
| 935 //// __ tst(r0, Operand(kSmiTagMask)); |
| 936 //// __ b(eq, &miss); |
| 937 // __ andi(t0, a0, Operand(kSmiTagMask)); |
| 938 // __ bcond(eq, &miss, t0, Operand(zero_reg)); |
| 939 // __ nop(); // NOP_ADDED |
| 940 // } |
| 941 // |
| 942 // // Check that the maps haven't changed. |
| 943 // CheckPrototypes(object, a0, holder, a3, a2, name, &miss); |
| 944 // |
| 945 // // Get the value from the cell. |
| 946 //// __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 947 //// __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); |
| 948 // __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 949 // __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset)); |
| 950 // |
| 951 // // Check that the cell contains the same function. |
| 952 //// __ cmp(r1, Operand(Handle<JSFunction>(function))); |
| 953 //// __ b(ne, &miss); |
| 954 // __ bcond(ne, &miss, a1, Operand(Handle<JSFunction>(function))); |
| 955 // __ nop(); // NOP_ADDED |
| 956 // |
| 957 // // Patch the receiver on the stack with the global proxy if |
| 958 // // necessary. |
| 959 // if (object->IsGlobalObject()) { |
| 960 //// __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); |
| 961 //// __ str(r3, MemOperand(sp, argc * kPointerSize)); |
| 962 // __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); |
| 963 // __ sw(a3, MemOperand(sp, argc * kPointerSize)); |
| 964 //// + StandardFrameConstants::kRArgsSlotsSize)); |
| 965 // } |
| 966 // |
| 967 // // Setup the context (function already in r1). |
| 968 //// __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 969 // __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 970 // |
| 971 // // Jump to the cached code (tail call). |
| 972 // __ IncrementCounter(&Counters::call_global_inline, 1, a2, a3); |
| 973 // ASSERT(function->is_compiled()); |
| 974 // Handle<Code> code(function->code()); |
| 975 // ParameterCount expected(function->shared()->formal_parameter_count()); |
| 976 // __ InvokeCode(code, expected, arguments(), |
| 977 // RelocInfo::CODE_TARGET, JUMP_FUNCTION); |
| 978 // __ nop(); // NOP_ADDED |
| 979 // |
| 980 // // Handle call cache miss. |
| 981 // __ bind(&miss); |
| 982 // __ IncrementCounter(&Counters::call_global_inline_miss, 1, a1, a3); |
| 983 // Handle<Code> ic = ComputeCallMiss(arguments().immediate()); |
| 984 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 985 // __ nop(); // NOP_ADDED |
| 986 // |
| 987 // // Return the generated code. |
| 988 // return GetCode(NORMAL, name); |
| 989 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 990 } |
| 991 |
| 992 |
| 993 Object* StoreStubCompiler::CompileStoreField(JSObject* object, |
| 994 int index, |
| 995 Map* transition, |
| 996 String* name) { |
| 997 UNIMPLEMENTED_(); |
| 998 // // ----------- S t a t e ------------- |
| 999 // // -- a0 : value |
| 1000 // // -- a2 : name |
| 1001 // // -- ra : return address |
| 1002 // // -- [sp] : receiver |
| 1003 // // ----------------------------------- |
| 1004 // Label miss; |
| 1005 // |
| 1006 // // Get the receiver from the stack. |
| 1007 //// __ ldr(r3, MemOperand(sp, 0 * kPointerSize)); |
| 1008 // __ lw(a3, MemOperand(sp, 0 * kPointerSize)); |
| 1009 // |
| 1010 // // name register might be clobbered. |
| 1011 // GenerateStoreField(masm(), |
| 1012 // Builtins::StoreIC_ExtendStorage, |
| 1013 // object, |
| 1014 // index, |
| 1015 // transition, |
| 1016 // a3, a2, a1, |
| 1017 // &miss); |
| 1018 // __ bind(&miss); |
| 1019 //// __ mov(r2, Operand(Handle<String>(name))); // restore name |
| 1020 // __ li(a2, Operand(Handle<String>(name))); // restore name |
| 1021 // Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); |
| 1022 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 1023 // __ nop(); // NOP_ADDED |
| 1024 // |
| 1025 // // Return the generated code. |
| 1026 // return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); |
| 1027 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1028 } |
| 1029 |
| 1030 |
| 1031 Object* StoreStubCompiler::CompileStoreCallback(JSObject* object, |
| 1032 AccessorInfo* callback, |
| 1033 String* name) { |
| 1034 UNIMPLEMENTED(); |
| 1035 __ break_(0x906); |
| 1036 // // ----------- S t a t e ------------- |
| 1037 // // -- r0 : value |
| 1038 // // -- r2 : name |
| 1039 // // -- lr : return address |
| 1040 // // -- [sp] : receiver |
| 1041 // // ----------------------------------- |
| 1042 // Label miss; |
| 1043 // |
| 1044 // // Get the object from the stack. |
| 1045 // __ ldr(r3, MemOperand(sp, 0 * kPointerSize)); |
| 1046 // |
| 1047 // // Check that the object isn't a smi. |
| 1048 // __ tst(r3, Operand(kSmiTagMask)); |
| 1049 // __ b(eq, &miss); |
| 1050 // |
| 1051 // // Check that the map of the object hasn't changed. |
| 1052 // __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 1053 // __ cmp(r1, Operand(Handle<Map>(object->map()))); |
| 1054 // __ b(ne, &miss); |
| 1055 // |
| 1056 // // Perform global security token check if needed. |
| 1057 // if (object->IsJSGlobalProxy()) { |
| 1058 // __ CheckAccessGlobalProxy(r3, r1, &miss); |
| 1059 // } |
| 1060 // |
| 1061 // // Stub never generated for non-global objects that require access |
| 1062 // // checks. |
| 1063 // ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); |
| 1064 // |
| 1065 // __ ldr(ip, MemOperand(sp)); // receiver |
| 1066 // __ push(ip); |
| 1067 // __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback info |
| 1068 // __ push(ip); |
| 1069 // __ push(r2); // name |
| 1070 // __ push(r0); // value |
| 1071 // |
| 1072 // // Do tail-call to the runtime system. |
| 1073 // ExternalReference store_callback_property = |
| 1074 // ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); |
| 1075 // __ TailCallRuntime(store_callback_property, 4, 1); |
| 1076 // |
| 1077 // // Handle store cache miss. |
| 1078 // __ bind(&miss); |
| 1079 // __ mov(r2, Operand(Handle<String>(name))); // restore name |
| 1080 // Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); |
| 1081 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 1082 // |
| 1083 // // Return the generated code. |
| 1084 // return GetCode(CALLBACKS, name); |
| 1085 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1086 } |
| 1087 |
| 1088 |
| 1089 Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, |
| 1090 String* name) { |
| 1091 UNIMPLEMENTED(); |
| 1092 __ break_(0x962); |
| 1093 // // ----------- S t a t e ------------- |
| 1094 // // -- r0 : value |
| 1095 // // -- r2 : name |
| 1096 // // -- lr : return address |
| 1097 // // -- [sp] : receiver |
| 1098 // // ----------------------------------- |
| 1099 // Label miss; |
| 1100 // |
| 1101 // // Get the object from the stack. |
| 1102 // __ ldr(r3, MemOperand(sp, 0 * kPointerSize)); |
| 1103 // |
| 1104 // // Check that the object isn't a smi. |
| 1105 // __ tst(r3, Operand(kSmiTagMask)); |
| 1106 // __ b(eq, &miss); |
| 1107 // |
| 1108 // // Check that the map of the object hasn't changed. |
| 1109 // __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 1110 // __ cmp(r1, Operand(Handle<Map>(receiver->map()))); |
| 1111 // __ b(ne, &miss); |
| 1112 // |
| 1113 // // Perform global security token check if needed. |
| 1114 // if (receiver->IsJSGlobalProxy()) { |
| 1115 // __ CheckAccessGlobalProxy(r3, r1, &miss); |
| 1116 // } |
| 1117 // |
| 1118 // // Stub never generated for non-global objects that require access |
| 1119 // // checks. |
| 1120 // ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded()); |
| 1121 // |
| 1122 // __ ldr(ip, MemOperand(sp)); // receiver |
| 1123 // __ push(ip); |
| 1124 // __ push(r2); // name |
| 1125 // __ push(r0); // value |
| 1126 // |
| 1127 // // Do tail-call to the runtime system. |
| 1128 // ExternalReference store_ic_property = |
| 1129 // ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); |
| 1130 // __ TailCallRuntime(store_ic_property, 3, 1); |
| 1131 // |
| 1132 // // Handle store cache miss. |
| 1133 // __ bind(&miss); |
| 1134 // __ mov(r2, Operand(Handle<String>(name))); // restore name |
| 1135 // Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); |
| 1136 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 1137 // |
| 1138 // // Return the generated code. |
| 1139 // return GetCode(INTERCEPTOR, name); |
| 1140 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1141 } |
| 1142 |
| 1143 |
| 1144 Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, |
| 1145 JSGlobalPropertyCell* cell, |
| 1146 String* name) { |
| 1147 UNIMPLEMENTED_(); |
| 1148 // // ----------- S t a t e ------------- |
| 1149 // // -- a0 : value |
| 1150 // // -- a2 : name |
| 1151 // // -- ra : return address |
| 1152 // // -- [sp] : receiver |
| 1153 // // ----------------------------------- |
| 1154 // Label miss; |
| 1155 // |
| 1156 // // Check that the map of the global has not changed. |
| 1157 //// __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); |
| 1158 //// __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 1159 //// __ cmp(r3, Operand(Handle<Map>(object->map()))); |
| 1160 //// __ b(ne, &miss); |
| 1161 // __ lw(a1, MemOperand(sp, 0 * kPointerSize)); |
| 1162 // __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 1163 // __ bcond(ne, &miss, a3, Operand(Handle<Map>(object->map()))); |
| 1164 // __ nop(); // NOP_ADDED |
| 1165 // |
| 1166 // // Store the value in the cell. |
| 1167 //// __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 1168 //// __ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); |
| 1169 // __ li(a2, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 1170 // __ sw(a0, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset)); |
| 1171 // |
| 1172 // __ IncrementCounter(&Counters::named_store_global_inline, 1, a1, a3); |
| 1173 // __ Ret(); |
| 1174 // __ nop(); // NOP_ADDED |
| 1175 // |
| 1176 // // Handle store cache miss. |
| 1177 // __ bind(&miss); |
| 1178 //// __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r1, r3); |
| 1179 //// Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); |
| 1180 //// __ Jump(ic, RelocInfo::CODE_TARGET); |
| 1181 // __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, a1, a3); |
| 1182 // Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); |
| 1183 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 1184 // __ nop(); // NOP_ADDED |
| 1185 // |
| 1186 // // Return the generated code. |
| 1187 // return GetCode(NORMAL, name); |
| 1188 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1189 } |
| 1190 |
| 1191 |
| 1192 Object* LoadStubCompiler::CompileLoadField(JSObject* object, |
| 1193 JSObject* holder, |
| 1194 int index, |
| 1195 String* name) { |
| 1196 UNIMPLEMENTED(); |
| 1197 __ break_(0x1063); |
| 1198 // // ----------- S t a t e ------------- |
| 1199 // // -- r2 : name |
| 1200 // // -- lr : return address |
| 1201 // // -- [sp] : receiver |
| 1202 // // ----------------------------------- |
| 1203 // Label miss; |
| 1204 // |
| 1205 // __ ldr(r0, MemOperand(sp, 0)); |
| 1206 // |
| 1207 // GenerateLoadField(object, holder, r0, r3, r1, index, name, &miss); |
| 1208 // __ bind(&miss); |
| 1209 // GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 1210 // |
| 1211 // // Return the generated code. |
| 1212 // return GetCode(FIELD, name); |
| 1213 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1214 } |
| 1215 |
| 1216 |
| 1217 Object* LoadStubCompiler::CompileLoadCallback(JSObject* object, |
| 1218 JSObject* holder, |
| 1219 AccessorInfo* callback, |
| 1220 String* name) { |
| 1221 UNIMPLEMENTED(); |
| 1222 __ break_(0x1087); |
| 1223 // // ----------- S t a t e ------------- |
| 1224 // // -- r2 : name |
| 1225 // // -- lr : return address |
| 1226 // // -- [sp] : receiver |
| 1227 // // ----------------------------------- |
| 1228 // Label miss; |
| 1229 // |
| 1230 // __ ldr(r0, MemOperand(sp, 0)); |
| 1231 // GenerateLoadCallback(object, holder, r0, r2, r3, r1, callback, name, &miss); |
| 1232 // __ bind(&miss); |
| 1233 // GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 1234 // |
| 1235 // // Return the generated code. |
| 1236 // return GetCode(CALLBACKS, name); |
| 1237 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1238 } |
| 1239 |
| 1240 |
| 1241 Object* LoadStubCompiler::CompileLoadConstant(JSObject* object, |
| 1242 JSObject* holder, |
| 1243 Object* value, |
| 1244 String* name) { |
| 1245 UNIMPLEMENTED(); |
| 1246 // // ----------- S t a t e ------------- |
| 1247 // // -- r2 : name |
| 1248 // // -- lr : return address |
| 1249 // // -- [sp] : receiver |
| 1250 // // ----------------------------------- |
| 1251 // Label miss; |
| 1252 // |
| 1253 //// __ ldr(r0, MemOperand(sp, 0)); |
| 1254 // __ lw(a0, MemOperand(sp, 0)); |
| 1255 // |
| 1256 //// GenerateLoadConstant(object, holder, r0, r3, r1, value, name, &miss); |
| 1257 // GenerateLoadConstant(object, holder, a0, a3, a1, value, name, &miss); |
| 1258 // __ bind(&miss); |
| 1259 // GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 1260 // |
| 1261 // // Return the generated code. |
| 1262 // return GetCode(CONSTANT_FUNCTION, name); |
| 1263 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1264 } |
| 1265 |
| 1266 |
| 1267 Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, |
| 1268 JSObject* holder, |
| 1269 String* name) { |
| 1270 UNIMPLEMENTED(); |
| 1271 __ break_(0x1133); |
| 1272 // // ----------- S t a t e ------------- |
| 1273 // // -- r2 : name |
| 1274 // // -- lr : return address |
| 1275 // // -- [sp] : receiver |
| 1276 // // ----------------------------------- |
| 1277 // Label miss; |
| 1278 // |
| 1279 // __ ldr(r0, MemOperand(sp, 0)); |
| 1280 // |
| 1281 // LookupResult lookup; |
| 1282 // holder->LocalLookupRealNamedProperty(name, &lookup); |
| 1283 // GenerateLoadInterceptor(object, |
| 1284 // holder, |
| 1285 // &lookup, |
| 1286 // r0, |
| 1287 // r2, |
| 1288 // r3, |
| 1289 // r1, |
| 1290 // name, |
| 1291 // &miss); |
| 1292 // __ bind(&miss); |
| 1293 // GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 1294 // |
| 1295 // // Return the generated code. |
| 1296 // return GetCode(INTERCEPTOR, name); |
| 1297 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1298 } |
| 1299 |
| 1300 |
| 1301 Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object, |
| 1302 GlobalObject* holder, |
| 1303 JSGlobalPropertyCell* cell, |
| 1304 String* name, |
| 1305 bool is_dont_delete) { |
| 1306 UNIMPLEMENTED_(); |
| 1307 // // ----------- S t a t e ------------- |
| 1308 // // -- a2 : name |
| 1309 // // -- ra : return address |
| 1310 // // -- [sp] : receiver |
| 1311 // // ----------------------------------- |
| 1312 // Label miss; |
| 1313 // |
| 1314 // // Get the receiver from the stack. |
| 1315 //// __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); |
| 1316 // __ lw(a1, MemOperand(sp, 0 * kPointerSize)); |
| 1317 // |
| 1318 // // If the object is the holder then we know that it's a global |
| 1319 // // object which can only happen for contextual calls. In this case, |
| 1320 // // the receiver cannot be a smi. |
| 1321 // if (object != holder) { |
| 1322 //// __ tst(r1, Operand(kSmiTagMask)); |
| 1323 //// __ b(eq, &miss); |
| 1324 // __ andi(t0, a1, Operand(kSmiTagMask)); |
| 1325 // __ bcond(eq, &miss, t0, Operand(zero_reg)); |
| 1326 // __ nop(); |
| 1327 // } |
| 1328 // |
| 1329 // // Check that the map of the global has not changed. |
| 1330 // CheckPrototypes(object, a1, holder, a3, a0, name, &miss); |
| 1331 // |
| 1332 // // Get the value from the cell. |
| 1333 //// __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 1334 //// __ ldr(r0, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); |
| 1335 // __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell))); |
| 1336 // __ lw(v0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset)); |
| 1337 // |
| 1338 // // Check for deleted property if property can actually be deleted. |
| 1339 // if (!is_dont_delete) { |
| 1340 //// __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 1341 //// __ cmp(r0, ip); |
| 1342 //// __ b(eq, &miss); |
| 1343 // __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 1344 // __ bcond(eq, &miss, v0, Operand(ip)); |
| 1345 // __ nop(); // NOP_ADDED |
| 1346 // } |
| 1347 // |
| 1348 //// __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3); |
| 1349 // __ IncrementCounter(&Counters::named_load_global_inline, 1, a1, a3); |
| 1350 // __ Ret(); |
| 1351 // __ nop(); // NOP_ADDED |
| 1352 // |
| 1353 // __ bind(&miss); |
| 1354 //// __ IncrementCounter(&Counters::named_load_global_inline_miss, 1, r1, r3); |
| 1355 // __ IncrementCounter(&Counters::named_load_global_inline_miss, 1, a1, a3); |
| 1356 // GenerateLoadMiss(masm(), Code::LOAD_IC); |
| 1357 // |
| 1358 // // Return the generated code. |
| 1359 // return GetCode(NORMAL, name); |
| 1360 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1361 } |
| 1362 |
| 1363 |
| 1364 Object* KeyedLoadStubCompiler::CompileLoadField(String* name, |
| 1365 JSObject* receiver, |
| 1366 JSObject* holder, |
| 1367 int index) { |
| 1368 UNIMPLEMENTED_(); |
| 1369 // // ----------- S t a t e ------------- |
| 1370 // // -- ra : return address |
| 1371 // // -- sp[0] : key |
| 1372 // // -- sp[4] : receiver |
| 1373 // // ----------------------------------- |
| 1374 // Label miss; |
| 1375 // |
| 1376 //// __ ldr(r2, MemOperand(sp, 0)); |
| 1377 //// __ ldr(r0, MemOperand(sp, kPointerSize)); |
| 1378 // __ lw(a2, MemOperand(sp, 0)); |
| 1379 // __ lw(a0, MemOperand(sp, kPointerSize)); |
| 1380 // |
| 1381 //// __ cmp(r2, Operand(Handle<String>(name))); |
| 1382 //// __ b(ne, &miss); |
| 1383 // __ bcond(ne, &miss, a2, Operand(Handle<String>(name))); |
| 1384 // __ nop(); // NOP_ADDED |
| 1385 // |
| 1386 //// GenerateLoadField(receiver, holder, r0, r3, r1, index, name, &miss); |
| 1387 // GenerateLoadField(receiver, holder, a0, a3, a1, index, name, &miss); |
| 1388 // __ bind(&miss); |
| 1389 // GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 1390 // |
| 1391 // return GetCode(FIELD, name); |
| 1392 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1393 } |
| 1394 |
| 1395 |
| 1396 Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name, |
| 1397 JSObject* receiver, |
| 1398 JSObject* holder, |
| 1399 AccessorInfo* callback) { |
| 1400 UNIMPLEMENTED_(); |
| 1401 // // ----------- S t a t e ------------- |
| 1402 // // -- ra : return address |
| 1403 // // -- sp[0] : key |
| 1404 // // -- sp[4] : receiver |
| 1405 // // ----------------------------------- |
| 1406 // Label miss; |
| 1407 // |
| 1408 //// __ ldr(r2, MemOperand(sp, 0)); |
| 1409 //// __ ldr(r0, MemOperand(sp, kPointerSize)); |
| 1410 // __ lw(a2, MemOperand(sp, 0)); |
| 1411 // __ lw(a0, MemOperand(sp, kPointerSize)); |
| 1412 // |
| 1413 //// __ cmp(r2, Operand(Handle<String>(name))); |
| 1414 //// __ b(ne, &miss); |
| 1415 // __ bcond(ne, &miss, a2, Operand(Handle<String>(name))); |
| 1416 // __ nop(); // NOP_ADDED |
| 1417 // |
| 1418 //// GenerateLoadCallback(receiver, holder, r0, r2, r3, r1, callback, name, &miss); |
| 1419 // GenerateLoadCallback(receiver, holder, a0, a2, a3, a1, callback, name, &miss); |
| 1420 // __ bind(&miss); |
| 1421 // GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 1422 // |
| 1423 // return GetCode(CALLBACKS, name); |
| 1424 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1425 } |
| 1426 |
| 1427 |
| 1428 Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name, |
| 1429 JSObject* receiver, |
| 1430 JSObject* holder, |
| 1431 Object* value) { |
| 1432 UNIMPLEMENTED_(); |
| 1433 // // ----------- S t a t e ------------- |
| 1434 // // -- ra : return address |
| 1435 // // -- sp[0] : key |
| 1436 // // -- sp[4] : receiver |
| 1437 // // ----------------------------------- |
| 1438 // Label miss; |
| 1439 // |
| 1440 // // Check the key is the cached one |
| 1441 //// __ ldr(r2, MemOperand(sp, 0)); |
| 1442 //// __ ldr(r0, MemOperand(sp, kPointerSize)); |
| 1443 // __ lw(a2, MemOperand(sp, 0)); |
| 1444 // __ lw(a0, MemOperand(sp, kPointerSize)); |
| 1445 // |
| 1446 //// __ cmp(r2, Operand(Handle<String>(name))); |
| 1447 //// __ b(ne, &miss); |
| 1448 // __ bcond(ne, &miss, a2, Operand(Handle<String>(name))); |
| 1449 // __ nop(); // NOP_ADDED |
| 1450 // |
| 1451 //// GenerateLoadConstant(receiver, holder, r0, r3, r1, value, name, &miss); |
| 1452 // GenerateLoadConstant(receiver, holder, a0, a3, a1, value, name, &miss); |
| 1453 // __ bind(&miss); |
| 1454 // GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 1455 // |
| 1456 // // Return the generated code. |
| 1457 // return GetCode(CONSTANT_FUNCTION, name); |
| 1458 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1459 } |
| 1460 |
| 1461 |
| 1462 Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, |
| 1463 JSObject* holder, |
| 1464 String* name) { |
| 1465 UNIMPLEMENTED_(); |
| 1466 // // ----------- S t a t e ------------- |
| 1467 // // -- ra : return address |
| 1468 // // -- sp[0] : key |
| 1469 // // -- sp[4] : receiver |
| 1470 // // ----------------------------------- |
| 1471 // Label miss; |
| 1472 // |
| 1473 // // Check the key is the cached one |
| 1474 //// __ ldr(r2, MemOperand(sp, 0)); |
| 1475 //// __ ldr(r0, MemOperand(sp, kPointerSize)); |
| 1476 // __ lw(a2, MemOperand(sp, 0)); |
| 1477 // __ lw(a0, MemOperand(sp, kPointerSize)); |
| 1478 // |
| 1479 //// __ cmp(r2, Operand(Handle<String>(name))); |
| 1480 //// __ b(ne, &miss); |
| 1481 // __ bcond(ne, &miss, a2, Operand(Handle<String>(name))); |
| 1482 // __ nop(); // NOP_ADDED |
| 1483 // |
| 1484 // LookupResult lookup; |
| 1485 // holder->LocalLookupRealNamedProperty(name, &lookup); |
| 1486 // GenerateLoadInterceptor(receiver, |
| 1487 // holder, |
| 1488 // &lookup, |
| 1489 // a0, |
| 1490 // a2, |
| 1491 // a3, |
| 1492 // a1, |
| 1493 // name, |
| 1494 // &miss); |
| 1495 // __ bind(&miss); |
| 1496 // GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 1497 // |
| 1498 // return GetCode(INTERCEPTOR, name); |
| 1499 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1500 } |
| 1501 |
| 1502 |
| 1503 Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { |
| 1504 UNIMPLEMENTED_(); |
| 1505 // // ----------- S t a t e ------------- |
| 1506 // // -- ra : return address |
| 1507 // // -- sp[0] : key |
| 1508 // // -- sp[4] : receiver |
| 1509 // // ----------------------------------- |
| 1510 // Label miss; |
| 1511 // |
| 1512 // // Check the key is the cached one |
| 1513 //// __ ldr(r2, MemOperand(sp, 0)); |
| 1514 //// __ ldr(r0, MemOperand(sp, kPointerSize)); |
| 1515 // __ lw(a2, MemOperand(sp, 0)); |
| 1516 // __ lw(a0, MemOperand(sp, kPointerSize)); |
| 1517 // |
| 1518 //// __ cmp(r2, Operand(Handle<String>(name))); |
| 1519 //// __ b(ne, &miss); |
| 1520 // __ bcond(ne, &miss, a2, Operand(Handle<String>(name))); |
| 1521 // __ nop(); // NOP_ADDED |
| 1522 // |
| 1523 // GenerateLoadArrayLength(masm(), a0, a3, &miss); |
| 1524 // __ bind(&miss); |
| 1525 // GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 1526 // |
| 1527 // return GetCode(CALLBACKS, name); |
| 1528 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1529 } |
| 1530 |
| 1531 |
| 1532 Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { |
| 1533 UNIMPLEMENTED(); |
| 1534 __ break_(0x1370); |
| 1535 // // ----------- S t a t e ------------- |
| 1536 // // -- lr : return address |
| 1537 // // -- sp[0] : key |
| 1538 // // -- sp[4] : receiver |
| 1539 // // ----------------------------------- |
| 1540 // Label miss; |
| 1541 // __ IncrementCounter(&Counters::keyed_load_string_length, 1, r1, r3); |
| 1542 // |
| 1543 // __ ldr(r2, MemOperand(sp)); |
| 1544 // __ ldr(r0, MemOperand(sp, kPointerSize)); // receiver |
| 1545 // |
| 1546 // __ cmp(r2, Operand(Handle<String>(name))); |
| 1547 // __ b(ne, &miss); |
| 1548 // |
| 1549 // GenerateLoadStringLength2(masm(), r0, r1, r3, &miss); |
| 1550 // __ bind(&miss); |
| 1551 // __ DecrementCounter(&Counters::keyed_load_string_length, 1, r1, r3); |
| 1552 // |
| 1553 // GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 1554 // |
| 1555 // return GetCode(CALLBACKS, name); |
| 1556 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1557 } |
| 1558 |
| 1559 |
| 1560 // TODO(1224671): implement the fast case. |
| 1561 Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { |
| 1562 UNIMPLEMENTED(); |
| 1563 __ break_(0x1398); |
| 1564 // // ----------- S t a t e ------------- |
| 1565 // // -- lr : return address |
| 1566 // // -- sp[0] : key |
| 1567 // // -- sp[4] : receiver |
| 1568 // // ----------------------------------- |
| 1569 // GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); |
| 1570 // |
| 1571 // return GetCode(CALLBACKS, name); |
| 1572 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1573 } |
| 1574 |
| 1575 |
| 1576 Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, |
| 1577 int index, |
| 1578 Map* transition, |
| 1579 String* name) { |
| 1580 UNIMPLEMENTED(); |
| 1581 __ break_(0x1415); |
| 1582 // // ----------- S t a t e ------------- |
| 1583 // // -- a0 : value |
| 1584 // // -- a2 : name |
| 1585 // // -- ra : return address |
| 1586 // // -- [sp] : receiver |
| 1587 // // ----------------------------------- |
| 1588 // Label miss; |
| 1589 // |
| 1590 // __ IncrementCounter(&Counters::keyed_store_field, 1, a1, a3); |
| 1591 // |
| 1592 // // Check that the name has not changed. |
| 1593 //// __ cmp(r2, Operand(Handle<String>(name))); |
| 1594 //// __ b(ne, &miss); |
| 1595 // __ bcond(ne, &miss, a2, Operand(Handle<String>(name))); |
| 1596 // __ nop(); // NOP_ADDED |
| 1597 // |
| 1598 // // Load receiver from the stack. |
| 1599 //// __ ldr(r3, MemOperand(sp)); |
| 1600 // __ lw(a3, MemOperand(sp)); |
| 1601 // // a1 is used as scratch register, a3 and a2 might be clobbered. |
| 1602 // GenerateStoreField(masm(), |
| 1603 // Builtins::StoreIC_ExtendStorage, |
| 1604 // object, |
| 1605 // index, |
| 1606 // transition, |
| 1607 // a3, a2, a1, |
| 1608 // &miss); |
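|      // // Note: a non-NULL transition map presumably means the store adds |
|      // // a new property, so GenerateStoreField installs the new map as |
|      // // well and uses the StoreIC_ExtendStorage builtin passed above |
|      // // when the properties backing store has to grow. |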
| 1609 // __ bind(&miss); |
| 1610 // |
| 1611 // __ DecrementCounter(&Counters::keyed_store_field, 1, a1, a3); |
| 1612 //// __ mov(r2, Operand(Handle<String>(name))); // restore name register. |
| 1613 // __ li(a2, Operand(Handle<String>(name))); // restore name register. |
| 1614 // Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); |
| 1615 // __ Jump(ic, RelocInfo::CODE_TARGET); |
| 1616 // __ nop(); // NOP_ADDED |
| 1617 // |
| 1618 // // Return the generated code. |
| 1619 // return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); |
| 1620 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1621 } |
| 1622 |
| 1623 |
| 1624 Object* ConstructStubCompiler::CompileConstructStub( |
| 1625 SharedFunctionInfo* shared) { |
| 1626 UNIMPLEMENTED(); |
| 1627 //#ifdef DEBUG |
| 1628 //// printf("ConstructStubCompiler::CompileConstructStub\n"); |
| 1629 //#endif |
| 1630 // // ----------- S t a t e ------------- |
| 1631 // // -- a0 : argc |
| 1632 // // -- a1 : constructor |
| 1633 // // -- ra : return address |
| 1634 // // -- [sp] : last argument |
| 1635 // // ----------------------------------- |
| 1636 // Label generic_stub_call; |
| 1637 // |
| 1638 // // Use t7 for holding undefined which is used in several places below. |
| 1639 // __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); |
| 1640 // |
| 1641 //#ifdef ENABLE_DEBUGGER_SUPPORT |
| 1642 // // Check to see whether there are any break points in the function code. If |
| 1643 // // there are jump to the generic constructor stub which calls the actual |
| 1644 // // code for the function thereby hitting the break points. |
| 1645 //// __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 1646 //// __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset)); |
| 1647 //// __ cmp(r2, r7); |
| 1648 //// __ b(ne, &generic_stub_call); |
| 1649 // __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 1650 // __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset)); |
| 1651 // __ bcond(ne, &generic_stub_call, a2, Operand(t7)); |
| 1652 // __ nop(); // NOP_ADDED |
| 1653 //#endif |
| 1654 // |
| 1655 // // Load the initial map and verify that it is in fact a map. |
| 1656 // // a1: constructor function |
| 1657 // // t7: undefined |
| 1658 //// __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1659 //// __ tst(r2, Operand(kSmiTagMask)); |
| 1660 //// __ b(eq, &generic_stub_call); |
| 1661 //// __ CompareObjectType(r2, r3, r4, MAP_TYPE); |
| 1662 //// __ b(ne, &generic_stub_call); |
| 1663 // __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1664 // __ andi(t0, a2, Operand(kSmiTagMask)); |
| 1665 // __ bcond(eq, &generic_stub_call, t0, Operand(zero_reg)); |
| 1666 // __ nop(); // NOP_ADDED |
| 1667 // __ GetObjectType(a2, a3, t0); |
| 1668 // __ bcond(ne, &generic_stub_call, t0, Operand(MAP_TYPE)); |
| 1669 // __ nop(); // NOP_ADDED |
| 1670 // |
| 1671 //#ifdef DEBUG |
| 1672 // // Cannot construct functions this way. |
| 1673 // // a0: argc |
| 1674 // // a1: constructor function |
| 1675 // // a2: initial map |
| 1676 // // t7: undefined |
| 1677 //// __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE); |
| 1678 //// __ Check(ne, "Function constructed by construct stub."); |
| 1679 // __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset)); |
| 1680 // __ Check(ne, "Function constructed by construct stub.", a3, Operand(JS_FUNCTION_TYPE)); |
| 1681 //#endif |
| 1682 // |
| 1683 // // Now allocate the JSObject in new space. |
| 1684 // // a0: argc |
| 1685 // // a1: constructor function |
| 1686 // // a2: initial map |
| 1687 // // t7: undefined |
| 1688 //// __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset)); |
| 1689 // __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); |
| 1690 // __ AllocateInNewSpace(a3, |
| 1691 // t4, |
| 1692 // t5, |
| 1693 // t6, |
| 1694 // &generic_stub_call, |
| 1695 // NO_ALLOCATION_FLAGS); |
| 1696 // |
| 1697 // // Allocated the JSObject, now initialize the fields. Map is set to initial |
| 1698 // // map and properties and elements are set to empty fixed array. |
| 1699 // // a0: argc |
| 1700 // // a1: constructor function |
| 1701 // // a2: initial map |
| 1702 // // a3: object size (in words) |
| 1703 // // t4: JSObject (not tagged) |
| 1704 // // t7: undefined |
| 1705 //// __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
| 1706 //// __ mov(r5, r4); |
| 1707 //// ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); |
| 1708 //// __ str(r2, MemOperand(r5, kPointerSize, PostIndex)); |
| 1709 //// ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); |
| 1710 //// __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); |
| 1711 //// ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); |
| 1712 //// __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); |
| 1713 // __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex); |
| 1714 // __ mov(t5, t4); |
| 1715 // __ sw(a2, MemOperand(t5, JSObject::kMapOffset)); |
| 1716 // __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset)); |
| 1717 // __ sw(t6, MemOperand(t5, JSObject::kElementsOffset)); |
| 1718 // ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); |
| 1719 // ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset); |
| 1720 // ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); |
| 1721 // |
| 1722 // __ addiu(t5, t5, Operand(3 * kPointerSize)); |
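|      // // Note: map, properties and elements form the fixed three-word |
|      // // JSObject header (offsets 0, 4 and 8 on this 32-bit target, per |
|      // // the ASSERT_EQs above). MIPS has no post-indexed stores, so the |
|      // // ARM str/PostIndex pairs become plain sw at fixed offsets plus a |
|      // // single addiu that leaves t5 pointing at the first in-object |
|      // // property. |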
| 1723 // |
| 1724 // // Calculate the location of the first argument. The stack contains only the |
| 1725 // // argc arguments. |
| 1726 //// __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2)); |
| 1727 // __ sll(a1, a0, kPointerSizeLog2); |
| 1728 // __ addu(a1, a1, Operand(sp)); |
| 1729 // |
| 1730 // // We need to add the 4 args slots size because they need to be set up when we |
| 1731 // // call the runtime function the first time this kind of object is |
| 1732 // // initialized (cf Builtins::Generate_JSConstructStubGeneric). |
| 1733 // // TOCHECK: This need is maybe just because I first implemented it with args |
| 1734 // // slots. Try to do it without: we should not need this as the runtime |
| 1735 // // function called has the stack set up just before it is called. |
| 1736 // __ addiu(a1, a1, StandardFrameConstants::kRArgsSlotsSize); |
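|      // // Note: a1 = sp + argc * kPointerSize points one word above the |
|      // // first argument, so argument n is read from |
|      // // a1 - (n + 1) * kPointerSize in the loop below. The extra |
|      // // kRArgsSlotsSize added above (presumably the four o32 argument |
|      // // slots, 4 * kPointerSize = 16 bytes) assumes those slots sit |
|      // // between sp and the arguments; the TOCHECK comment questions |
|      // // exactly that. |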
| 1737 // |
| 1738 // // Fill all the in-object properties with undefined. |
| 1739 // // a0: argc |
| 1740 // // a1: first argument |
| 1741 // // a3: object size (in words) |
| 1742 // // t4: JSObject (not tagged) |
| 1743 // // t5: First in-object property of JSObject (not tagged) |
| 1744 // // t7: undefined |
| 1745 // // Fill the initialized properties with a constant value or a passed argument |
| 1746 // // depending on the this.x = ...; assignment in the function. |
| 1747 // for (int i = 0; i < shared->this_property_assignments_count(); i++) { |
| 1748 // if (shared->IsThisPropertyAssignmentArgument(i)) { |
| 1749 // Label not_passed, next; |
| 1750 // // Check if the argument assigned to the property is actually passed. |
| 1751 // int arg_number = shared->GetThisPropertyAssignmentArgument(i); |
| 1752 //// __ cmp(r0, Operand(arg_number)); |
| 1753 //// __ b(le, &not_passed); |
| 1754 // __ bcond(less_equal, &not_passed, a0, Operand(arg_number)); |
| 1755 // __ nop(); // NOP_ADDED |
| 1756 // // Argument passed - find it on the stack. |
| 1757 //// __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize)); |
| 1758 //// __ str(r2, MemOperand(r5, kPointerSize, PostIndex)); |
| 1759 //// __ b(&next); |
| 1760 //// __ bind(&not_passed); |
| 1761 // __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize)); |
| 1762 // __ sw(a2, MemOperand(t5)); |
| 1763 // __ addiu(t5, t5, Operand(kPointerSize)); |
| 1764 // __ b(&next); |
| 1765 // __ nop(); // NOP_ADDED |
| 1766 // __ bind(&not_passed); |
| 1767 // // Set the property to undefined. |
| 1768 //// __ str(r7, MemOperand(r5, kPointerSize, PostIndex)); |
| 1769 // __ sw(t7, MemOperand(t5)); |
| 1770 // __ addiu(t5, t5, Operand(kPointerSize)); |
| 1771 // __ bind(&next); |
| 1772 // } else { |
| 1773 // // Set the property to the constant value. |
| 1774 // Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i)); |
| 1775 //// __ mov(r2, Operand(constant)); |
| 1776 //// __ str(r2, MemOperand(r5, kPointerSize, PostIndex)); |
| 1777 // __ li(a2, Operand(constant)); |
| 1778 // __ sw(a2, MemOperand(t5)); |
| 1779 // __ addiu(t5, t5, Operand(kPointerSize)); |
| 1780 // } |
| 1781 // } |
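|      // // Example: for function Point(x, y) { this.x = x; this.y = y; } |
|      // // the shared info records two this-property assignments, both |
|      // // arguments, so the loop stores each passed argument into the |
|      // // next in-object field (or undefined when, say, new Point(3) |
|      // // omits y); constant assignments take the else arm instead. |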
| 1782 // |
| 1783 // // Fill the unused in-object property fields with undefined. |
| 1784 // for (int i = shared->this_property_assignments_count(); |
| 1785 // i < shared->CalculateInObjectProperties(); |
| 1786 // i++) { |
| 1787 //// __ str(r7, MemOperand(r5, kPointerSize, PostIndex)); |
| 1788 // __ sw(t7, MemOperand(t5)); |
| 1789 // __ addiu(t5, t5, Operand(kPointerSize)); |
| 1790 // } |
| 1791 // |
| 1792 // // a0: argc |
| 1793 // // t4: JSObject (not tagged) |
| 1794 // // Move argc to a1 and the JSObject to return to v0 and tag it. |
| 1795 //// __ mov(r1, r0); |
| 1796 //// __ mov(r0, r4); |
| 1797 //// __ orr(r0, r0, Operand(kHeapObjectTag)); |
| 1798 // __ mov(a1, a0); |
| 1799 // __ mov(v0, t4); |
| 1800 // __ or_(v0, v0, Operand(kHeapObjectTag)); |
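|      // // Note: AllocateInNewSpace returned an untagged address in t4, |
|      // // and V8 heap object pointers carry kHeapObjectTag (1) in the low |
|      // // bit, so or-ing the tag in turns the raw address into the tagged |
|      // // JSObject pointer returned in v0. |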
| 1801 // |
| 1802 // // v0: JSObject |
| 1803 // // a1: argc |
| 1804 // // Remove caller arguments and receiver from the stack and return. |
| 1805 //// __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2)); |
| 1806 //// __ add(sp, sp, Operand(kPointerSize)); |
| 1807 //// __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2); |
| 1808 //// __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2); |
| 1809 //// __ Jump(lr); |
| 1810 // __ sll(t0, a1, kPointerSizeLog2); |
| 1811 // __ addu(sp, sp, Operand(t0)); |
| 1812 // __ addiu(sp, sp, Operand(kPointerSize)); |
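|      // // Note: a1 holds argc here, so with argc == 2 on this 32-bit |
|      // // target sp advances by 2 * 4 + 4 = 12 bytes, dropping the two |
|      // // arguments plus the receiver before returning to the caller. |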
| 1813 // __ IncrementCounter(&Counters::constructed_objects, 1, a1, a2); |
| 1814 // __ IncrementCounter(&Counters::constructed_objects_stub, 1, a1, a2); |
| 1815 // __ Jump(ra); |
| 1816 // __ nop(); // NOP_ADDED |
| 1817 // |
| 1818 // // Jump to the generic stub in case the specialized code cannot handle the |
| 1819 // // construction. |
| 1820 // __ bind(&generic_stub_call); |
| 1821 // Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric); |
| 1822 // Handle<Code> generic_construct_stub(code); |
| 1823 // __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); |
| 1824 // __ nop(); // NOP_ADDED |
| 1825 // |
| 1826 // // Return the generated code. |
| 1827 // return GetCode(); |
| 1828 return (Object*)NULL; // UNIMPLEMENTED RETURN |
| 1829 } |
| 1830 |
| 1831 |
| 1832 #undef __ |
| 1833 |
| 1834 } } // namespace v8::internal |