| OLD | NEW |
| (Empty) | |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are |
| 4 // met: |
| 5 // |
| 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. |
| 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 |
| 28 |
| 29 |
| 30 #include "v8.h" |
| 31 |
| 32 #include "codegen-inl.h" |
| 33 #include "ic-inl.h" |
| 34 #include "runtime.h" |
| 35 #include "stub-cache.h" |
| 36 |
| 37 namespace v8 { |
| 38 namespace internal { |
| 39 |
| 40 |
| 41 // ---------------------------------------------------------------------------- |
| 42 // Static IC stub generators. |
| 43 // |
| 44 |
| 45 #define __ ACCESS_MASM(masm) |
| 46 |
| 47 |
| 48 // Helper function used from LoadIC/CallIC GenerateNormal. |
| 49 //static void GenerateDictionaryLoad(MacroAssembler* masm, |
| 50 // Label* miss, |
| 51 // Register t0, |
| 52 // Register t1) { |
| 53 // // Register use: |
| 54 // // |
| 55 // // t0 - used to hold the property dictionary. |
| 56 // // |
| 57 // // t1 - initially the receiver |
| 58 // // - used for the index into the property dictionary |
| 59 // // - holds the result on exit. |
| 60 // // |
| 61 // // r3 - used as temporary and to hold the capacity of the property |
| 62 // // dictionary. |
| 63 // // |
| 64 // //  r2 - holds the name of the property and is unchanged. |
| 65 // |
| 66 // Label done; |
| 67 // |
| 68 // // Check for the absence of an interceptor. |
| 69 // // Load the map into t0. |
| 70 // __ ldr(t0, FieldMemOperand(t1, JSObject::kMapOffset)); |
| 71 // // Test the has_named_interceptor bit in the map. |
| 72 // __ ldr(r3, FieldMemOperand(t0, Map::kInstanceAttributesOffset)); |
| 73 // __ tst(r3, Operand(1 << (Map::kHasNamedInterceptor + (3 * 8)))); |
| 74 // // Jump to miss if the interceptor bit is set. |
| 75 // __ b(ne, miss); |
| 76 // |
| 77 // // Bail out if we have a JS global proxy object. |
| 78 // __ ldrb(r3, FieldMemOperand(t0, Map::kInstanceTypeOffset)); |
| 79 // __ cmp(r3, Operand(JS_GLOBAL_PROXY_TYPE)); |
| 80 // __ b(eq, miss); |
| 81 // |
| 82 // // Possible work-around for http://crbug.com/16276. |
| 83 // // See also: http://codereview.chromium.org/155418. |
| 84 // __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE)); |
| 85 // __ b(eq, miss); |
| 86 // __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE)); |
| 87 // __ b(eq, miss); |
| 88 // |
| 89 // // Check that the properties array is a dictionary. |
| 90 // __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset)); |
| 91 // __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset)); |
| 92 // __ LoadRoot(ip, Heap::kHashTableMapRootIndex); |
| 93 // __ cmp(r3, ip); |
| 94 // __ b(ne, miss); |
| 95 // |
| 96 // // Compute the capacity mask. |
| 97 // const int kCapacityOffset = StringDictionary::kHeaderSize + |
| 98 // StringDictionary::kCapacityIndex * kPointerSize; |
| 99 // __ ldr(r3, FieldMemOperand(t0, kCapacityOffset)); |
| 100 // __ mov(r3, Operand(r3, ASR, kSmiTagSize)); // convert smi to int |
| 101 // __ sub(r3, r3, Operand(1)); |
| 102 // |
| 103 // const int kElementsStartOffset = StringDictionary::kHeaderSize + |
| 104 // StringDictionary::kElementsStartIndex * kPointerSize; |
| 105 // |
| 106 // // Generate an unrolled loop that performs a few probes before |
| 107 // // giving up. Measurements done on Gmail indicate that 2 probes |
| 108 // // cover ~93% of loads from dictionaries. |
| 109 // static const int kProbes = 4; |
| 110 // for (int i = 0; i < kProbes; i++) { |
| 111 // // Compute the masked index: (hash + i + i * i) & mask. |
| 112 // __ ldr(t1, FieldMemOperand(r2, String::kLengthOffset)); |
| 113 // __ mov(t1, Operand(t1, LSR, String::kHashShift)); |
| 114 // if (i > 0) { |
| 115 // __ add(t1, t1, Operand(StringDictionary::GetProbeOffset(i))); |
| 116 // } |
| 117 // __ and_(t1, t1, Operand(r3)); |
| 118 // |
| 119 // // Scale the index by multiplying by the element size. |
| 120 // ASSERT(StringDictionary::kEntrySize == 3); |
| 121 // __ add(t1, t1, Operand(t1, LSL, 1)); // t1 = t1 * 3 |
| 122 // |
| 123 // // Check if the key is identical to the name. |
| 124 // __ add(t1, t0, Operand(t1, LSL, 2)); |
| 125 // __ ldr(ip, FieldMemOperand(t1, kElementsStartOffset)); |
| 126 // __ cmp(r2, Operand(ip)); |
| 127 // if (i != kProbes - 1) { |
| 128 // __ b(eq, &done); |
| 129 // } else { |
| 130 // __ b(ne, miss); |
| 131 // } |
| 132 // } |
| 133 // |
| 134 // // Check that the value is a normal property. |
| 135 // __ bind(&done); // t1 == t0 + 4*index |
| 136 // __ ldr(r3, FieldMemOperand(t1, kElementsStartOffset + 2 * kPointerSize)); |
| 137 // __ tst(r3, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize)); |
| 138 // __ b(ne, miss); |
| 139 // |
| 140 // // Get the value at the masked, scaled index and return. |
| 141 // __ ldr(t1, FieldMemOperand(t1, kElementsStartOffset + 1 * kPointerSize)); |
| 142 //} |
| 143 |
| 144 |
| 145 // Helper function used to check that a value is either not an object |
| 146 // or is loaded if it is an object. |
| 147 //static void GenerateCheckNonObjectOrLoaded(MacroAssembler* masm, |
| 148 // Label* miss, |
| 149 // Register value, |
| 150 // Register scratch) { |
| 151 // Label done; |
| 152 // // Check if the value is a Smi. |
| 153 // __ tst(value, Operand(kSmiTagMask)); |
| 154 // __ b(eq, &done); |
| 155 // // Check if the object has been loaded. |
| 156 // __ ldr(scratch, FieldMemOperand(value, JSObject::kMapOffset)); |
| 157 // __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitField2Offset)); |
| 158 // __ tst(scratch, Operand(1 << Map::kNeedsLoading)); |
| 159 // __ b(ne, miss); |
| 160 // __ bind(&done); |
| 161 //} |
| 162 |
| 163 |
// Stub for the in-progress MIPS port: the fast array-length load IC is not
// implemented yet, so this aborts via UNIMPLEMENTED_() if ever reached.
// The commented-out code below is the intended MIPS implementation, kept as
// a reference; '////' lines are the ARM originals it was translated from.
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  UNIMPLEMENTED_();
//  // ----------- S t a t e -------------
//  //  -- a2    : name
//  //  -- ra    : return address
//  //  -- [sp]  : receiver
//  // -----------------------------------
//
//  Label miss;
//
////  __ ldr(r0, MemOperand(sp, 0));
//  __ lw(a0, MemOperand(sp, 0));
//
//  StubCompiler::GenerateLoadArrayLength(masm, a0, a3, &miss);
//  __ bind(&miss);
//  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
| 181 |
| 182 |
// Stub for the in-progress MIPS port: the fast string-length load IC is not
// implemented yet, so this aborts via UNIMPLEMENTED_() if ever reached.
// The commented-out code below is the intended MIPS implementation, kept as
// a reference; '////' lines are the ARM originals it was translated from.
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
  UNIMPLEMENTED_();
//  __ break_(0x00186);  // UNIMPLEMENTED
//  // ----------- S t a t e -------------
//  //  -- r2    : name
//  //  -- lr    : return address
//  //  -- [sp]  : receiver
//  // -----------------------------------
//  Label miss;
//
////  __ ldr(r0, MemOperand(sp, 0));
//  __ lw(a0, MemOperand(sp, 0));
//
//  StubCompiler::GenerateLoadStringLength2(masm, a0, a1, a3, &miss);
//  // Cache miss: Jump to runtime.
//  __ bind(&miss);
//  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
| 201 |
| 202 |
// Stub for the in-progress MIPS port: the function-prototype load IC is not
// implemented yet, so this aborts via UNIMPLEMENTED_() if ever reached.
// The commented-out code below is the intended MIPS implementation, kept as
// a reference; '////' lines are the ARM originals it was translated from.
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  UNIMPLEMENTED_();
//  // ----------- S t a t e -------------
//  //  -- r2    : name
//  //  -- lr    : return address
//  //  -- [sp]  : receiver
//  // -----------------------------------
//
//  Label miss;
//
//  // Load receiver.
////  __ ldr(r0, MemOperand(sp, 0));
//  __ lw(a0, MemOperand(sp, 0));
//
//  StubCompiler::GenerateLoadFunctionPrototype(masm, a0, a1, a3, &miss);
//  __ bind(&miss);
//  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
| 221 |
| 222 |
| 223 // Defined in ic.cc. |
| 224 Object* CallIC_Miss(Arguments args); |
| 225 |
// Stub for the in-progress MIPS port: the megamorphic call IC (stub-cache
// probe plus value-receiver handling for numbers/strings/booleans) is not
// implemented yet, so this aborts via UNIMPLEMENTED_() if ever reached.
// The commented-out code below is the intended MIPS implementation, kept as
// a reference; '////' lines are the ARM originals it was translated from.
// NOTE(review): the MIPS translation keeps explicit nop()s for branch delay
// slots ("NOP_ADDED") — presumably to be replaced by scheduled instructions
// later; confirm against the MacroAssembler branch helpers before enabling.
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  UNIMPLEMENTED_();
//  // ----------- S t a t e -------------
//  //  -- lr: return address
//  // -----------------------------------
//  Label number, non_number, non_string, boolean, probe, miss;
//
//  // Get the receiver of the function from the stack into r1.
////  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
//  __ lw(a1, MemOperand(sp, argc * kPointerSize));
////                        + StandardFrameConstants::kRArgsSlotsSize));
//  // Get the name of the function from the stack; 1 ~ receiver.
////  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));
//  __ lw(a2, MemOperand(sp, (argc + 1) * kPointerSize));
////                        + StandardFrameConstants::kRArgsSlotsSize));
//
//  // Probe the stub cache.
//  Code::Flags flags =
//      Code::ComputeFlags(Code::CALL_IC, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
//  StubCache::GenerateProbe(masm, flags, a1, a2, a3, no_reg);
//
//  // If the stub cache probing failed, the receiver might be a value.
//  // For value objects, we use the map of the prototype objects for
//  // the corresponding JSValue for the cache and that is what we need
//  // to probe.
//  //
//  // Check for number.
////  __ tst(r1, Operand(kSmiTagMask));
////  __ b(eq, &number);
////  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
////  __ b(ne, &non_number);
////  __ bind(&number);
////  StubCompiler::GenerateLoadGlobalFunctionPrototype(
////      masm, Context::NUMBER_FUNCTION_INDEX, r1);
////  __ b(&probe);
//  __ andi(t1, a1, Operand(kSmiTagMask));
//  __ bcond(eq, &number, t1, Operand(zero_reg));
//  __ GetObjectType(a1, a3, a3);
//  __ bcond(ne, &non_number, a3, Operand(HEAP_NUMBER_TYPE));
//  __ nop();  // NOP_ADDED
//  __ bind(&number);
//  StubCompiler::GenerateLoadGlobalFunctionPrototype(
//      masm, Context::NUMBER_FUNCTION_INDEX, a1);
//  __ b(&probe);
//  __ nop();  // NOP_ADDED
//
//  // Check for string.
//  __ bind(&non_number);
////  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
////  __ b(hs, &non_string);
////  StubCompiler::GenerateLoadGlobalFunctionPrototype(
////      masm, Context::STRING_FUNCTION_INDEX, r1);
////  __ b(&probe);
//  __ bcond(Ugreater_equal, &non_string, a3, Operand(FIRST_NONSTRING_TYPE));
//  __ nop();  // NOP_ADDED
//  StubCompiler::GenerateLoadGlobalFunctionPrototype(
//      masm, Context::STRING_FUNCTION_INDEX, a1);
//  __ b(&probe);
//  __ nop();  // NOP_ADDED
//
//
//  // Check for boolean.
//  __ bind(&non_string);
////  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
////  __ cmp(r1, ip);
////  __ b(eq, &boolean);
////  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
////  __ cmp(r1, ip);
////  __ b(ne, &miss);
////  __ bind(&boolean);
////  StubCompiler::GenerateLoadGlobalFunctionPrototype(
////      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);
//  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
//  __ bcond(eq, &boolean, a1, Operand(ip));
//  __ nop();  // NOP_ADDED
//  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
//  __ bcond(ne, &miss, a1, Operand(ip));
//  __ nop();  // NOP_ADDED
//  __ bind(&boolean);
//  StubCompiler::GenerateLoadGlobalFunctionPrototype(
//      masm, Context::BOOLEAN_FUNCTION_INDEX, a1);
//
//  // Probe the stub cache for the value object.
//  __ bind(&probe);
//  StubCache::GenerateProbe(masm, flags, a1, a2, a3, no_reg);
//
//  // Cache miss: Jump to runtime.
//  __ bind(&miss);
//  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
| 316 |
| 317 |
| 318 //static void GenerateNormalHelper(MacroAssembler* masm, |
| 319 // int argc, |
| 320 // bool is_global_object, |
| 321 // Label* miss) { |
| 322 // UNIMPLEMENTED(); |
| 323 // __ break_(0x00294); // UNIMPLEMENTED |
| 324 //// // Search dictionary - put result in register r1. |
| 325 //// GenerateDictionaryLoad(masm, miss, r0, r1); |
| 326 //// |
| 327 //// // Check that the value isn't a smi. |
| 328 //// __ tst(r1, Operand(kSmiTagMask)); |
| 329 //// __ b(eq, miss); |
| 330 //// |
| 331 //// // Check that the value is a JSFunction. |
| 332 //// __ CompareObjectType(r1, r0, r0, JS_FUNCTION_TYPE); |
| 333 //// __ b(ne, miss); |
| 334 //// |
| 335 //// // Check that the function has been loaded. |
| 336 //// __ ldr(r0, FieldMemOperand(r1, JSObject::kMapOffset)); |
| 337 //// __ ldrb(r0, FieldMemOperand(r0, Map::kBitField2Offset)); |
| 338 //// __ tst(r0, Operand(1 << Map::kNeedsLoading)); |
| 339 //// __ b(ne, miss); |
| 340 //// |
| 341 //// // Patch the receiver with the global proxy if necessary. |
| 342 //// if (is_global_object) { |
| 343 //// __ ldr(r2, MemOperand(sp, argc * kPointerSize)); |
| 344 //// __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); |
| 345 //// __ str(r2, MemOperand(sp, argc * kPointerSize)); |
| 346 //// } |
| 347 //// |
| 348 //// // Invoke the function. |
| 349 //// ParameterCount actual(argc); |
| 350 //// __ InvokeFunction(r1, actual, JUMP_FUNCTION); |
| 351 //} |
| 352 |
| 353 |
// Stub for the in-progress MIPS port: the normal (dictionary-mode) call IC
// is not implemented yet.  UNIMPLEMENTED_() plus a break_() trap guard it at
// runtime.  The commented-out code below is the ARM implementation kept as a
// reference — it has not been translated to MIPS registers yet (it still
// uses r0..r3, and depends on GenerateNormalHelper, also not ported).
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  UNIMPLEMENTED_();
  __ break_(0x00326);  // UNIMPLEMENTED
//  // ----------- S t a t e -------------
//  //  -- lr: return address
//  // -----------------------------------
//
//  Label miss, global_object, non_global_object;
//
//  // Get the receiver of the function from the stack into r1.
//  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
//  // Get the name of the function from the stack; 1 ~ receiver.
//  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));
//
//  // Check that the receiver isn't a smi.
//  __ tst(r1, Operand(kSmiTagMask));
//  __ b(eq, &miss);
//
//  // Check that the receiver is a valid JS object.  Put the map in r3.
//  __ CompareObjectType(r1, r3, r0, FIRST_JS_OBJECT_TYPE);
//  __ b(lt, &miss);
//
//  // If this assert fails, we have to check upper bound too.
//  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
//
//  // Check for access to global object.
//  __ cmp(r0, Operand(JS_GLOBAL_OBJECT_TYPE));
//  __ b(eq, &global_object);
//  __ cmp(r0, Operand(JS_BUILTINS_OBJECT_TYPE));
//  __ b(ne, &non_global_object);
//
//  // Accessing global object: Load and invoke.
//  __ bind(&global_object);
//  // Check that the global object does not require access checks.
//  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
//  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
//  __ b(ne, &miss);
//  GenerateNormalHelper(masm, argc, true, &miss);
//
//  // Accessing non-global object: Check for access to global proxy.
//  Label global_proxy, invoke;
//  __ bind(&non_global_object);
//  __ cmp(r0, Operand(JS_GLOBAL_PROXY_TYPE));
//  __ b(eq, &global_proxy);
//  // Check that the non-global, non-global-proxy object does not
//  // require access checks.
//  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
//  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
//  __ b(ne, &miss);
//  __ bind(&invoke);
//  GenerateNormalHelper(masm, argc, false, &miss);
//
//  // Global object access: Check access rights.
//  __ bind(&global_proxy);
//  __ CheckAccessGlobalProxy(r1, r0, &miss);
//  __ b(&invoke);
//
//  // Cache miss: Jump to runtime.
//  __ bind(&miss);
//  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
| 415 |
| 416 |
// Stub for the in-progress MIPS port: the generic call-IC entry (push
// receiver and name, call the IC miss handler in the runtime via CEntryStub,
// patch a global receiver if needed, then invoke the resolved function) is
// not implemented yet and aborts via UNIMPLEMENTED_().  The commented-out
// code below is the intended MIPS implementation kept as a reference;
// '////' lines are ARM originals or disabled experiments.
void CallIC::Generate(MacroAssembler* masm,
                      int argc,
                      const ExternalReference& f) {
  UNIMPLEMENTED_();
//// Called from CodeGenerator::VisitCall(Property* node) through
//// ComputeCallInitialize(...). No other code generation during intermediate
//// function calls.
//
//  // ----------- S t a t e -------------
//  //  -- ra: return address
//  // -----------------------------------
//
//  // Get the receiver of the function from the stack.
//  __ lw(a2, MemOperand(sp, argc * kPointerSize));
////                        + StandardFrameConstants::kRegularArgsSlotsSize));
//  // Get the name of the function to call from the stack.
//  __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
////                        + StandardFrameConstants::kRegularArgsSlotsSize));
//
//  __ EnterInternalFrame();
//
//  // Push the receiver and the name of the function.
//  __ multi_push_reversed(a1.bit() | a2.bit());
//
//  // Call the entry.
//  __ li(a0, Operand(2));
//  __ li(a1, Operand(f));
//
//  CEntryStub stub(1);
//  // This calls the code generated by void CEntryStub::GenerateBody(...)
//  __ CallStub(&stub);
//  __ nop();  // NOP_ADDED
//
//  // Move result to a1 and leave the internal frame.
//  __ mov(a1, v0);
//  __ LeaveInternalFrame();
//
//  // Check if the receiver is a global object of some sort.
//  Label invoke, global;
//  __ lw(a2, MemOperand(sp, argc * kPointerSize));  // receiver
////                        + StandardFrameConstants::kRegularArgsSlotsSize));
//  __ andi(ip, a2, Operand(kSmiTagMask));
//  __ bcond(eq, &invoke, ip, Operand(zero_reg));
//  __ nop();  // NOP_ADDED
//  __ GetObjectType(a2, a3, a3);
//  __ bcond(eq, &global, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
//  __ nop();  // NOP_ADDED
//  __ bcond(ne, &invoke, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
//  __ nop();  // NOP_ADDED
//
//  // Patch the receiver on the stack.
//  __ bind(&global);
//  __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
//  __ sw(a2, MemOperand(sp, argc * kPointerSize));
////                        + StandardFrameConstants::kRegularArgsSlotsSize));
//
//  // Invoke the function.
//  ParameterCount actual(argc);
//  __ bind(&invoke);
////  // Dirty fix
////  __ addiu(sp, sp, StandardFrameConstants::kRArgsSlotsSize);
////  __ InvokeFunction(a1, actual, JUMP_FUNCTION, true);
//  __ InvokeFunction(a1, actual, JUMP_FUNCTION, false);
//  __ nop();  // NOP_ADDED
}
| 482 |
| 483 |
| 484 // Defined in ic.cc. |
| 485 Object* LoadIC_Miss(Arguments args); |
| 486 |
// Stub for the in-progress MIPS port: the megamorphic load IC (stub-cache
// probe on the receiver/name pair) is not implemented yet.  UNIMPLEMENTED_()
// plus a break_() trap guard it at runtime.  The commented-out code below is
// the intended MIPS implementation, kept as a reference; '////' lines are
// the ARM originals it was translated from.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  UNIMPLEMENTED_();
  __ break_(0x00473);  // UNIMPLEMENTED
//  // ----------- S t a t e -------------
//  //  -- r2    : name
//  //  -- lr    : return address
//  //  -- [sp]  : receiver
//  // -----------------------------------
//
////  __ ldr(r0, MemOperand(sp, 0));
//  __ lw(a0, MemOperand(sp, 0));
//  // Probe the stub cache.
//  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
//                                         NOT_IN_LOOP,
//                                         MONOMORPHIC);
//  StubCache::GenerateProbe(masm, flags, a0, a2, a3, no_reg);
//
//  // Cache miss: Jump to runtime.
//  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
| 507 |
| 508 |
// Stub for the in-progress MIPS port: the normal (dictionary-mode) load IC
// is not implemented yet.  UNIMPLEMENTED_() plus a break_() trap guard it at
// runtime.  The commented-out code below is the ARM implementation kept as a
// reference — it has not been translated to MIPS registers yet (it still
// uses r0..r3 and depends on GenerateDictionaryLoad /
// GenerateCheckNonObjectOrLoaded, which are also not ported).
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  UNIMPLEMENTED_();
  __ break_(0x00494);  // UNIMPLEMENTED
//  // ----------- S t a t e -------------
//  //  -- r2    : name
//  //  -- lr    : return address
//  //  -- [sp]  : receiver
//  // -----------------------------------
//
//  Label miss, probe, global;
//
//  __ ldr(r0, MemOperand(sp, 0));
//  // Check that the receiver isn't a smi.
//  __ tst(r0, Operand(kSmiTagMask));
//  __ b(eq, &miss);
//
//  // Check that the receiver is a valid JS object.  Put the map in r3.
//  __ CompareObjectType(r0, r3, r1, FIRST_JS_OBJECT_TYPE);
//  __ b(lt, &miss);
//  // If this assert fails, we have to check upper bound too.
//  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
//
//  // Check for access to global object (unlikely).
//  __ cmp(r1, Operand(JS_GLOBAL_PROXY_TYPE));
//  __ b(eq, &global);
//
//  // Check for non-global object that requires access check.
//  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
//  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
//  __ b(ne, &miss);
//
//  __ bind(&probe);
//  GenerateDictionaryLoad(masm, &miss, r1, r0);
//  GenerateCheckNonObjectOrLoaded(masm, &miss, r0, r1);
//  __ Ret();
//
//  // Global object access: Check access rights.
//  __ bind(&global);
//  __ CheckAccessGlobalProxy(r0, r1, &miss);
//  __ b(&probe);
//
//  // Cache miss: Restore receiver from stack and jump to runtime.
//  __ bind(&miss);
//  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
| 554 |
| 555 |
// Emits the load-IC miss handler: delegates to Generate() with the
// kLoadIC_Miss runtime entry, which tail-calls into the runtime.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
| 559 |
| 560 |
// Stub for the in-progress MIPS port: the generic load-IC entry (push
// receiver and name, then tail-call runtime entry 'f') is not implemented
// yet.  UNIMPLEMENTED_() plus a break_() trap guard it at runtime.  The
// commented-out code below is the intended MIPS implementation, kept as a
// reference; '////' lines are the ARM originals it was translated from.
void LoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  UNIMPLEMENTED_();
  __ break_(0x00545);  // UNIMPLEMENTED
//  // ----------- S t a t e -------------
//  //  -- r2    : name
//  //  -- lr    : return address
//  //  -- [sp]  : receiver
//  // -----------------------------------
//
////  __ ldr(r3, MemOperand(sp, 0));
////  __ stm(db_w, sp, r2.bit() | r3.bit());
//  __ lw(a3, MemOperand(sp, 0));
//  __ multi_push_reversed(a2.bit() | a3.bit());
//
//  // Perform tail call to the entry.
//  __ TailCallRuntime(f, 2, 1);
//  __ nop();  // NOP_ADDED
}
| 579 |
| 580 |
| 581 // TODO(181): Implement map patching once loop nesting is tracked on the |
| 582 // ARM platform so we can generate inlined fast-case code loads in |
| 583 // loops. |
// Inlined-IC (in-place code patching) support is not implemented on this
// platform.  The Clear/Restore hooks are no-ops, and the Patch* predicates
// return false so the IC system always falls back to the non-inlined path.
void LoadIC::ClearInlinedVersion(Address address) {}
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
  // No inlined fast-case load to patch; caller uses the generic stub.
  return false;
}

void KeyedLoadIC::ClearInlinedVersion(Address address) {}
bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
  // No inlined keyed load to patch; caller uses the generic stub.
  return false;
}

void KeyedStoreIC::ClearInlinedVersion(Address address) {}
void KeyedStoreIC::RestoreInlinedVersion(Address address) {}
bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
  // No inlined keyed store to patch; caller uses the generic stub.
  return false;
}
| 599 |
| 600 |
| 601 Object* KeyedLoadIC_Miss(Arguments args); |
| 602 |
| 603 |
// Emits the keyed-load-IC miss handler: delegates to Generate() with the
// kKeyedLoadIC_Miss runtime entry, which tail-calls into the runtime.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  Generate(masm, ExternalReference(IC_Utility(kKeyedLoadIC_Miss)));
}
| 607 |
| 608 |
// Stub for the in-progress MIPS port: the generic keyed-load-IC entry (push
// key and receiver, then tail-call runtime entry 'f') is not implemented yet
// and aborts via UNIMPLEMENTED_().  The commented-out code below is the
// intended MIPS implementation, kept as a reference; '////' lines are the
// ARM originals it was translated from.
void KeyedLoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  UNIMPLEMENTED_();
//  // ---------- S t a t e --------------
//  //  -- ra     : return address
//  //  -- sp[0]  : key
//  //  -- sp[4]  : receiver
//
////  __ break_(0x04008);
////  __ ldm(ia, sp, r2.bit() | r3.bit());
////  __ stm(db_w, sp, r2.bit() | r3.bit());
//  __ lw(a2, MemOperand(sp, 0));
//  __ lw(a3, MemOperand(sp, 4));
//  __ multi_push_reversed(a2.bit() | a3.bit());
//
//  __ TailCallRuntime(f, 2, 1);
//  __ nop();  // NOP_ADDED
}
| 626 |
| 627 |
// Stub for the in-progress MIPS port: the generic keyed load (fast path for
// smi keys into fast-elements JS objects, slow path tail-calling
// Runtime::kGetProperty) is not implemented yet and aborts via
// UNIMPLEMENTED_().  The commented-out code below is the intended MIPS
// implementation, kept as a reference; '////' lines are the ARM originals it
// was translated from.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  UNIMPLEMENTED_();
//  // ---------- S t a t e --------------
//  //  -- ra     : return address
//  //  -- sp[0]  : key
//  //  -- sp[4]  : receiver
//  Label slow, fast;
//
//  // Get the key and receiver object from the stack.
////  __ ldm(ia, sp, r0.bit() | r1.bit());
//  __ lw(a0, MemOperand(sp, 0));
//  __ lw(a1, MemOperand(sp, 4));
//  // Check that the key is a smi.
////  __ tst(r0, Operand(kSmiTagMask));
////  __ b(ne, &slow);
////  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
//  __ andi(t0, a0, Operand(kSmiTagMask));
//  __ bcond(ne, &slow, t0, Operand(zero_reg));
//  __ nop();  // NOP_ADDED
//  __ sra(a0, a0, kSmiTagSize);
//  // Check that the object isn't a smi.
////  __ tst(r1, Operand(kSmiTagMask));
////  __ b(eq, &slow);
//  __ andi(t1, a1, Operand(kSmiTagMask));
//  __ bcond(eq, &slow, t1, Operand(zero_reg));
//  __ nop();  // NOP_ADDED
//
//  // Get the map of the receiver.
////  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
//  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
//  // Check that the receiver does not require access checks.  We need
//  // to check this explicitly since this generic stub does not perform
//  // map checks.
////  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
////  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
////  __ b(ne, &slow);
//  __ lbu(a3, FieldMemOperand(a2, Map::kBitFieldOffset));
//  __ andi(t3, a3, Operand(1 << Map::kIsAccessCheckNeeded));
//  __ bcond(ne, &slow, t3, Operand(zero_reg));
//  __ nop();  // NOP_ADDED
//  // Check that the object is some kind of JS object EXCEPT JS Value type.
//  // In the case that the object is a value-wrapper object,
//  // we enter the runtime system to make sure that indexing into string
//  // objects work as intended.
//  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
////  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
////  __ cmp(r2, Operand(JS_OBJECT_TYPE));
////  __ b(lt, &slow);
//  __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset));
//  __ bcond(less, &slow, a2, Operand(JS_OBJECT_TYPE));
//  __ nop();  // NOP_ADDED
//
//  // Get the elements array of the object.
////  __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
//  __ lw(a1, FieldMemOperand(a1, JSObject::kElementsOffset));
//  // Check that the object is in fast mode (not dictionary).
////  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
////  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
////  __ cmp(r3, ip);
////  __ b(ne, &slow);
//  __ lw(t3, FieldMemOperand(a1, HeapObject::kMapOffset));
//  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
//  __ bcond(ne, &slow, t3, Operand(ip));
//  __ nop();  // NOP_ADDED
//  // Check that the key (index) is within bounds.
////  __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
////  __ cmp(r0, Operand(r3));
////  __ b(lo, &fast);
//  __ lw(t3, FieldMemOperand(a1, Array::kLengthOffset));
//  __ bcond(Uless, &fast, a0, Operand(t3));
//  __ nop();  // NOP_ADDED
//
//  // Slow case: Push extra copies of the arguments (2).
//  __ bind(&slow);
//  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, a0, a1);
////  __ ldm(ia, sp, r0.bit() | r1.bit());
////  __ stm(db_w, sp, r0.bit() | r1.bit());
//  __ lw(a0, MemOperand(sp, 0));
//  __ lw(a1, MemOperand(sp, 4));
//  __ multi_push_reversed(a0.bit() | a1.bit());
//  // Do tail-call to runtime routine.
//  __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2, 1);
//  __ nop();  // NOP_ADDED
//
//  // Fast case: Do the load.
//  __ bind(&fast);
////  __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
////  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
////  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
////  __ cmp(r0, ip);
////  __ break_(0x00001);
//  __ add(a3, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
//  __ sll(t3, a0, kPointerSizeLog2);
//  __ add(a0, a3, Operand(t3));
//  __ lw(v0, MemOperand(a0));
//  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
//  // In case the loaded value is the_hole we have to consult GetProperty
//  // to ensure the prototype chain is searched.
//  __ bcond(eq, &slow, v0, Operand(ip));
//  __ nop();  // NOP_ADDED
//
//  __ Ret();
//  __ nop();  // NOP_ADDED
}
| 732 |
| 733 |
// Stub for the in-progress MIPS port: specialized external-array keyed loads
// are not implemented; aborts via UNIMPLEMENTED_() if reached.  Once the
// generic path works this could fall back to GenerateGeneric (see TODO).
void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm,
                                        ExternalArrayType array_type) {
  UNIMPLEMENTED_();
  // TODO(476): port specialized code.
  // GenerateGeneric(masm);
}
| 740 |
| 741 |
// Stub for the in-progress MIPS port: the generic keyed-store-IC entry (push
// value, key and receiver, then tail-call runtime entry 'f') is not
// implemented yet and aborts via UNIMPLEMENTED_().  The commented-out code
// below is the intended MIPS implementation, kept as a reference; '////'
// lines are the ARM originals it was translated from.
void KeyedStoreIC::Generate(MacroAssembler* masm,
                            const ExternalReference& f) {
  UNIMPLEMENTED_();
//  // ---------- S t a t e --------------
//  //  -- a0     : value
//  //  -- ra     : return address
//  //  -- sp[0]  : key
//  //  -- sp[1]  : receiver
//
//  // TODO(MIPS.4): Optimize this. (easy)
//
////  __ ldm(ia, sp, r2.bit() | r3.bit());
////  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
////  __ break_(0x00003);
//  __ lw(a2, MemOperand(sp, 0));
//  __ lw(a3, MemOperand(sp, 4));
//  __ multi_push_reversed(a0.bit() | a2.bit() | a3.bit());
//
//  __ TailCallRuntime(f, 3, 1);
//  __ nop();  // NOP_ADDED
}
| 763 |
| 764 |
// Generic keyed store (obj[key] = value).  Not yet enabled on MIPS —
// UNIMPLEMENTED_() traps.  The commented-out body is the in-progress
// ARM-to-MIPS translation: '////' lines are the original ARM instructions,
// '//' lines the proposed MIPS equivalents.  Structure of the disabled code:
//   - smi-check the key and the receiver, map-check the receiver;
//   - fast path: in-bounds store into a fast-mode (FixedArray) backing store;
//   - extra path: grow a JS array by one element (store at array.length);
//   - slow path: re-push arguments and tail-call Runtime::kSetProperty;
//   - after a non-smi store, the write barrier (RecordWrite) is invoked.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
  UNIMPLEMENTED_();
  // // ---------- S t a t e --------------
  // // -- a0     : value
  // // -- ra     : return address
  // // -- sp[0]  : key
  // // -- sp[1]  : receiver
  // Label slow, fast, array, extra, exit;
  // // Get the key and the object from the stack.
  //// __ ldm(ia, sp, r1.bit() | r3.bit());  // r1 = key, r3 = receiver
  //// __ break_(0x00002);
  // __ lw(a1, MemOperand(sp, 0));
  // __ lw(a3, MemOperand(sp, 4));
  // // Check that the key is a smi.
  //// __ tst(r1, Operand(kSmiTagMask));
  //// __ b(ne, &slow);
  // __ andi(t0, a1, Operand(kSmiTagMask));
  // __ bcond(ne, &slow, t0, Operand(zero_reg));
  // __ nop();  // NOP_ADDED: branch delay slot.
  // // Check that the object isn't a smi.
  //// __ tst(r3, Operand(kSmiTagMask));
  //// __ b(eq, &slow);
  // __ andi(t1, a3, Operand(kSmiTagMask));
  // __ bcond(eq, &slow, t1, Operand(zero_reg));
  // __ nop();  // NOP_ADDED
  // // Get the map of the object.
  //// __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  // __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
  // // Check that the receiver does not require access checks.  We need
  // // to do this because this generic stub does not perform map checks.
  //// __ ldrb(ip, FieldMemOperand(r2, Map::kBitFieldOffset));
  //// __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  //// __ b(ne, &slow);
  // __ lbu(ip, FieldMemOperand(a2, Map::kBitFieldOffset));
  // // NOTE(review): the andi below appears to be missing its source register
  // // (presumably ip, loaded just above) — three-operand form is used
  // // everywhere else in this file.  Verify before enabling.
  // __ andi(t3, Operand(1 << Map::kIsAccessCheckNeeded));
  // __ bcond(ne, &slow, t3, Operand(zero_reg));
  // __ nop();  // NOP_ADDED
  // // Check if the object is a JS array or not.
  //// __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  //// __ cmp(r2, Operand(JS_ARRAY_TYPE));
  // __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  // // a1 == key.
  // __ bcond(eq, &array, a2, Operand(JS_ARRAY_TYPE));
  // __ nop();  // NOP_ADDED
  // // Check that the object is some kind of JS object.
  //// __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
  // __ bcond(less, &slow, a2, Operand(FIRST_JS_OBJECT_TYPE));
  // __ nop();  // NOP_ADDED
  //
  //
  // // Object case: Check key against length in the elements array.
  //// __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
  // __ lw(a3, FieldMemOperand(a3, JSObject::kElementsOffset));
  // // Check that the object is in fast mode (not dictionary).
  //// __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  //// __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  //// __ cmp(r2, ip);
  //// __ b(ne, &slow);
  // __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
  // __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  // __ bcond(ne, &slow, a2, Operand(ip));
  // __ nop();  // NOP_ADDED
  // // Untag the key (for checking against untagged length in the fixed array).
  //// __ mov(r1, Operand(r1, ASR, kSmiTagSize));
  // __ sra(a1, a1, kSmiTagSize);
  // // Compute address to store into and check array bounds.
  //// __ add(r2, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  //// __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
  //// __ ldr(ip, FieldMemOperand(r3, FixedArray::kLengthOffset));
  //// __ cmp(r1, Operand(ip));
  //// __ b(lo, &fast);
  // __ add(a2, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // __ sll(t1, a1, kPointerSizeLog2);
  // __ add(a2, a2, t1);
  // __ lw(ip, FieldMemOperand(a3, FixedArray::kLengthOffset));
  // __ bcond(Uless, &fast, a1, Operand(ip));
  // __ nop();  // NOP_ADDED
  //
  //
  // // Slow case: Push extra copies of the arguments (3).
  // __ bind(&slow);
  //// __ ldm(ia, sp, r1.bit() | r3.bit());  // r0 == value, r1 == key, r3 == object
  //// __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
  // __ lw(a1, MemOperand(sp, 0));
  // __ lw(a3, MemOperand(sp, 4));
  // __ multi_push_reversed(a0.bit() | a1.bit() | a3.bit());
  // // Do tail-call to runtime routine.
  // __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3, 1);
  // __ nop();  // NOP_ADDED
  //
  // // Extra capacity case: Check if there is extra capacity to
  // // perform the store and update the length. Used for adding one
  // // element to the array by writing to array[array.length].
  // // r0 == value, r1 == key, r2 == elements, r3 == object
  // __ bind(&extra);
  //// __ b(ne, &slow);  // do not leave holes in the array
  //// __ mov(r1, Operand(r1, ASR, kSmiTagSize));  // untag
  //// __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
  //// __ cmp(r1, Operand(ip));
  //// __ b(hs, &slow);
  //// __ mov(r1, Operand(r1, LSL, kSmiTagSize));  // restore tag
  //// __ add(r1, r1, Operand(1 << kSmiTagSize));  // and increment
  //// __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
  //// __ mov(r3, Operand(r2));
  // // NOTE(review): ip is compared here before being loaded on this path; the
  // // ARM original branched on condition flags set by an earlier cmp, which
  // // MIPS has no equivalent of.  Verify the intended operands before
  // // enabling.
  // __ bcond(ne, &slow, a1, Operand(ip));  // do not leave holes in the array
  // __ nop();  // NOP_ADDED
  // __ sra(t1, a1, kSmiTagSize);  // untag
  // __ lw(ip, FieldMemOperand(a2, Array::kLengthOffset));
  // __ bcond(Ugreater_equal, &slow, t1, Operand(ip));
  // __ nop();  // NOP_ADDED
  // __ add(a1, a1, Operand(1 << kSmiTagSize));  // and increment
  // __ sw(a1, FieldMemOperand(a3, JSArray::kLengthOffset));
  // __ mov(a3, a2);
  // // NOTE: Computing the address to store into must take the fact
  // // that the key has been incremented into account.
  // int displacement = FixedArray::kHeaderSize - kHeapObjectTag -
  //     ((1 << kSmiTagSize) * 2);
  //// __ add(r2, r2, Operand(displacement));
  //// __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  // __ add(a2, a2, Operand(displacement));
  // __ sll(t1, a1, kPointerSizeLog2 - kSmiTagSize);
  // __ add(a2, a2, Operand(t1));
  // __ b(&fast);
  // __ nop();  // NOP_ADDED
  //
  //
  // // Array case: Get the length and the elements array from the JS
  // // array. Check that the array is in fast mode; if it is the
  // // length is always a smi.
  // // a0 == value, a3 == object
  // __ bind(&array);
  //// __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
  //// __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  //// __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  //// __ cmp(r1, ip);
  //// __ b(ne, &slow);
  // __ lw(a2, FieldMemOperand(a3, JSObject::kElementsOffset));
  // __ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
  // __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  // __ bcond(ne, &slow, a1, Operand(ip));
  // __ nop();  // NOP_ADDED
  //
  // // Check the key against the length in the array, compute the
  // // address to store into and fall through to fast case.
  //// __ ldr(r1, MemOperand(sp));  // restore key
  // __ lw(a1, MemOperand(sp));  // restore key
  // // a0 == value, a1 == key, a2 == elements, a3 == object.
  //// __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
  // __ lw(ip, FieldMemOperand(a3, JSArray::kLengthOffset));
  //// __ cmp(r1, Operand(ip));
  // // TODO(MIPS.1) Check the comparison.
  // __ bcond(greater_equal, &extra, a1, Operand(ip));
  // __ nop();  // NOP_ADDED
  //// __ mov(r3, Operand(r2));
  //// __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  //// __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  // __ mov(a3, a2);
  // __ add(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // __ sll(t1, a1, kPointerSizeLog2 - kSmiTagSize);
  // __ addu(a2, a2, t1);
  //
  //
  // // Fast case: Do the store.
  // // a0 == value, a2 == address to store into, a3 == elements
  // __ bind(&fast);
  // __ sw(a0, MemOperand(a2));
  // // Skip write barrier if the written value is a smi.
  //// __ tst(r0, Operand(kSmiTagMask));
  //// __ b(eq, &exit);
  // __ andi(t0, a0, Operand(kSmiTagMask));
  // __ bcond(eq, &exit, t0, Operand(zero_reg));
  // __ nop();  // NOP_ADDED
  // // Update write barrier for the elements array address.
  //// __ sub(r1, r2, Operand(r3));
  // __ sub(a1, a2, Operand(a3));
  // __ RecordWrite(a3, a1, a2);
  //
  // __ bind(&exit);
  // __ Ret();
  // __ nop();  // NOP_ADDED
}
| 946 |
| 947 |
// Keyed store into an external (typed) array.  Not yet ported to MIPS:
// traps via UNIMPLEMENTED_() so any use is caught immediately.
void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm,
                                         ExternalArrayType array_type) {
  UNIMPLEMENTED_();
  // TODO(476): port specialized code.
  // GenerateGeneric(masm);
}
| 954 |
| 955 |
// Keyed-store slow path that extends the receiver's backing storage via the
// shared runtime entry.  Not yet ported to MIPS: UNIMPLEMENTED_() traps at
// stub-generation time and break_(0x00827) traps at runtime if the stub is
// ever reached.  The commented-out body is the ARM original awaiting
// translation.
void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  UNIMPLEMENTED_();
  __ break_(0x00827);  // UNIMPLEMENTED
  // // ---------- S t a t e --------------
  // // -- r0     : value
  // // -- lr     : return address
  // // -- sp[0]  : key
  // // -- sp[1]  : receiver
  // // ----------- S t a t e -------------
  //
  // __ ldm(ia, sp, r2.bit() | r3.bit());
  // __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
  //
  // // Perform tail call to the entry.
  // __ TailCallRuntime(
  //     ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
| 973 |
| 974 |
// Megamorphic named store: probe the stub cache for a monomorphic STORE_IC
// stub and fall back to the miss handler.  Not yet ported to MIPS:
// UNIMPLEMENTED_() traps at stub-generation time and break_(0x00845) traps
// at runtime if the stub is ever reached.  The commented-out body is the ARM
// original awaiting translation.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  UNIMPLEMENTED_();
  __ break_(0x00845);  // UNIMPLEMENTED
  // // ----------- S t a t e -------------
  // // -- r0    : value
  // // -- r2    : name
  // // -- lr    : return address
  // // -- [sp]  : receiver
  // // -----------------------------------
  //
  // // Get the receiver from the stack and probe the stub cache.
  // __ ldr(r1, MemOperand(sp));
  // Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
  //                                        NOT_IN_LOOP,
  //                                        MONOMORPHIC);
  // StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);
  //
  // // Cache miss: Jump to runtime.
  // Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}
| 995 |
| 996 |
// Named-store slow path that extends the receiver's backing storage via the
// shared runtime entry.  Not yet ported to MIPS: UNIMPLEMENTED_() traps at
// stub-generation time and break_(0x00866) traps at runtime if the stub is
// ever reached.  The commented-out body is the ARM original awaiting
// translation.
void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  UNIMPLEMENTED_();
  __ break_(0x00866);  // UNIMPLEMENTED
  // // ----------- S t a t e -------------
  // // -- r0    : value
  // // -- r2    : name
  // // -- lr    : return address
  // // -- [sp]  : receiver
  // // -----------------------------------
  //
  // __ ldr(r3, MemOperand(sp));  // copy receiver
  // __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
  //
  // // Perform tail call to the entry.
  // __ TailCallRuntime(
  //     ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3, 1);
}
| 1014 |
| 1015 |
// Generic named-store entry: re-pushes (value, name, receiver) and tail-calls
// the runtime function |f|.  Not yet enabled on MIPS — UNIMPLEMENTED_()
// traps.  '////' lines are the original ARM instructions, '//' lines the
// proposed MIPS equivalents.
void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  UNIMPLEMENTED_();
  // // ----------- S t a t e -------------
  // // -- a0    : value
  // // -- a2    : name
  // // -- ra    : return address
  // // -- [sp]  : receiver
  // // -----------------------------------
  //
  //// __ ldr(r3, MemOperand(sp));  // copy receiver
  //// __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
  // __ lw(a3, MemOperand(sp));  // copy receiver
  // __ multi_push_reversed(a3.bit() | a2.bit() | a0.bit());
  //
  // // Perform tail call to the entry.
  // __ TailCallRuntime(f, 3, 1);
  // __ nop();  // NOP_ADDED: fill the branch delay slot of the tail call.
}
| 1034 |
| 1035 |
| 1036 #undef __ |
| 1037 |
| 1038 |
| 1039 } } // namespace v8::internal |
| OLD | NEW |