| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 #include "src/v8.h" | 6 #include "src/v8.h" |
| 7 | 7 |
| 8 #if V8_TARGET_ARCH_MIPS | 8 #if V8_TARGET_ARCH_MIPS |
| 9 | 9 |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 145 matching lines...) |
| 156 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); | 156 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); |
| 157 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 157 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
| 158 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); | 158 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); |
| 159 } | 159 } |
| 160 | 160 |
| 161 | 161 |
| 162 // Loads an indexed element from a fast case array. | 162 // Loads an indexed element from a fast case array. |
| 163 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | 163 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, |
| 164 Register key, Register elements, | 164 Register key, Register elements, |
| 165 Register scratch1, Register scratch2, | 165 Register scratch1, Register scratch2, |
| 166 Register result, Label* slow) { | 166 Register result, Label* slow, |
| 167 LanguageMode language_mode) { |
| 167 // Register use: | 168 // Register use: |
| 168 // | 169 // |
| 169 // receiver - holds the receiver on entry. | 170 // receiver - holds the receiver on entry. |
| 170 // Unchanged unless 'result' is the same register. | 171 // Unchanged unless 'result' is the same register. |
| 171 // | 172 // |
| 172 // key - holds the smi key on entry. | 173 // key - holds the smi key on entry. |
| 173 // Unchanged unless 'result' is the same register. | 174 // Unchanged unless 'result' is the same register. |
| 174 // | 175 // |
| 175 // result - holds the result on exit if the load succeeded. | 176 // result - holds the result on exit if the load succeeded. |
| 176 // Allowed to be the same as 'receiver' or 'key'. | 177 // Allowed to be the same as 'receiver' or 'key'. |
| 177 // Unchanged on bailout so 'receiver' and 'key' can be safely | 178 // Unchanged on bailout so 'receiver' and 'key' can be safely |
| 178 // used by further computation. | 179 // used by further computation. |
| 179 // | 180 // |
| 180 // Scratch registers: | 181 // Scratch registers: |
| 181 // | 182 // |
| 182 // elements - holds the elements of the receiver and its prototypes. | 183 // elements - holds the elements of the receiver and its prototypes. |
| 183 // | 184 // |
| 184 // scratch1 - used to hold elements length, bit fields, base addresses. | 185 // scratch1 - used to hold elements length, bit fields, base addresses. |
| 185 // | 186 // |
| 186 // scratch2 - used to hold maps, prototypes, and the loaded value. | 187 // scratch2 - used to hold maps, prototypes, and the loaded value. |
| 187 Label check_prototypes, check_next_prototype; | 188 Label check_prototypes, check_next_prototype; |
| 188 Label done, in_bounds, return_undefined; | 189 Label done, in_bounds, absent; |
| 189 | 190 |
| 190 __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 191 __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 191 __ AssertFastElements(elements); | 192 __ AssertFastElements(elements); |
| 192 | 193 |
| 193 // Check that the key (index) is within bounds. | 194 // Check that the key (index) is within bounds. |
| 194 __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 195 __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
| 195 __ Branch(&in_bounds, lo, key, Operand(scratch1)); | 196 __ Branch(&in_bounds, lo, key, Operand(scratch1)); |
| 196 // Out-of-bounds. Check the prototype chain to see if we can just return | 197 // Out-of-bounds. Check the prototype chain to see if we can just return |
| 197 // 'undefined'. | 198 // 'undefined'. |
| 198 // Negative keys can't take the fast OOB path. | 199 // Negative keys can't take the fast OOB path. |
| 199 __ Branch(slow, lt, key, Operand(zero_reg)); | 200 __ Branch(slow, lt, key, Operand(zero_reg)); |
| 200 __ bind(&check_prototypes); | 201 __ bind(&check_prototypes); |
| 201 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 202 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 202 __ bind(&check_next_prototype); | 203 __ bind(&check_next_prototype); |
| 203 __ lw(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); | 204 __ lw(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); |
| 204 // scratch2: current prototype | 205 // scratch2: current prototype |
| 205 __ LoadRoot(at, Heap::kNullValueRootIndex); | 206 __ LoadRoot(at, Heap::kNullValueRootIndex); |
| 206 __ Branch(&return_undefined, eq, scratch2, Operand(at)); | 207 __ Branch(&absent, eq, scratch2, Operand(at)); |
| 207 __ lw(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); | 208 __ lw(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); |
| 208 __ lw(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | 209 __ lw(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); |
| 209 // elements: elements of current prototype | 210 // elements: elements of current prototype |
| 210 // scratch2: map of current prototype | 211 // scratch2: map of current prototype |
| 211 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); | 212 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); |
| 212 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); | 213 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); |
| 213 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); | 214 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); |
| 214 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | | 215 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | |
| 215 (1 << Map::kHasIndexedInterceptor))); | 216 (1 << Map::kHasIndexedInterceptor))); |
| 216 __ Branch(slow, ne, at, Operand(zero_reg)); | 217 __ Branch(slow, ne, at, Operand(zero_reg)); |
| 217 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | 218 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); |
| 218 __ Branch(slow, ne, elements, Operand(at)); | 219 __ Branch(slow, ne, elements, Operand(at)); |
| 219 __ Branch(&check_next_prototype); | 220 __ Branch(&check_next_prototype); |
| 220 | 221 |
| 221 __ bind(&return_undefined); | 222 __ bind(&absent); |
| 222 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 223 if (is_strong(language_mode)) { |
| 223 __ Branch(&done); | 224 // Strong mode accesses must throw in this case, so call the runtime. |
| 225 __ Branch(slow); |
| 226 } else { |
| 227 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 228 __ Branch(&done); |
| 229 } |
| 224 | 230 |
| 225 __ bind(&in_bounds); | 231 __ bind(&in_bounds); |
| 226 // Fast case: Do the load. | 232 // Fast case: Do the load. |
| 227 __ Addu(scratch1, elements, | 233 __ Addu(scratch1, elements, |
| 228 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 234 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 229 // The key is a smi. | 235 // The key is a smi. |
| 230 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 236 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
| 231 __ sll(at, key, kPointerSizeLog2 - kSmiTagSize); | 237 __ sll(at, key, kPointerSizeLog2 - kSmiTagSize); |
| 232 __ addu(at, at, scratch1); | 238 __ addu(at, at, scratch1); |
| 233 __ lw(scratch2, MemOperand(at)); | 239 __ lw(scratch2, MemOperand(at)); |
| (...skipping 44 matching lines...) |
| 278 Label slow; | 284 Label slow; |
| 279 | 285 |
| 280 __ lw(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), | 286 __ lw(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), |
| 281 JSObject::kPropertiesOffset)); | 287 JSObject::kPropertiesOffset)); |
| 282 GenerateDictionaryLoad(masm, &slow, dictionary, | 288 GenerateDictionaryLoad(masm, &slow, dictionary, |
| 283 LoadDescriptor::NameRegister(), v0, a3, t0); | 289 LoadDescriptor::NameRegister(), v0, a3, t0); |
| 284 __ Ret(); | 290 __ Ret(); |
| 285 | 291 |
| 286 // Dictionary load failed, go slow (but don't miss). | 292 // Dictionary load failed, go slow (but don't miss). |
| 287 __ bind(&slow); | 293 __ bind(&slow); |
| 288 GenerateRuntimeGetProperty(masm); | 294 GenerateSlow(masm); |
| 289 } | 295 } |
| 290 | 296 |
| 291 | 297 |
| 292 // A register that isn't one of the parameters to the load ic. | 298 // A register that isn't one of the parameters to the load ic. |
| 293 static const Register LoadIC_TempRegister() { return a3; } | 299 static const Register LoadIC_TempRegister() { return a3; } |
| 294 | 300 |
| 295 | 301 |
| 296 static void LoadIC_PushArgs(MacroAssembler* masm) { | 302 static void LoadIC_PushArgs(MacroAssembler* masm) { |
| 297 Register receiver = LoadDescriptor::ReceiverRegister(); | 303 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 298 Register name = LoadDescriptor::NameRegister(); | 304 Register name = LoadDescriptor::NameRegister(); |
| (...skipping 14 matching lines...) |
| 313 | 319 |
| 314 LoadIC_PushArgs(masm); | 320 LoadIC_PushArgs(masm); |
| 315 | 321 |
| 316 // Perform tail call to the entry. | 322 // Perform tail call to the entry. |
| 317 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 323 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
| 318 int arg_count = 4; | 324 int arg_count = 4; |
| 319 __ TailCallExternalReference(ref, arg_count, 1); | 325 __ TailCallExternalReference(ref, arg_count, 1); |
| 320 } | 326 } |
| 321 | 327 |
| 322 | 328 |
| 323 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 329 void LoadIC::GenerateSlow(MacroAssembler* masm) { |
| 324 // The return address is in ra. | 330 // The return address is in ra. |
| 325 | 331 |
| 326 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); | 332 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); |
| 327 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); | 333 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); |
| 328 | 334 |
| 329 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 335 ExternalReference ref = |
| 336 ExternalReference(IC_Utility(kLoadIC_Slow), masm->isolate()); |
| 337 int arg_count = 2; |
| 338 __ TailCallExternalReference(ref, arg_count, 1); |
| 330 } | 339 } |
| 331 | 340 |
| 332 | 341 |
| 333 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 342 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
| 334 // The return address is in ra. | 343 // The return address is in ra. |
| 335 Isolate* isolate = masm->isolate(); | 344 Isolate* isolate = masm->isolate(); |
| 336 | 345 |
| 337 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::SlotRegister(), | 346 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::SlotRegister(), |
| 338 LoadWithVectorDescriptor::VectorRegister())); | 347 LoadWithVectorDescriptor::VectorRegister())); |
| 339 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, t0, t1); | 348 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, t0, t1); |
| 340 | 349 |
| 341 LoadIC_PushArgs(masm); | 350 LoadIC_PushArgs(masm); |
| 342 | 351 |
| 343 // Perform tail call to the entry. | 352 // Perform tail call to the entry. |
| 344 ExternalReference ref = | 353 ExternalReference ref = |
| 345 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); | 354 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); |
| 346 | 355 |
| 347 int arg_count = 4; | 356 int arg_count = 4; |
| 348 __ TailCallExternalReference(ref, arg_count, 1); | 357 __ TailCallExternalReference(ref, arg_count, 1); |
| 349 } | 358 } |
| 350 | 359 |
| 351 | 360 |
| 352 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 361 void KeyedLoadIC::GenerateSlow(MacroAssembler* masm) { |
| 353 // The return address is in ra. | 362 // The return address is in ra. |
| 354 | 363 |
| 355 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 364 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
| 356 | 365 |
| 357 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 366 ExternalReference ref = |
| 367 ExternalReference(IC_Utility(kKeyedLoadIC_Slow), masm->isolate()); |
| 368 int arg_count = 2; |
| 369 __ TailCallExternalReference(ref, arg_count, 1); |
| 358 } | 370 } |
| 359 | 371 |
| 360 | 372 |
| 361 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { | 373 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm, |
| 374 LanguageMode language_mode) { |
| 362 // The return address is in ra. | 375 // The return address is in ra. |
| 363 Label slow, check_name, index_smi, index_name, property_array_property; | 376 Label slow, check_name, index_smi, index_name, property_array_property; |
| 364 Label probe_dictionary, check_number_dictionary; | 377 Label probe_dictionary, check_number_dictionary; |
| 365 | 378 |
| 366 Register key = LoadDescriptor::NameRegister(); | 379 Register key = LoadDescriptor::NameRegister(); |
| 367 Register receiver = LoadDescriptor::ReceiverRegister(); | 380 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 368 DCHECK(key.is(a2)); | 381 DCHECK(key.is(a2)); |
| 369 DCHECK(receiver.is(a1)); | 382 DCHECK(receiver.is(a1)); |
| 370 | 383 |
| 371 Isolate* isolate = masm->isolate(); | 384 Isolate* isolate = masm->isolate(); |
| 372 | 385 |
| 373 // Check that the key is a smi. | 386 // Check that the key is a smi. |
| 374 __ JumpIfNotSmi(key, &check_name); | 387 __ JumpIfNotSmi(key, &check_name); |
| 375 __ bind(&index_smi); | 388 __ bind(&index_smi); |
| 376 // Now the key is known to be a smi. This place is also jumped to from below | 389 // Now the key is known to be a smi. This place is also jumped to from below |
| 377 // where a numeric string is converted to a smi. | 390 // where a numeric string is converted to a smi. |
| 378 | 391 |
| 379 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 392 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
| 380 Map::kHasIndexedInterceptor, &slow); | 393 Map::kHasIndexedInterceptor, &slow); |
| 381 | 394 |
| 382 // Check the receiver's map to see if it has fast elements. | 395 // Check the receiver's map to see if it has fast elements. |
| 383 __ CheckFastElements(a0, a3, &check_number_dictionary); | 396 __ CheckFastElements(a0, a3, &check_number_dictionary); |
| 384 | 397 |
| 385 GenerateFastArrayLoad(masm, receiver, key, a0, a3, t0, v0, &slow); | 398 GenerateFastArrayLoad(masm, receiver, key, a0, a3, t0, v0, &slow, |
| 399 language_mode); |
| 386 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, t0, a3); | 400 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, t0, a3); |
| 387 __ Ret(); | 401 __ Ret(); |
| 388 | 402 |
| 389 __ bind(&check_number_dictionary); | 403 __ bind(&check_number_dictionary); |
| 390 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 404 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 391 __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset)); | 405 __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset)); |
| 392 | 406 |
| 393 // Check whether the elements is a number dictionary. | 407 // Check whether the elements is a number dictionary. |
| 394 // a3: elements map | 408 // a3: elements map |
| 395 // t0: elements | 409 // t0: elements |
| 396 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 410 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
| 397 __ Branch(&slow, ne, a3, Operand(at)); | 411 __ Branch(&slow, ne, a3, Operand(at)); |
| 398 __ sra(a0, key, kSmiTagSize); | 412 __ sra(a0, key, kSmiTagSize); |
| 399 __ LoadFromNumberDictionary(&slow, t0, key, v0, a0, a3, t1); | 413 __ LoadFromNumberDictionary(&slow, t0, key, v0, a0, a3, t1); |
| 400 __ Ret(); | 414 __ Ret(); |
| 401 | 415 |
| 402 // Slow case, key and receiver still in a2 and a1. | 416 // Slow case, key and receiver still in a2 and a1. |
| 403 __ bind(&slow); | 417 __ bind(&slow); |
| 404 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, t0, | 418 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, t0, |
| 405 a3); | 419 a3); |
| 406 GenerateRuntimeGetProperty(masm); | 420 GenerateSlow(masm); |
| 407 | 421 |
| 408 __ bind(&check_name); | 422 __ bind(&check_name); |
| 409 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); | 423 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); |
| 410 | 424 |
| 411 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 425 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
| 412 Map::kHasNamedInterceptor, &slow); | 426 Map::kHasNamedInterceptor, &slow); |
| 413 | 427 |
| 414 | 428 |
| 415 // If the receiver is a fast-case object, check the stub cache. Otherwise | 429 // If the receiver is a fast-case object, check the stub cache. Otherwise |
| 416 // probe the dictionary. | 430 // probe the dictionary. |
| (...skipping 450 matching lines...) |
| 867 patcher.ChangeBranchCondition(ne); | 881 patcher.ChangeBranchCondition(ne); |
| 868 } else { | 882 } else { |
| 869 DCHECK(Assembler::IsBne(branch_instr)); | 883 DCHECK(Assembler::IsBne(branch_instr)); |
| 870 patcher.ChangeBranchCondition(eq); | 884 patcher.ChangeBranchCondition(eq); |
| 871 } | 885 } |
| 872 } | 886 } |
| 873 } // namespace internal | 887 } // namespace internal |
| 874 } // namespace v8 | 888 } // namespace v8 |
| 875 | 889 |
| 876 #endif // V8_TARGET_ARCH_MIPS | 890 #endif // V8_TARGET_ARCH_MIPS |
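For context, the behavioral change the new LanguageMode parameter threads into GenerateFastArrayLoad and GenerateMegamorphic can be summarized outside of MIPS assembly: when a keyed load falls off the end of the fast elements and the prototype walk finds nothing, sloppy and strict mode still materialize undefined, while strong mode branches to the slow path so the runtime can throw. The sketch below is a minimal stand-alone illustration of that policy; the LanguageMode enum values, LoadElement helper, and the exception used here are hypothetical stand-ins, not V8 API.

```cpp
// Minimal sketch (not V8 code) of the absent-element policy in this patch:
// in-bounds loads succeed, absent elements yield undefined in sloppy/strict
// mode, and strong mode instead takes the slow path, which throws.
#include <iostream>
#include <optional>
#include <stdexcept>
#include <vector>

enum class LanguageMode { kSloppy, kStrict, kStrong };  // hypothetical stand-in

std::optional<int> LoadElement(const std::vector<int>& elements, size_t key,
                               LanguageMode mode) {
  if (key < elements.size()) return elements[key];  // fast in-bounds load
  if (mode == LanguageMode::kStrong) {
    // Mirrors the new "absent" branch: strong mode jumps to the slow path,
    // where the runtime raises an error instead of producing undefined.
    throw std::runtime_error("absent element read in strong mode");
  }
  return std::nullopt;  // models loading the undefined value
}

int main() {
  std::vector<int> a = {1, 2, 3};
  auto v = LoadElement(a, 7, LanguageMode::kSloppy);
  std::cout << (v ? "value" : "undefined") << "\n";  // prints "undefined"
  try {
    LoadElement(a, 7, LanguageMode::kStrong);
  } catch (const std::runtime_error& e) {
    std::cout << "threw: " << e.what() << "\n";
  }
  return 0;
}
```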