OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 | 5 |
6 #include "src/v8.h" | 6 #include "src/v8.h" |
7 | 7 |
8 #if V8_TARGET_ARCH_MIPS | 8 #if V8_TARGET_ARCH_MIPS |
9 | 9 |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 145 matching lines...)
156 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); | 156 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); |
157 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 157 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
158 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); | 158 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); |
159 } | 159 } |
160 | 160 |
161 | 161 |
162 // Loads an indexed element from a fast case array. | 162 // Loads an indexed element from a fast case array. |
163 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | 163 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, |
164 Register key, Register elements, | 164 Register key, Register elements, |
165 Register scratch1, Register scratch2, | 165 Register scratch1, Register scratch2, |
166 Register result, Label* slow) { | 166 Register result, Label* slow, |
| 167 LanguageMode language_mode) { |
167 // Register use: | 168 // Register use: |
168 // | 169 // |
169 // receiver - holds the receiver on entry. | 170 // receiver - holds the receiver on entry. |
170 // Unchanged unless 'result' is the same register. | 171 // Unchanged unless 'result' is the same register. |
171 // | 172 // |
172 // key - holds the smi key on entry. | 173 // key - holds the smi key on entry. |
173 // Unchanged unless 'result' is the same register. | 174 // Unchanged unless 'result' is the same register. |
174 // | 175 // |
175 // result - holds the result on exit if the load succeeded. | 176 // result - holds the result on exit if the load succeeded. |
176 // Allowed to be the same as 'receiver' or 'key'. | 177 // Allowed to be the same as 'receiver' or 'key'. |
177 // Unchanged on bailout so 'receiver' and 'key' can be safely | 178 // Unchanged on bailout so 'receiver' and 'key' can be safely |
178 // used by further computation. | 179 // used by further computation. |
179 // | 180 // |
180 // Scratch registers: | 181 // Scratch registers: |
181 // | 182 // |
182 // elements - holds the elements of the receiver and its prototypes. | 183 // elements - holds the elements of the receiver and its prototypes. |
183 // | 184 // |
184 // scratch1 - used to hold elements length, bit fields, base addresses. | 185 // scratch1 - used to hold elements length, bit fields, base addresses. |
185 // | 186 // |
186 // scratch2 - used to hold maps, prototypes, and the loaded value. | 187 // scratch2 - used to hold maps, prototypes, and the loaded value. |
187 Label check_prototypes, check_next_prototype; | 188 Label check_prototypes, check_next_prototype; |
188 Label done, in_bounds, return_undefined; | 189 Label done, in_bounds, absent; |
189 | 190 |
190 __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 191 __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
191 __ AssertFastElements(elements); | 192 __ AssertFastElements(elements); |
192 | 193 |
193 // Check that the key (index) is within bounds. | 194 // Check that the key (index) is within bounds. |
194 __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 195 __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
195 __ Branch(&in_bounds, lo, key, Operand(scratch1)); | 196 __ Branch(&in_bounds, lo, key, Operand(scratch1)); |
196 // Out-of-bounds. Check the prototype chain to see if we can just return | 197 // Out-of-bounds. Check the prototype chain to see if we can just return |
197 // 'undefined'. | 198 // 'undefined'. |
198 // Negative keys can't take the fast OOB path. | 199 // Negative keys can't take the fast OOB path. |
199 __ Branch(slow, lt, key, Operand(zero_reg)); | 200 __ Branch(slow, lt, key, Operand(zero_reg)); |
200 __ bind(&check_prototypes); | 201 __ bind(&check_prototypes); |
201 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 202 __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
202 __ bind(&check_next_prototype); | 203 __ bind(&check_next_prototype); |
203 __ lw(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); | 204 __ lw(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); |
204 // scratch2: current prototype | 205 // scratch2: current prototype |
205 __ LoadRoot(at, Heap::kNullValueRootIndex); | 206 __ LoadRoot(at, Heap::kNullValueRootIndex); |
206 __ Branch(&return_undefined, eq, scratch2, Operand(at)); | 207 __ Branch(&absent, eq, scratch2, Operand(at)); |
207 __ lw(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); | 208 __ lw(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); |
208 __ lw(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | 209 __ lw(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); |
209 // elements: elements of current prototype | 210 // elements: elements of current prototype |
210 // scratch2: map of current prototype | 211 // scratch2: map of current prototype |
211 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); | 212 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); |
212 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); | 213 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); |
213 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); | 214 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); |
214 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | | 215 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | |
215 (1 << Map::kHasIndexedInterceptor))); | 216 (1 << Map::kHasIndexedInterceptor))); |
216 __ Branch(slow, ne, at, Operand(zero_reg)); | 217 __ Branch(slow, ne, at, Operand(zero_reg)); |
217 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | 218 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); |
218 __ Branch(slow, ne, elements, Operand(at)); | 219 __ Branch(slow, ne, elements, Operand(at)); |
219 __ Branch(&check_next_prototype); | 220 __ Branch(&check_next_prototype); |
220 | 221 |
221 __ bind(&return_undefined); | 222 __ bind(&absent); |
222 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 223 if (is_strong(language_mode)) { |
223 __ Branch(&done); | 224 // Strong mode accesses must throw in this case, so call the runtime. |
| 225 __ Branch(slow); |
| 226 } else { |
| 227 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 228 __ Branch(&done); |
| 229 } |
224 | 230 |
225 __ bind(&in_bounds); | 231 __ bind(&in_bounds); |
226 // Fast case: Do the load. | 232 // Fast case: Do the load. |
227 __ Addu(scratch1, elements, | 233 __ Addu(scratch1, elements, |
228 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 234 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
229 // The key is a smi. | 235 // The key is a smi. |
230 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 236 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
231 __ sll(at, key, kPointerSizeLog2 - kSmiTagSize); | 237 __ sll(at, key, kPointerSizeLog2 - kSmiTagSize); |
232 __ addu(at, at, scratch1); | 238 __ addu(at, at, scratch1); |
233 __ lw(scratch2, MemOperand(at)); | 239 __ lw(scratch2, MemOperand(at)); |
(...skipping 29 matching lines...)
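For orientation, here is a rough C++ model of the behaviour the patched GenerateFastArrayLoad emits. Everything in it (the LanguageMode enum, the is_strong helper, the int-vector elements) is an illustrative stand-in rather than V8's actual types, and the hole check done on the loaded value in the elided lines is omitted.

#include <cstddef>
#include <vector>

// Illustrative stand-ins; the real code operates on tagged V8 values.
enum class LanguageMode { kSloppy, kStrict, kStrong };
inline bool is_strong(LanguageMode m) { return m == LanguageMode::kStrong; }

enum class LoadOutcome { kValue, kUndefined, kSlowPath };

// Rough model of the control flow GenerateFastArrayLoad emits: in-bounds
// smi keys load straight from the elements backing store; out-of-bounds
// keys walk the prototype chain and, if nothing objects, produce undefined,
// except in strong mode, where the generated code now branches to the slow
// path so the runtime can throw.
LoadOutcome FastArrayLoadModel(const std::vector<int>& elements, int key,
                               bool prototype_chain_is_clean,
                               LanguageMode mode, int* result) {
  if (key >= 0 && static_cast<std::size_t>(key) < elements.size()) {
    *result = elements[key];  // fast case: direct element load (in_bounds)
    return LoadOutcome::kValue;
  }
  if (key < 0) return LoadOutcome::kSlowPath;  // negative keys bail out
  if (!prototype_chain_is_clean) {
    // Access checks, indexed interceptors or non-empty prototype elements
    // all force the slow path in the generated code.
    return LoadOutcome::kSlowPath;
  }
  if (is_strong(mode)) return LoadOutcome::kSlowPath;  // 'absent' in strong mode
  return LoadOutcome::kUndefined;  // sloppy/strict: OOB reads yield undefined
}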
263 // map: key map | 269 // map: key map |
264 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 270 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
265 STATIC_ASSERT(kInternalizedTag == 0); | 271 STATIC_ASSERT(kInternalizedTag == 0); |
266 __ And(at, hash, Operand(kIsNotInternalizedMask)); | 272 __ And(at, hash, Operand(kIsNotInternalizedMask)); |
267 __ Branch(not_unique, ne, at, Operand(zero_reg)); | 273 __ Branch(not_unique, ne, at, Operand(zero_reg)); |
268 | 274 |
269 __ bind(&unique); | 275 __ bind(&unique); |
270 } | 276 } |
271 | 277 |
272 | 278 |
273 void LoadIC::GenerateNormal(MacroAssembler* masm) { | 279 void LoadIC::GenerateNormal(MacroAssembler* masm, LanguageMode language_mode) { |
274 Register dictionary = a0; | 280 Register dictionary = a0; |
275 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); | 281 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); |
276 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); | 282 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); |
277 | 283 |
278 Label slow; | 284 Label slow; |
279 | 285 |
280 __ lw(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), | 286 __ lw(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), |
281 JSObject::kPropertiesOffset)); | 287 JSObject::kPropertiesOffset)); |
282 GenerateDictionaryLoad(masm, &slow, dictionary, | 288 GenerateDictionaryLoad(masm, &slow, dictionary, |
283 LoadDescriptor::NameRegister(), v0, a3, t0); | 289 LoadDescriptor::NameRegister(), v0, a3, t0); |
284 __ Ret(); | 290 __ Ret(); |
285 | 291 |
286 // Dictionary load failed, go slow (but don't miss). | 292 // Dictionary load failed, go slow (but don't miss). |
287 __ bind(&slow); | 293 __ bind(&slow); |
288 GenerateRuntimeGetProperty(masm); | 294 GenerateRuntimeGetProperty(masm, language_mode); |
289 } | 295 } |
290 | 296 |
291 | 297 |
292 // A register that isn't one of the parameters to the load ic. | 298 // A register that isn't one of the parameters to the load ic. |
293 static const Register LoadIC_TempRegister() { return a3; } | 299 static const Register LoadIC_TempRegister() { return a3; } |
294 | 300 |
295 | 301 |
296 static void LoadIC_PushArgs(MacroAssembler* masm) { | 302 static void LoadIC_PushArgs(MacroAssembler* masm) { |
297 Register receiver = LoadDescriptor::ReceiverRegister(); | 303 Register receiver = LoadDescriptor::ReceiverRegister(); |
298 Register name = LoadDescriptor::NameRegister(); | 304 Register name = LoadDescriptor::NameRegister(); |
(...skipping 14 matching lines...)
313 | 319 |
314 LoadIC_PushArgs(masm); | 320 LoadIC_PushArgs(masm); |
315 | 321 |
316 // Perform tail call to the entry. | 322 // Perform tail call to the entry. |
317 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 323 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
318 int arg_count = 4; | 324 int arg_count = 4; |
319 __ TailCallExternalReference(ref, arg_count, 1); | 325 __ TailCallExternalReference(ref, arg_count, 1); |
320 } | 326 } |
321 | 327 |
322 | 328 |
323 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 329 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm, |
| 330 LanguageMode language_mode) { |
324 // The return address is in ra. | 331 // The return address is in ra. |
325 | 332 |
326 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); | 333 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); |
327 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); | 334 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); |
328 | 335 |
329 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 336 // Do tail-call to runtime routine. |
| 337 __ TailCallRuntime(is_strong(language_mode) ? Runtime::kGetPropertyStrong |
| 338 : Runtime::kGetProperty, |
| 339 2, 1); |
330 } | 340 } |
331 | 341 |
332 | 342 |
333 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 343 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
334 // The return address is in ra. | 344 // The return address is in ra. |
335 Isolate* isolate = masm->isolate(); | 345 Isolate* isolate = masm->isolate(); |
336 | 346 |
337 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::SlotRegister(), | 347 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::SlotRegister(), |
338 LoadWithVectorDescriptor::VectorRegister())); | 348 LoadWithVectorDescriptor::VectorRegister())); |
339 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, t0, t1); | 349 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, t0, t1); |
340 | 350 |
341 LoadIC_PushArgs(masm); | 351 LoadIC_PushArgs(masm); |
342 | 352 |
343 // Perform tail call to the entry. | 353 // Perform tail call to the entry. |
344 ExternalReference ref = | 354 ExternalReference ref = |
345 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); | 355 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); |
346 | 356 |
347 int arg_count = 4; | 357 int arg_count = 4; |
348 __ TailCallExternalReference(ref, arg_count, 1); | 358 __ TailCallExternalReference(ref, arg_count, 1); |
349 } | 359 } |
350 | 360 |
351 | 361 |
352 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 362 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm, |
| 363 LanguageMode language_mode) { |
353 // The return address is in ra. | 364 // The return address is in ra. |
354 | 365 |
355 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 366 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
356 | 367 |
357 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 368 // Do tail-call to runtime routine. |
| 369 __ TailCallRuntime(is_strong(language_mode) ? Runtime::kKeyedGetPropertyStrong |
| 370 : Runtime::kKeyedGetProperty, |
| 371 2, 1); |
358 } | 372 } |
359 | 373 |
360 | 374 |
361 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { | 375 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm, |
| 376 LanguageMode language_mode) { |
362 // The return address is in ra. | 377 // The return address is in ra. |
363 Label slow, check_name, index_smi, index_name, property_array_property; | 378 Label slow, check_name, index_smi, index_name, property_array_property; |
364 Label probe_dictionary, check_number_dictionary; | 379 Label probe_dictionary, check_number_dictionary; |
365 | 380 |
366 Register key = LoadDescriptor::NameRegister(); | 381 Register key = LoadDescriptor::NameRegister(); |
367 Register receiver = LoadDescriptor::ReceiverRegister(); | 382 Register receiver = LoadDescriptor::ReceiverRegister(); |
368 DCHECK(key.is(a2)); | 383 DCHECK(key.is(a2)); |
369 DCHECK(receiver.is(a1)); | 384 DCHECK(receiver.is(a1)); |
370 | 385 |
371 Isolate* isolate = masm->isolate(); | 386 Isolate* isolate = masm->isolate(); |
372 | 387 |
373 // Check that the key is a smi. | 388 // Check that the key is a smi. |
374 __ JumpIfNotSmi(key, &check_name); | 389 __ JumpIfNotSmi(key, &check_name); |
375 __ bind(&index_smi); | 390 __ bind(&index_smi); |
376 // Now the key is known to be a smi. This place is also jumped to from below | 391 // Now the key is known to be a smi. This place is also jumped to from below |
377 // where a numeric string is converted to a smi. | 392 // where a numeric string is converted to a smi. |
378 | 393 |
379 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 394 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
380 Map::kHasIndexedInterceptor, &slow); | 395 Map::kHasIndexedInterceptor, &slow); |
381 | 396 |
382 // Check the receiver's map to see if it has fast elements. | 397 // Check the receiver's map to see if it has fast elements. |
383 __ CheckFastElements(a0, a3, &check_number_dictionary); | 398 __ CheckFastElements(a0, a3, &check_number_dictionary); |
384 | 399 |
385 GenerateFastArrayLoad(masm, receiver, key, a0, a3, t0, v0, &slow); | 400 GenerateFastArrayLoad(masm, receiver, key, a0, a3, t0, v0, &slow, |
| 401 language_mode); |
386 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, t0, a3); | 402 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, t0, a3); |
387 __ Ret(); | 403 __ Ret(); |
388 | 404 |
389 __ bind(&check_number_dictionary); | 405 __ bind(&check_number_dictionary); |
390 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 406 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
391 __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset)); | 407 __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset)); |
392 | 408 |
393 // Check whether the elements is a number dictionary. | 409 // Check whether the elements is a number dictionary. |
394 // a3: elements map | 410 // a3: elements map |
395 // t0: elements | 411 // t0: elements |
396 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 412 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
397 __ Branch(&slow, ne, a3, Operand(at)); | 413 __ Branch(&slow, ne, a3, Operand(at)); |
398 __ sra(a0, key, kSmiTagSize); | 414 __ sra(a0, key, kSmiTagSize); |
399 __ LoadFromNumberDictionary(&slow, t0, key, v0, a0, a3, t1); | 415 __ LoadFromNumberDictionary(&slow, t0, key, v0, a0, a3, t1); |
400 __ Ret(); | 416 __ Ret(); |
401 | 417 |
402 // Slow case, key and receiver still in a2 and a1. | 418 // Slow case, key and receiver still in a2 and a1. |
403 __ bind(&slow); | 419 __ bind(&slow); |
404 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, t0, | 420 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, t0, |
405 a3); | 421 a3); |
406 GenerateRuntimeGetProperty(masm); | 422 GenerateRuntimeGetProperty(masm, language_mode); |
407 | 423 |
408 __ bind(&check_name); | 424 __ bind(&check_name); |
409 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); | 425 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); |
410 | 426 |
411 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 427 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
412 Map::kHasNamedInterceptor, &slow); | 428 Map::kHasNamedInterceptor, &slow); |
413 | 429 |
414 | 430 |
415 // If the receiver is a fast-case object, check the stub cache. Otherwise | 431 // If the receiver is a fast-case object, check the stub cache. Otherwise |
416 // probe the dictionary. | 432 // probe the dictionary. |
(...skipping 464 matching lines...)
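As a summary of the dispatch GenerateMegamorphic emits (the rest of which is elided above), here is a hedged high-level model; the boolean predicates stand in for the map and instance-type checks the assembly performs, and the names are illustrative only.

enum class KeyKind { kSmi, kNumericString, kUniqueName, kOther };

enum class KeyedLoadPath {
  kFastElements,      // GenerateFastArrayLoad, now language_mode-aware on OOB
  kNumberDictionary,  // LoadFromNumberDictionary
  kNamedProperty,     // stub cache probe or property dictionary load
  kRuntime            // GenerateRuntimeGetProperty(masm, language_mode)
};

// Rough decision tree of the megamorphic keyed load stub. Numeric strings
// are converted to smis and re-enter the smi path; every failed check falls
// through to the runtime path, the only other place the language mode is
// consulted (strong vs. sloppy/strict runtime function).
KeyedLoadPath ClassifyKeyedLoad(KeyKind key, bool receiver_ok,
                                bool has_fast_elements,
                                bool has_number_dictionary) {
  if (key == KeyKind::kSmi || key == KeyKind::kNumericString) {
    if (!receiver_ok) return KeyedLoadPath::kRuntime;
    if (has_fast_elements) return KeyedLoadPath::kFastElements;
    if (has_number_dictionary) return KeyedLoadPath::kNumberDictionary;
    return KeyedLoadPath::kRuntime;
  }
  if (key == KeyKind::kUniqueName && receiver_ok) {
    return KeyedLoadPath::kNamedProperty;
  }
  return KeyedLoadPath::kRuntime;
}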
881 patcher.ChangeBranchCondition(ne); | 897 patcher.ChangeBranchCondition(ne); |
882 } else { | 898 } else { |
883 DCHECK(Assembler::IsBne(branch_instr)); | 899 DCHECK(Assembler::IsBne(branch_instr)); |
884 patcher.ChangeBranchCondition(eq); | 900 patcher.ChangeBranchCondition(eq); |
885 } | 901 } |
886 } | 902 } |
887 } // namespace internal | 903 } // namespace internal |
888 } // namespace v8 | 904 } // namespace v8 |
889 | 905 |
890 #endif // V8_TARGET_ARCH_MIPS | 906 #endif // V8_TARGET_ARCH_MIPS |
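The final hunk, shown unchanged here, is the tail of the inlined smi-check patcher: it flips the sense of the branch guarding the inlined check. A standalone sketch of that toggle, using plain enums instead of the real Assembler/CodePatcher machinery:

#include <cassert>

// Stand-ins for the two MIPS branch instructions the patcher distinguishes
// and for the condition passed to ChangeBranchCondition in the code above.
enum class BranchKind { kBeq, kBne };
enum class Condition { kEq, kNe };

// Mirrors the if/else above: a beq becomes a branch on ne and vice versa,
// so the inlined smi check is inverted in place without regenerating code.
Condition InvertedSmiCheckCondition(BranchKind current) {
  if (current == BranchKind::kBeq) return Condition::kNe;
  assert(current == BranchKind::kBne);  // corresponds to the DCHECK
  return Condition::kEq;
}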