Chromium Code Reviews

Unified diff: src/ic/arm64/ic-arm64.cc

Issue 1189153002: Revert of [strong] Implement strong mode restrictions on property access (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 6 months ago
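
This patch strips the LanguageMode plumbing that the strong-mode change had threaded through the ARM64 load ICs and restores the runtime fall-back helpers (GenerateSlow becomes GenerateRuntimeGetProperty again, tail-calling Runtime::kGetProperty / Runtime::kKeyedGetProperty). As orientation before the assembler, here is a minimal C++ sketch of the control flow GenerateFastArrayLoad emits after the revert. Every name in it (Object, Value, FastArrayLoad, bail_to_slow_path and the struct fields) is a hypothetical stand-in for the real V8 heap structures, not actual V8 API, and it ignores details such as the instance-type check on prototypes.

#include <cstddef>
#include <optional>
#include <vector>

// Hypothetical stand-ins for V8 heap objects; only the control flow matters.
struct Value {
  bool is_hole = false;  // models the_hole sentinel in a fast elements store
};

struct Object {
  std::vector<Value> elements;           // fast elements backing store
  const Object* prototype = nullptr;     // nullptr terminates the chain
  bool needs_access_check = false;       // Map::kIsAccessCheckNeeded
  bool has_indexed_interceptor = false;  // Map::kHasIndexedInterceptor
};

// Sketch of what GenerateFastArrayLoad emits after the revert. An empty
// optional with *bail_to_slow_path == false stands for loading 'undefined';
// *bail_to_slow_path == true stands for jumping to the 'slow' label so the
// runtime handles the access.
std::optional<Value> FastArrayLoad(const Object& receiver, int key,
                                   bool* bail_to_slow_path) {
  *bail_to_slow_path = false;

  // Fast case: in-bounds, non-hole element on the receiver itself.
  if (key >= 0 && static_cast<std::size_t>(key) < receiver.elements.size() &&
      !receiver.elements[key].is_hole) {
    return receiver.elements[key];
  }

  // Negative keys can't take the fast out-of-bounds path.
  if (key < 0) {
    *bail_to_slow_path = true;
    return std::nullopt;
  }

  // Out of bounds (or hole): walk the prototype chain to see if we can just
  // return 'undefined'.
  for (const Object* proto = receiver.prototype;; proto = proto->prototype) {
    if (proto == nullptr) {
      // End of the chain: load 'undefined'.
      return std::nullopt;
    }
    if (proto->needs_access_check || proto->has_indexed_interceptor ||
        !proto->elements.empty()) {
      *bail_to_slow_path = true;  // defer anything non-trivial to the runtime
      return std::nullopt;
    }
  }
}

The behavioural point of the revert is the end-of-chain case: it now always yields undefined, whereas the removed code branched to the slow path when is_strong(language_mode) held so that the runtime could throw instead.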
--- a/src/ic/arm64/ic-arm64.cc
+++ b/src/ic/arm64/ic-arm64.cc
@@ -1,10 +1,10 @@
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_ARM64

 #include "src/codegen.h"
 #include "src/ic/ic.h"
(...skipping 149 matching lines...)
@@ -160,66 +160,60 @@
 //
 // elements - holds the elements of the receiver and its prototypes. Clobbered.
 //
 // result   - holds the result on exit if the load succeeded.
 //            Allowed to be the the same as 'receiver' or 'key'.
 //            Unchanged on bailout so 'receiver' and 'key' can be safely
 //            used by further computation.
 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                   Register key, Register elements,
                                   Register scratch1, Register scratch2,
-                                  Register result, Label* slow,
-                                  LanguageMode language_mode) {
+                                  Register result, Label* slow) {
   DCHECK(!AreAliased(receiver, key, elements, scratch1, scratch2));

   Label check_prototypes, check_next_prototype;
-  Label done, in_bounds, absent;
+  Label done, in_bounds, return_undefined;

   // Check for fast array.
   __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ AssertFastElements(elements);

   // Check that the key (index) is within bounds.
   __ Ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
   __ Cmp(key, scratch1);
   __ B(lo, &in_bounds);

   // Out of bounds. Check the prototype chain to see if we can just return
   // 'undefined'.
   __ Cmp(key, Operand(Smi::FromInt(0)));
   __ B(lt, slow);  // Negative keys can't take the fast OOB path.
   __ Bind(&check_prototypes);
   __ Ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
   __ Bind(&check_next_prototype);
   __ Ldr(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset));
   // scratch2: current prototype
-  __ JumpIfRoot(scratch2, Heap::kNullValueRootIndex, &absent);
+  __ JumpIfRoot(scratch2, Heap::kNullValueRootIndex, &return_undefined);
   __ Ldr(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset));
   __ Ldr(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset));
   // elements: elements of current prototype
   // scratch2: map of current prototype
   __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE);
   __ B(lo, slow);
   __ Ldrb(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset));
   __ Tbnz(scratch1, Map::kIsAccessCheckNeeded, slow);
   __ Tbnz(scratch1, Map::kHasIndexedInterceptor, slow);
   __ JumpIfNotRoot(elements, Heap::kEmptyFixedArrayRootIndex, slow);
   __ B(&check_next_prototype);

-  __ Bind(&absent);
-  if (is_strong(language_mode)) {
-    // Strong mode accesses must throw in this case, so call the runtime.
-    __ B(slow);
-  } else {
-    __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
-    __ B(&done);
-  }
+  __ Bind(&return_undefined);
+  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
+  __ B(&done);

   __ Bind(&in_bounds);
   // Fast case: Do the load.
   __ Add(scratch1, elements, FixedArray::kHeaderSize - kHeapObjectTag);
   __ SmiUntag(scratch2, key);
   __ Ldr(scratch2, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2));

   // In case the loaded value is the_hole we have to check the prototype chain.
   __ JumpIfRoot(scratch2, Heap::kTheHoleValueRootIndex, &check_prototypes);

(...skipping 45 matching lines...)
@@ -271,111 +265,100 @@
   Label slow;

   __ Ldr(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(),
                                      JSObject::kPropertiesOffset));
   GenerateDictionaryLoad(masm, &slow, dictionary,
                          LoadDescriptor::NameRegister(), x0, x3, x4);
   __ Ret();

   // Dictionary load failed, go slow (but don't miss).
   __ Bind(&slow);
-  GenerateSlow(masm);
+  GenerateRuntimeGetProperty(masm);
 }


 void LoadIC::GenerateMiss(MacroAssembler* masm) {
   // The return address is in lr.
   Isolate* isolate = masm->isolate();
   ASM_LOCATION("LoadIC::GenerateMiss");

   DCHECK(!AreAliased(x4, x5, LoadWithVectorDescriptor::SlotRegister(),
                      LoadWithVectorDescriptor::VectorRegister()));
   __ IncrementCounter(isolate->counters()->load_miss(), 1, x4, x5);

   // Perform tail call to the entry.
   __ Push(LoadWithVectorDescriptor::ReceiverRegister(),
           LoadWithVectorDescriptor::NameRegister(),
           LoadWithVectorDescriptor::SlotRegister(),
           LoadWithVectorDescriptor::VectorRegister());
   ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
   int arg_count = 4;
   __ TailCallExternalReference(ref, arg_count, 1);
 }


-void LoadIC::GenerateSlow(MacroAssembler* masm) {
+void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
   // The return address is in lr.
   __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister());
-
-  // Perform tail call to the entry.
-  ExternalReference ref =
-      ExternalReference(IC_Utility(kLoadIC_Slow), masm->isolate());
-  int arg_count = 2;
-  __ TailCallExternalReference(ref, arg_count, 1);
+  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
 }


 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // The return address is in lr.
   Isolate* isolate = masm->isolate();

   DCHECK(!AreAliased(x10, x11, LoadWithVectorDescriptor::SlotRegister(),
                      LoadWithVectorDescriptor::VectorRegister()));
   __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, x10, x11);

   __ Push(LoadWithVectorDescriptor::ReceiverRegister(),
           LoadWithVectorDescriptor::NameRegister(),
           LoadWithVectorDescriptor::SlotRegister(),
           LoadWithVectorDescriptor::VectorRegister());

   // Perform tail call to the entry.
   ExternalReference ref =
       ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
   int arg_count = 4;
   __ TailCallExternalReference(ref, arg_count, 1);
 }


-void KeyedLoadIC::GenerateSlow(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
   // The return address is in lr.
   __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister());
-
-  // Perform tail call to the entry.
-  ExternalReference ref =
-      ExternalReference(IC_Utility(kKeyedLoadIC_Slow), masm->isolate());
-  int arg_count = 2;
-  __ TailCallExternalReference(ref, arg_count, 1);
+  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
 }


 static void GenerateKeyedLoadWithSmiKey(MacroAssembler* masm, Register key,
                                         Register receiver, Register scratch1,
                                         Register scratch2, Register scratch3,
                                         Register scratch4, Register scratch5,
-                                        Label* slow,
-                                        LanguageMode language_mode) {
+                                        Label* slow) {
   DCHECK(!AreAliased(key, receiver, scratch1, scratch2, scratch3, scratch4,
                      scratch5));

   Isolate* isolate = masm->isolate();
   Label check_number_dictionary;
   // If we can load the value, it should be returned in x0.
   Register result = x0;

   GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2,
                                  Map::kHasIndexedInterceptor, slow);

   // Check the receiver's map to see if it has fast elements.
   __ CheckFastElements(scratch1, scratch2, &check_number_dictionary);

   GenerateFastArrayLoad(masm, receiver, key, scratch3, scratch2, scratch1,
-                        result, slow, language_mode);
+                        result, slow);
   __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1,
                       scratch1, scratch2);
   __ Ret();

   __ Bind(&check_number_dictionary);
   __ Ldr(scratch3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ Ldr(scratch2, FieldMemOperand(scratch3, JSObject::kMapOffset));

   // Check whether we have a number dictionary.
   __ JumpIfNotRoot(scratch2, Heap::kHashTableMapRootIndex, slow);
(...skipping 50 matching lines...)
@@ -432,42 +415,40 @@
   __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   GenerateGlobalInstanceTypeCheck(masm, scratch1, slow);
   // Load the property.
   GenerateDictionaryLoad(masm, slow, scratch2, key, result, scratch1, scratch3);
   __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(), 1,
                       scratch1, scratch2);
   __ Ret();
 }


-void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm,
-                                      LanguageMode language_mode) {
+void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   // The return address is in lr.
   Label slow, check_name, index_smi, index_name;

   Register key = LoadDescriptor::NameRegister();
   Register receiver = LoadDescriptor::ReceiverRegister();
   DCHECK(key.is(x2));
   DCHECK(receiver.is(x1));

   __ JumpIfNotSmi(key, &check_name);
   __ Bind(&index_smi);
   // Now the key is known to be a smi. This place is also jumped to from below
   // where a numeric string is converted to a smi.
-  GenerateKeyedLoadWithSmiKey(masm, key, receiver, x7, x3, x4, x5, x6, &slow,
-                              language_mode);
+  GenerateKeyedLoadWithSmiKey(masm, key, receiver, x7, x3, x4, x5, x6, &slow);

   // Slow case.
   __ Bind(&slow);
   __ IncrementCounter(masm->isolate()->counters()->keyed_load_generic_slow(), 1,
                       x4, x3);
-  GenerateSlow(masm);
+  GenerateRuntimeGetProperty(masm);

   __ Bind(&check_name);
   GenerateKeyNameCheck(masm, key, x0, x3, &index_name, &slow);

   GenerateKeyedLoadWithNameKey(masm, key, receiver, x4, x5, x6, x7, x3, &slow);

   __ Bind(&index_name);
   __ IndexFromHash(x3, key);
   // Now jump to the place where smi keys are handled.
   __ B(&index_smi);
(...skipping 393 matching lines...)
@@ -867,10 +848,10 @@
   } else {
     DCHECK(to_patch->Mask(TestBranchMask) == TBNZ);
     // This is JumpIfSmi(smi_reg, branch_imm).
     patcher.tbz(smi_reg, 0, branch_imm);
   }
 }
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_ARM64