Chromium Code Reviews

Unified Diff: src/ic/arm64/ic-arm64.cc

Issue 1168093002: [strong] Implement strong mode restrictions on property access (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: fix arm64 port Created 5 years, 6 months ago
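The change threads a new Strength parameter through the ARM64 keyed-load stubs so that a load which would classically produce undefined can instead be routed to the runtime, which throws under strong mode. As a minimal sketch of the intended language-level behavior (strong mode was an experimental, flag-gated V8 mode; the emulation below is illustrative, not V8 code):

    // Minimal emulation of the semantics this patch implements; strong mode
    // itself was an experimental, flag-gated V8 mode, so this is a sketch only.
    function readElement(xs: unknown[], i: number, strong: boolean): unknown {
      if (strong && !(i in xs)) {
        // Strong mode: reading an absent property throws instead of
        // evaluating to undefined.
        throw new TypeError(`index ${i} not found on array`);
      }
      return xs[i];
    }

    readElement([1, 2, 3], 10, false);  // undefined (classic semantics)
    // readElement([1, 2, 3], 10, true);  // throws TypeError (strong semantics)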
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_ARM64

 #include "src/codegen.h"
 #include "src/ic/ic.h"
(...skipping 149 matching lines...)
 //
 // elements - holds the elements of the receiver and its prototypes. Clobbered.
 //
 // result   - holds the result on exit if the load succeeded.
 //            Allowed to be the same as 'receiver' or 'key'.
 //            Unchanged on bailout so 'receiver' and 'key' can be safely
 //            used by further computation.
 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                   Register key, Register elements,
                                   Register scratch1, Register scratch2,
-                                  Register result, Label* slow) {
+                                  Register result, Label* slow,
+                                  Strength strength) {
   DCHECK(!AreAliased(receiver, key, elements, scratch1, scratch2));

   Label check_prototypes, check_next_prototype;
-  Label done, in_bounds, return_undefined;
+  Label done, in_bounds, return_absent;

   // Check for fast array.
   __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ AssertFastElements(elements);

   // Check that the key (index) is within bounds.
   __ Ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
   __ Cmp(key, scratch1);
   __ B(lo, &in_bounds);

   // Out of bounds. Check the prototype chain to see if we can just return
   // 'undefined'.
   __ Cmp(key, Operand(Smi::FromInt(0)));
   __ B(lt, slow);  // Negative keys can't take the fast OOB path.
   __ Bind(&check_prototypes);
   __ Ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
   __ Bind(&check_next_prototype);
   __ Ldr(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset));
   // scratch2: current prototype
-  __ JumpIfRoot(scratch2, Heap::kNullValueRootIndex, &return_undefined);
+  __ JumpIfRoot(scratch2, Heap::kNullValueRootIndex, &return_absent);
   __ Ldr(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset));
   __ Ldr(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset));
   // elements: elements of current prototype
   // scratch2: map of current prototype
   __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE);
   __ B(lo, slow);
   __ Ldrb(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset));
   __ Tbnz(scratch1, Map::kIsAccessCheckNeeded, slow);
   __ Tbnz(scratch1, Map::kHasIndexedInterceptor, slow);
   __ JumpIfNotRoot(elements, Heap::kEmptyFixedArrayRootIndex, slow);
   __ B(&check_next_prototype);

-  __ Bind(&return_undefined);
-  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
-  __ B(&done);
+  __ Bind(&return_absent);
+  if (is_strong(strength)) {
+    // Strong mode accesses must throw in this case, so call the runtime.
+    __ B(slow);
+  } else {
+    __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
+    __ B(&done);
+  }

   __ Bind(&in_bounds);
   // Fast case: Do the load.
   __ Add(scratch1, elements, FixedArray::kHeaderSize - kHeapObjectTag);
   __ SmiUntag(scratch2, key);
   __ Ldr(scratch2, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2));

   // In case the loaded value is the_hole we have to check the prototype chain.
   __ JumpIfRoot(scratch2, Heap::kTheHoleValueRootIndex, &check_prototypes);

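For readers who don't speak macro assembler, here is a rough TypeScript model of the control flow this stub emits; fastArrayLoad, isStrong, and the slow callback are illustrative names, not V8 API. The only behavioral change in this patch is the last branch: once the element is known to be absent, strong mode bails out to the slow path (which can throw) instead of materializing undefined.

    type Strength = "sloppy" | "strong";

    function isStrong(s: Strength): boolean {
      return s === "strong";
    }

    // Illustrative model of GenerateFastArrayLoad (not V8 API): return the
    // element, or call `slow` when the stub must bail out to slower machinery.
    function fastArrayLoad(receiver: unknown[], key: number, strength: Strength,
                           slow: () => never): unknown {
      if (key >= 0 && key < receiver.length && key in receiver) {
        return receiver[key];  // fast case: in bounds and not a hole
      }
      if (key < 0) slow();  // negative keys can't take the fast OOB path
      // Walk the prototype chain; the real stub bails out on access checks,
      // indexed interceptors, or any prototype whose elements are non-empty.
      for (let proto = Object.getPrototypeOf(receiver); proto !== null;
           proto = Object.getPrototypeOf(proto)) {
        const hasElements = Object.keys(proto).some((k) => /^\d+$/.test(k));
        if (hasElements) slow();
      }
      // The element is definitively absent from the receiver and its prototypes.
      if (isStrong(strength)) {
        slow();  // strong mode must throw, so defer to the runtime
      }
      return undefined;  // classic semantics: absent reads yield undefined
    }

For example, fastArrayLoad([1, 2, 3], 10, "strong", () => { throw new TypeError("absent"); }) throws, while the same call with "sloppy" returns undefined.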
(...skipping 45 matching lines...)
   Label slow;

   __ Ldr(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(),
                                      JSObject::kPropertiesOffset));
   GenerateDictionaryLoad(masm, &slow, dictionary,
                          LoadDescriptor::NameRegister(), x0, x3, x4);
   __ Ret();

   // Dictionary load failed, go slow (but don't miss).
   __ Bind(&slow);
-  GenerateRuntimeGetProperty(masm);
+  GenerateSlow(masm);
 }


 void LoadIC::GenerateMiss(MacroAssembler* masm) {
   // The return address is in lr.
   Isolate* isolate = masm->isolate();
   ASM_LOCATION("LoadIC::GenerateMiss");

   DCHECK(!AreAliased(x4, x5, LoadWithVectorDescriptor::SlotRegister(),
                      LoadWithVectorDescriptor::VectorRegister()));
   __ IncrementCounter(isolate->counters()->load_miss(), 1, x4, x5);

   // Perform tail call to the entry.
   __ Push(LoadWithVectorDescriptor::ReceiverRegister(),
           LoadWithVectorDescriptor::NameRegister(),
           LoadWithVectorDescriptor::SlotRegister(),
           LoadWithVectorDescriptor::VectorRegister());
   ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
   int arg_count = 4;
   __ TailCallExternalReference(ref, arg_count, 1);
 }


-void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
+void LoadIC::GenerateSlow(MacroAssembler* masm) {
   // The return address is in lr.
   __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister());
-  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
+
+  // Perform tail call to the entry.
+  ExternalReference ref =
+      ExternalReference(IC_Utility(kLoadIC_Slow), masm->isolate());
+  int arg_count = 2;
+  __ TailCallExternalReference(ref, arg_count, 1);
 }


 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // The return address is in lr.
   Isolate* isolate = masm->isolate();

   DCHECK(!AreAliased(x10, x11, LoadWithVectorDescriptor::SlotRegister(),
                      LoadWithVectorDescriptor::VectorRegister()));
   __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, x10, x11);

   __ Push(LoadWithVectorDescriptor::ReceiverRegister(),
           LoadWithVectorDescriptor::NameRegister(),
           LoadWithVectorDescriptor::SlotRegister(),
           LoadWithVectorDescriptor::VectorRegister());

   // Perform tail call to the entry.
   ExternalReference ref =
       ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
   int arg_count = 4;
   __ TailCallExternalReference(ref, arg_count, 1);
 }


-void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateSlow(MacroAssembler* masm) {
   // The return address is in lr.
   __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister());
-  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
+
+  // Perform tail call to the entry.
+  ExternalReference ref =
+      ExternalReference(IC_Utility(kKeyedLoadIC_Slow), masm->isolate());
+  int arg_count = 2;
+  __ TailCallExternalReference(ref, arg_count, 1);
 }


 static void GenerateKeyedLoadWithSmiKey(MacroAssembler* masm, Register key,
                                         Register receiver, Register scratch1,
                                         Register scratch2, Register scratch3,
                                         Register scratch4, Register scratch5,
-                                        Label* slow) {
+                                        Label* slow, Strength strength) {
   DCHECK(!AreAliased(key, receiver, scratch1, scratch2, scratch3, scratch4,
                      scratch5));

   Isolate* isolate = masm->isolate();
   Label check_number_dictionary;
   // If we can load the value, it should be returned in x0.
   Register result = x0;

   GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2,
                                  Map::kHasIndexedInterceptor, slow);

   // Check the receiver's map to see if it has fast elements.
   __ CheckFastElements(scratch1, scratch2, &check_number_dictionary);

   GenerateFastArrayLoad(masm, receiver, key, scratch3, scratch2, scratch1,
-                        result, slow);
+                        result, slow, strength);
   __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1,
                       scratch1, scratch2);
   __ Ret();

   __ Bind(&check_number_dictionary);
   __ Ldr(scratch3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ Ldr(scratch2, FieldMemOperand(scratch3, JSObject::kMapOffset));

   // Check whether we have a number dictionary.
   __ JumpIfNotRoot(scratch2, Heap::kHashTableMapRootIndex, slow);
(...skipping 50 matching lines...)
   __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   GenerateGlobalInstanceTypeCheck(masm, scratch1, slow);
   // Load the property.
   GenerateDictionaryLoad(masm, slow, scratch2, key, result, scratch1, scratch3);
   __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(), 1,
                       scratch1, scratch2);
   __ Ret();
 }


-void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm, Strength strength) {
   // The return address is in lr.
   Label slow, check_name, index_smi, index_name;

   Register key = LoadDescriptor::NameRegister();
   Register receiver = LoadDescriptor::ReceiverRegister();
   DCHECK(key.is(x2));
   DCHECK(receiver.is(x1));

   __ JumpIfNotSmi(key, &check_name);
   __ Bind(&index_smi);
   // Now the key is known to be a smi. This place is also jumped to from below
   // where a numeric string is converted to a smi.
-  GenerateKeyedLoadWithSmiKey(masm, key, receiver, x7, x3, x4, x5, x6, &slow);
+  GenerateKeyedLoadWithSmiKey(masm, key, receiver, x7, x3, x4, x5, x6, &slow,
+                              strength);

   // Slow case.
   __ Bind(&slow);
   __ IncrementCounter(masm->isolate()->counters()->keyed_load_generic_slow(), 1,
                       x4, x3);
-  GenerateRuntimeGetProperty(masm);
+  GenerateSlow(masm);

   __ Bind(&check_name);
   GenerateKeyNameCheck(masm, key, x0, x3, &index_name, &slow);

   GenerateKeyedLoadWithNameKey(masm, key, receiver, x4, x5, x6, x7, x3, &slow);

   __ Bind(&index_name);
   __ IndexFromHash(x3, key);
   // Now jump to the place where smi keys are handled.
   __ B(&index_smi);
(...skipping 393 matching lines...)
   } else {
     DCHECK(to_patch->Mask(TestBranchMask) == TBNZ);
     // This is JumpIfSmi(smi_reg, branch_imm).
     patcher.tbz(smi_reg, 0, branch_imm);
   }
 }
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_ARM64
