| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
| 6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
| 10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
| (...skipping 23 matching lines...) |
| 34 // S5 : address of the runtime function to call. | 34 // S5 : address of the runtime function to call. |
| 35 // S4 : number of arguments to the call. | 35 // S4 : number of arguments to the call. |
| 36 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { | 36 void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) { |
| 37 const intptr_t isolate_offset = NativeArguments::isolate_offset(); | 37 const intptr_t isolate_offset = NativeArguments::isolate_offset(); |
| 38 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 38 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
| 39 const intptr_t argv_offset = NativeArguments::argv_offset(); | 39 const intptr_t argv_offset = NativeArguments::argv_offset(); |
| 40 const intptr_t retval_offset = NativeArguments::retval_offset(); | 40 const intptr_t retval_offset = NativeArguments::retval_offset(); |
| 41 const intptr_t exitframe_last_param_slot_from_fp = 2; | 41 const intptr_t exitframe_last_param_slot_from_fp = 2; |
| 42 | 42 |
| 43 __ SetPrologueOffset(); | 43 __ SetPrologueOffset(); |
| 44 __ TraceSimMsg("CallToRuntimeStub"); | 44 __ Comment("CallToRuntimeStub"); |
| 45 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 45 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 46 __ sw(ZR, Address(SP, 2 * kWordSize)); // Push 0 for the PC marker | 46 __ sw(ZR, Address(SP, 2 * kWordSize)); // Push 0 for the PC marker |
| 47 __ sw(RA, Address(SP, 1 * kWordSize)); | 47 __ sw(RA, Address(SP, 1 * kWordSize)); |
| 48 __ sw(FP, Address(SP, 0 * kWordSize)); | 48 __ sw(FP, Address(SP, 0 * kWordSize)); |
| 49 __ mov(FP, SP); | 49 __ mov(FP, SP); |
| 50 | 50 |
| 51 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << S6)) != 0); | 51 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << S6)) != 0); |
| 52 __ LoadIsolate(S6); | 52 __ LoadIsolate(S6); |
| 53 | 53 |
| 54 // Save exit frame information to enable stack walking as we are about | 54 // Save exit frame information to enable stack walking as we are about |
| (...skipping 39 matching lines...) |
| 94 | 94 |
| 95 // Call runtime or redirection via simulator. | 95 // Call runtime or redirection via simulator. |
| 96 // We defensively always jalr through T9 because it is sometimes required by | 96 // We defensively always jalr through T9 because it is sometimes required by |
| 97 // the MIPS ABI. | 97 // the MIPS ABI. |
| 98 __ mov(T9, S5); | 98 __ mov(T9, S5); |
| 99 __ jalr(T9); | 99 __ jalr(T9); |
| 100 | 100 |
| 101 ASSERT(retval_offset == 3 * kWordSize); | 101 ASSERT(retval_offset == 3 * kWordSize); |
| 102 // Retval is next to 1st argument. | 102 // Retval is next to 1st argument. |
| 103 __ delay_slot()->addiu(A3, A2, Immediate(kWordSize)); | 103 __ delay_slot()->addiu(A3, A2, Immediate(kWordSize)); |
| 104 __ TraceSimMsg("CallToRuntimeStub return"); | 104 __ Comment("CallToRuntimeStub return"); |
| 105 | 105 |
| 106 // Mark that the isolate is executing Dart code. | 106 // Mark that the isolate is executing Dart code. |
| 107 __ LoadImmediate(A2, VMTag::kDartTagId); | 107 __ LoadImmediate(A2, VMTag::kDartTagId); |
| 108 __ sw(A2, Address(S6, Isolate::vm_tag_offset())); | 108 __ sw(A2, Address(S6, Isolate::vm_tag_offset())); |
| 109 | 109 |
| 110 // Reset exit frame information in Isolate structure. | 110 // Reset exit frame information in Isolate structure. |
| 111 __ sw(ZR, Address(S6, Isolate::top_exit_frame_info_offset())); | 111 __ sw(ZR, Address(S6, Isolate::top_exit_frame_info_offset())); |
| 112 | 112 |
| 113 __ mov(SP, FP); | 113 __ mov(SP, FP); |
| 114 __ lw(RA, Address(SP, 1 * kWordSize)); | 114 __ lw(RA, Address(SP, 1 * kWordSize)); |
| (...skipping 28 matching lines...) |
| 143 // T5 : address of the native function to call. | 143 // T5 : address of the native function to call. |
| 144 // A2 : address of first argument in argument array. | 144 // A2 : address of first argument in argument array. |
| 145 // A1 : argc_tag including number of arguments and function kind. | 145 // A1 : argc_tag including number of arguments and function kind. |
| 146 void StubCode::GenerateCallNativeCFunctionStub(Assembler* assembler) { | 146 void StubCode::GenerateCallNativeCFunctionStub(Assembler* assembler) { |
| 147 const intptr_t isolate_offset = NativeArguments::isolate_offset(); | 147 const intptr_t isolate_offset = NativeArguments::isolate_offset(); |
| 148 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 148 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
| 149 const intptr_t argv_offset = NativeArguments::argv_offset(); | 149 const intptr_t argv_offset = NativeArguments::argv_offset(); |
| 150 const intptr_t retval_offset = NativeArguments::retval_offset(); | 150 const intptr_t retval_offset = NativeArguments::retval_offset(); |
| 151 | 151 |
| 152 __ SetPrologueOffset(); | 152 __ SetPrologueOffset(); |
| 153 __ TraceSimMsg("CallNativeCFunctionStub"); | 153 __ Comment("CallNativeCFunctionStub"); |
| 154 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 154 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 155 __ sw(ZR, Address(SP, 2 * kWordSize)); // Push 0 for the PC marker | 155 __ sw(ZR, Address(SP, 2 * kWordSize)); // Push 0 for the PC marker |
| 156 __ sw(RA, Address(SP, 1 * kWordSize)); | 156 __ sw(RA, Address(SP, 1 * kWordSize)); |
| 157 __ sw(FP, Address(SP, 0 * kWordSize)); | 157 __ sw(FP, Address(SP, 0 * kWordSize)); |
| 158 __ mov(FP, SP); | 158 __ mov(FP, SP); |
| 159 | 159 |
| 160 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << S6)) != 0); | 160 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << S6)) != 0); |
| 161 __ LoadIsolate(S6); | 161 __ LoadIsolate(S6); |
| 162 | 162 |
| 163 // Save exit frame information to enable stack walking as we are about | 163 // Save exit frame information to enable stack walking as we are about |
| (...skipping 47 matching lines...) |
| 211 // Call native wrapper function or redirection via simulator. | 211 // Call native wrapper function or redirection via simulator. |
| 212 #if defined(USING_SIMULATOR) | 212 #if defined(USING_SIMULATOR) |
| 213 uword entry = reinterpret_cast<uword>(NativeEntry::NativeCallWrapper); | 213 uword entry = reinterpret_cast<uword>(NativeEntry::NativeCallWrapper); |
| 214 entry = Simulator::RedirectExternalReference( | 214 entry = Simulator::RedirectExternalReference( |
| 215 entry, Simulator::kNativeCall, NativeEntry::kNumCallWrapperArguments); | 215 entry, Simulator::kNativeCall, NativeEntry::kNumCallWrapperArguments); |
| 216 __ LoadImmediate(T9, entry); | 216 __ LoadImmediate(T9, entry); |
| 217 __ jalr(T9); | 217 __ jalr(T9); |
| 218 #else | 218 #else |
| 219 __ BranchLink(&NativeEntry::NativeCallWrapperLabel()); | 219 __ BranchLink(&NativeEntry::NativeCallWrapperLabel()); |
| 220 #endif | 220 #endif |
| 221 __ TraceSimMsg("CallNativeCFunctionStub return"); | 221 __ Comment("CallNativeCFunctionStub return"); |
| 222 | 222 |
| 223 // Mark that the isolate is executing Dart code. | 223 // Mark that the isolate is executing Dart code. |
| 224 __ LoadImmediate(A2, VMTag::kDartTagId); | 224 __ LoadImmediate(A2, VMTag::kDartTagId); |
| 225 __ sw(A2, Address(S6, Isolate::vm_tag_offset())); | 225 __ sw(A2, Address(S6, Isolate::vm_tag_offset())); |
| 226 | 226 |
| 227 // Reset exit frame information in Isolate structure. | 227 // Reset exit frame information in Isolate structure. |
| 228 __ sw(ZR, Address(S6, Isolate::top_exit_frame_info_offset())); | 228 __ sw(ZR, Address(S6, Isolate::top_exit_frame_info_offset())); |
| 229 | 229 |
| 230 __ mov(SP, FP); | 230 __ mov(SP, FP); |
| 231 __ lw(RA, Address(SP, 1 * kWordSize)); | 231 __ lw(RA, Address(SP, 1 * kWordSize)); |
| 232 __ lw(FP, Address(SP, 0 * kWordSize)); | 232 __ lw(FP, Address(SP, 0 * kWordSize)); |
| 233 __ Ret(); | 233 __ Ret(); |
| 234 __ delay_slot()->addiu(SP, SP, Immediate(3 * kWordSize)); | 234 __ delay_slot()->addiu(SP, SP, Immediate(3 * kWordSize)); |
| 235 } | 235 } |
| 236 | 236 |
| 237 | 237 |
| 238 // Input parameters: | 238 // Input parameters: |
| 239 // RA : return address. | 239 // RA : return address. |
| 240 // SP : address of return value. | 240 // SP : address of return value. |
| 241 // T5 : address of the native function to call. | 241 // T5 : address of the native function to call. |
| 242 // A2 : address of first argument in argument array. | 242 // A2 : address of first argument in argument array. |
| 243 // A1 : argc_tag including number of arguments and function kind. | 243 // A1 : argc_tag including number of arguments and function kind. |
| 244 void StubCode::GenerateCallBootstrapCFunctionStub(Assembler* assembler) { | 244 void StubCode::GenerateCallBootstrapCFunctionStub(Assembler* assembler) { |
| 245 const intptr_t isolate_offset = NativeArguments::isolate_offset(); | 245 const intptr_t isolate_offset = NativeArguments::isolate_offset(); |
| 246 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); | 246 const intptr_t argc_tag_offset = NativeArguments::argc_tag_offset(); |
| 247 const intptr_t argv_offset = NativeArguments::argv_offset(); | 247 const intptr_t argv_offset = NativeArguments::argv_offset(); |
| 248 const intptr_t retval_offset = NativeArguments::retval_offset(); | 248 const intptr_t retval_offset = NativeArguments::retval_offset(); |
| 249 | 249 |
| 250 __ SetPrologueOffset(); | 250 __ SetPrologueOffset(); |
| 251 __ TraceSimMsg("CallNativeCFunctionStub"); | 251 __ Comment("CallNativeCFunctionStub"); |
| 252 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 252 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 253 __ sw(ZR, Address(SP, 2 * kWordSize)); // Push 0 for the PC marker | 253 __ sw(ZR, Address(SP, 2 * kWordSize)); // Push 0 for the PC marker |
| 254 __ sw(RA, Address(SP, 1 * kWordSize)); | 254 __ sw(RA, Address(SP, 1 * kWordSize)); |
| 255 __ sw(FP, Address(SP, 0 * kWordSize)); | 255 __ sw(FP, Address(SP, 0 * kWordSize)); |
| 256 __ mov(FP, SP); | 256 __ mov(FP, SP); |
| 257 | 257 |
| 258 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << S6)) != 0); | 258 COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << S6)) != 0); |
| 259 __ LoadIsolate(S6); | 259 __ LoadIsolate(S6); |
| 260 | 260 |
| 261 // Save exit frame information to enable stack walking as we are about | 261 // Save exit frame information to enable stack walking as we are about |
| (...skipping 42 matching lines...) |
| 304 __ mov(A0, SP); // Pass the pointer to the NativeArguments. | 304 __ mov(A0, SP); // Pass the pointer to the NativeArguments. |
| 305 | 305 |
| 306 __ ReserveAlignedFrameSpace(kWordSize); // Just passing A0. | 306 __ ReserveAlignedFrameSpace(kWordSize); // Just passing A0. |
| 307 | 307 |
| 308 // Call native function or redirection via simulator. | 308 // Call native function or redirection via simulator. |
| 309 | 309 |
| 310 // We defensively always jalr through T9 because it is sometimes required by | 310 // We defensively always jalr through T9 because it is sometimes required by |
| 311 // the MIPS ABI. | 311 // the MIPS ABI. |
| 312 __ mov(T9, T5); | 312 __ mov(T9, T5); |
| 313 __ jalr(T9); | 313 __ jalr(T9); |
| 314 __ TraceSimMsg("CallNativeCFunctionStub return"); | 314 __ Comment("CallNativeCFunctionStub return"); |
| 315 | 315 |
| 316 // Mark that the isolate is executing Dart code. | 316 // Mark that the isolate is executing Dart code. |
| 317 __ LoadImmediate(A2, VMTag::kDartTagId); | 317 __ LoadImmediate(A2, VMTag::kDartTagId); |
| 318 __ sw(A2, Address(S6, Isolate::vm_tag_offset())); | 318 __ sw(A2, Address(S6, Isolate::vm_tag_offset())); |
| 319 | 319 |
| 320 // Reset exit frame information in Isolate structure. | 320 // Reset exit frame information in Isolate structure. |
| 321 __ sw(ZR, Address(S6, Isolate::top_exit_frame_info_offset())); | 321 __ sw(ZR, Address(S6, Isolate::top_exit_frame_info_offset())); |
| 322 | 322 |
| 323 __ mov(SP, FP); | 323 __ mov(SP, FP); |
| 324 __ lw(RA, Address(SP, 1 * kWordSize)); | 324 __ lw(RA, Address(SP, 1 * kWordSize)); |
| 325 __ lw(FP, Address(SP, 0 * kWordSize)); | 325 __ lw(FP, Address(SP, 0 * kWordSize)); |
| 326 __ Ret(); | 326 __ Ret(); |
| 327 __ delay_slot()->addiu(SP, SP, Immediate(3 * kWordSize)); | 327 __ delay_slot()->addiu(SP, SP, Immediate(3 * kWordSize)); |
| 328 } | 328 } |
| 329 | 329 |
| 330 | 330 |
| 331 // Input parameters: | 331 // Input parameters: |
| 332 // S4: arguments descriptor array. | 332 // S4: arguments descriptor array. |
| 333 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { | 333 void StubCode::GenerateCallStaticFunctionStub(Assembler* assembler) { |
| 334 __ TraceSimMsg("CallStaticFunctionStub"); | 334 __ Comment("CallStaticFunctionStub"); |
| 335 __ EnterStubFrame(); | 335 __ EnterStubFrame(); |
| 336 // Setup space on stack for return value and preserve arguments descriptor. | 336 // Setup space on stack for return value and preserve arguments descriptor. |
| 337 | 337 |
| 338 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 338 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
| 339 __ sw(S4, Address(SP, 1 * kWordSize)); | 339 __ sw(S4, Address(SP, 1 * kWordSize)); |
| 340 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 340 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 341 __ sw(TMP, Address(SP, 0 * kWordSize)); | 341 __ sw(TMP, Address(SP, 0 * kWordSize)); |
| 342 | 342 |
| 343 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); | 343 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0); |
| 344 __ TraceSimMsg("CallStaticFunctionStub return"); | 344 __ Comment("CallStaticFunctionStub return"); |
| 345 | 345 |
| 346 // Get Code object result and restore arguments descriptor array. | 346 // Get Code object result and restore arguments descriptor array. |
| 347 __ lw(T0, Address(SP, 0 * kWordSize)); | 347 __ lw(T0, Address(SP, 0 * kWordSize)); |
| 348 __ lw(S4, Address(SP, 1 * kWordSize)); | 348 __ lw(S4, Address(SP, 1 * kWordSize)); |
| 349 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 349 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
| 350 | 350 |
| 351 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 351 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
| 352 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); | 352 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); |
| 353 | 353 |
| 354 // Remove the stub frame as we are about to jump to the dart function. | 354 // Remove the stub frame as we are about to jump to the dart function. |
| 355 __ LeaveStubFrameAndReturn(T0); | 355 __ LeaveStubFrameAndReturn(T0); |
| 356 } | 356 } |
| 357 | 357 |
| 358 | 358 |
| 359 // Called from a static call only when an invalid code has been entered | 359 // Called from a static call only when an invalid code has been entered |
| 360 // (invalid because its function was optimized or deoptimized). | 360 // (invalid because its function was optimized or deoptimized). |
| 361 // S4: arguments descriptor array. | 361 // S4: arguments descriptor array. |
| 362 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 362 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
| 363 // Create a stub frame as we are pushing some objects on the stack before | 363 // Create a stub frame as we are pushing some objects on the stack before |
| 364 // calling into the runtime. | 364 // calling into the runtime. |
| 365 __ TraceSimMsg("FixCallersTarget"); | 365 __ Comment("FixCallersTarget"); |
| 366 __ EnterStubFrame(); | 366 __ EnterStubFrame(); |
| 367 // Setup space on stack for return value and preserve arguments descriptor. | 367 // Setup space on stack for return value and preserve arguments descriptor. |
| 368 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 368 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
| 369 __ sw(S4, Address(SP, 1 * kWordSize)); | 369 __ sw(S4, Address(SP, 1 * kWordSize)); |
| 370 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 370 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 371 __ sw(TMP, Address(SP, 0 * kWordSize)); | 371 __ sw(TMP, Address(SP, 0 * kWordSize)); |
| 372 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); | 372 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0); |
| 373 // Get Code object result and restore arguments descriptor array. | 373 // Get Code object result and restore arguments descriptor array. |
| 374 __ lw(T0, Address(SP, 0 * kWordSize)); | 374 __ lw(T0, Address(SP, 0 * kWordSize)); |
| 375 __ lw(S4, Address(SP, 1 * kWordSize)); | 375 __ lw(S4, Address(SP, 1 * kWordSize)); |
| 376 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 376 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
| 377 | 377 |
| 378 // Jump to the dart function. | 378 // Jump to the dart function. |
| 379 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 379 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
| 380 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); | 380 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); |
| 381 | 381 |
| 382 // Remove the stub frame. | 382 // Remove the stub frame. |
| 383 __ LeaveStubFrameAndReturn(T0); | 383 __ LeaveStubFrameAndReturn(T0); |
| 384 } | 384 } |
| 385 | 385 |
| 386 | 386 |
| 387 // Called from object allocate instruction when the allocation stub has been | 387 // Called from object allocate instruction when the allocation stub has been |
| 388 // disabled. | 388 // disabled. |
| 389 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { | 389 void StubCode::GenerateFixAllocationStubTargetStub(Assembler* assembler) { |
| 390 __ TraceSimMsg("FixAllocationStubTarget"); | 390 __ Comment("FixAllocationStubTarget"); |
| 391 __ EnterStubFrame(); | 391 __ EnterStubFrame(); |
| 392 // Setup space on stack for return value. | 392 // Setup space on stack for return value. |
| 393 __ addiu(SP, SP, Immediate(-1 * kWordSize)); | 393 __ addiu(SP, SP, Immediate(-1 * kWordSize)); |
| 394 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 394 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 395 __ sw(TMP, Address(SP, 0 * kWordSize)); | 395 __ sw(TMP, Address(SP, 0 * kWordSize)); |
| 396 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 396 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
| 397 // Get Code object result. | 397 // Get Code object result. |
| 398 __ lw(T0, Address(SP, 0 * kWordSize)); | 398 __ lw(T0, Address(SP, 0 * kWordSize)); |
| 399 __ addiu(SP, SP, Immediate(1 * kWordSize)); | 399 __ addiu(SP, SP, Immediate(1 * kWordSize)); |
| 400 | 400 |
| 401 // Jump to the dart function. | 401 // Jump to the dart function. |
| 402 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 402 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
| 403 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); | 403 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); |
| 404 | 404 |
| 405 // Remove the stub frame. | 405 // Remove the stub frame. |
| 406 __ LeaveStubFrameAndReturn(T0); | 406 __ LeaveStubFrameAndReturn(T0); |
| 407 } | 407 } |
| 408 | 408 |
| 409 | 409 |
| 410 // Called from array allocate instruction when the allocation stub has been | 410 // Called from array allocate instruction when the allocation stub has been |
| 411 // disabled. | 411 // disabled. |
| 412 // A0: element type (preserved). | 412 // A0: element type (preserved). |
| 413 // A1: length (preserved). | 413 // A1: length (preserved). |
| 414 void StubCode::GenerateFixAllocateArrayStubTargetStub(Assembler* assembler) { | 414 void StubCode::GenerateFixAllocateArrayStubTargetStub(Assembler* assembler) { |
| 415 __ TraceSimMsg("FixAllocationStubTarget"); | 415 __ Comment("FixAllocationStubTarget"); |
| 416 __ EnterStubFrame(); | 416 __ EnterStubFrame(); |
| 417 // Setup space on stack for return value. | 417 // Setup space on stack for return value. |
| 418 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 418 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 419 __ sw(A0, Address(SP, 2 * kWordSize)); | 419 __ sw(A0, Address(SP, 2 * kWordSize)); |
| 420 __ sw(A1, Address(SP, 1 * kWordSize)); | 420 __ sw(A1, Address(SP, 1 * kWordSize)); |
| 421 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 421 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 422 __ sw(TMP, Address(SP, 0 * kWordSize)); | 422 __ sw(TMP, Address(SP, 0 * kWordSize)); |
| 423 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); | 423 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0); |
| 424 // Get Code object result. | 424 // Get Code object result. |
| 425 __ lw(T0, Address(SP, 0 * kWordSize)); | 425 __ lw(T0, Address(SP, 0 * kWordSize)); |
| 426 __ lw(A1, Address(SP, 1 * kWordSize)); | 426 __ lw(A1, Address(SP, 1 * kWordSize)); |
| 427 __ lw(A0, Address(SP, 2 * kWordSize)); | 427 __ lw(A0, Address(SP, 2 * kWordSize)); |
| 428 __ addiu(SP, SP, Immediate(3 * kWordSize)); | 428 __ addiu(SP, SP, Immediate(3 * kWordSize)); |
| 429 | 429 |
| 430 // Jump to the dart function. | 430 // Jump to the dart function. |
| 431 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 431 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
| 432 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); | 432 __ AddImmediate(T0, T0, Instructions::HeaderSize() - kHeapObjectTag); |
| 433 | 433 |
| 434 // Remove the stub frame. | 434 // Remove the stub frame. |
| 435 __ LeaveStubFrameAndReturn(T0); | 435 __ LeaveStubFrameAndReturn(T0); |
| 436 } | 436 } |
| 437 | 437 |
| 438 | 438 |
| 439 // Input parameters: | 439 // Input parameters: |
| 440 // A1: Smi-tagged argument count, may be zero. | 440 // A1: Smi-tagged argument count, may be zero. |
| 441 // FP[kParamEndSlotFromFp + 1]: Last argument. | 441 // FP[kParamEndSlotFromFp + 1]: Last argument. |
| 442 static void PushArgumentsArray(Assembler* assembler) { | 442 static void PushArgumentsArray(Assembler* assembler) { |
| 443 StubCode* stub_code = Isolate::Current()->stub_code(); | 443 StubCode* stub_code = Isolate::Current()->stub_code(); |
| 444 __ TraceSimMsg("PushArgumentsArray"); | 444 __ Comment("PushArgumentsArray"); |
| 445 // Allocate array to store arguments of caller. | 445 // Allocate array to store arguments of caller. |
| 446 __ LoadImmediate(A0, reinterpret_cast<intptr_t>(Object::null())); | 446 __ LoadImmediate(A0, reinterpret_cast<intptr_t>(Object::null())); |
| 447 // A0: Null element type for raw Array. | 447 // A0: Null element type for raw Array. |
| 448 // A1: Smi-tagged argument count, may be zero. | 448 // A1: Smi-tagged argument count, may be zero. |
| 449 const Code& array_stub = Code::Handle(stub_code->GetAllocateArrayStub()); | 449 const Code& array_stub = Code::Handle(stub_code->GetAllocateArrayStub()); |
| 450 const ExternalLabel array_label(array_stub.EntryPoint()); | 450 const ExternalLabel array_label(array_stub.EntryPoint()); |
| 451 __ BranchLink(&array_label); | 451 __ BranchLink(&array_label); |
| 452 __ TraceSimMsg("PushArgumentsArray return"); | 452 __ Comment("PushArgumentsArray return"); |
| 453 // V0: newly allocated array. | 453 // V0: newly allocated array. |
| 454 // A1: Smi-tagged argument count, may be zero (was preserved by the stub). | 454 // A1: Smi-tagged argument count, may be zero (was preserved by the stub). |
| 455 __ Push(V0); // Array is in V0 and on top of stack. | 455 __ Push(V0); // Array is in V0 and on top of stack. |
| 456 __ sll(T1, A1, 1); | 456 __ sll(T1, A1, 1); |
| 457 __ addu(T1, FP, T1); | 457 __ addu(T1, FP, T1); |
| 458 __ AddImmediate(T1, kParamEndSlotFromFp * kWordSize); | 458 __ AddImmediate(T1, kParamEndSlotFromFp * kWordSize); |
| 459 // T1: address of first argument on stack. | 459 // T1: address of first argument on stack. |
| 460 // T2: address of first argument in array. | 460 // T2: address of first argument in array. |
| 461 | 461 |
| 462 Label loop, loop_exit; | 462 Label loop, loop_exit; |
| (...skipping 43 matching lines...) |
| 506 // | 506 // |
| 507 // Parts of the code cannot GC, part of the code can GC. | 507 // Parts of the code cannot GC, part of the code can GC. |
| 508 static void GenerateDeoptimizationSequence(Assembler* assembler, | 508 static void GenerateDeoptimizationSequence(Assembler* assembler, |
| 509 bool preserve_result) { | 509 bool preserve_result) { |
| 510 const intptr_t kPushedRegistersSize = | 510 const intptr_t kPushedRegistersSize = |
| 511 kNumberOfCpuRegisters * kWordSize + | 511 kNumberOfCpuRegisters * kWordSize + |
| 512 4 * kWordSize + // PP, FP, RA, PC marker. | 512 4 * kWordSize + // PP, FP, RA, PC marker. |
| 513 kNumberOfFRegisters * kWordSize; | 513 kNumberOfFRegisters * kWordSize; |
| 514 | 514 |
| 515 __ SetPrologueOffset(); | 515 __ SetPrologueOffset(); |
| 516 __ TraceSimMsg("GenerateDeoptimizationSequence"); | 516 __ Comment("GenerateDeoptimizationSequence"); |
| 517 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 517 // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
| 518 // is no need to set the correct PC marker or load PP, since they get patched. | 518 // is no need to set the correct PC marker or load PP, since they get patched. |
| 519 __ addiu(SP, SP, Immediate(-kPushedRegistersSize * kWordSize)); | 519 __ addiu(SP, SP, Immediate(-kPushedRegistersSize * kWordSize)); |
| 520 __ sw(ZR, Address(SP, kPushedRegistersSize - 1 * kWordSize)); | 520 __ sw(ZR, Address(SP, kPushedRegistersSize - 1 * kWordSize)); |
| 521 __ sw(RA, Address(SP, kPushedRegistersSize - 2 * kWordSize)); | 521 __ sw(RA, Address(SP, kPushedRegistersSize - 2 * kWordSize)); |
| 522 __ sw(FP, Address(SP, kPushedRegistersSize - 3 * kWordSize)); | 522 __ sw(FP, Address(SP, kPushedRegistersSize - 3 * kWordSize)); |
| 523 __ sw(PP, Address(SP, kPushedRegistersSize - 4 * kWordSize)); | 523 __ sw(PP, Address(SP, kPushedRegistersSize - 4 * kWordSize)); |
| 524 __ addiu(FP, SP, Immediate(kPushedRegistersSize - 3 * kWordSize)); | 524 __ addiu(FP, SP, Immediate(kPushedRegistersSize - 3 * kWordSize)); |
| 525 | 525 |
| 526 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry | 526 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry |
| (...skipping 136 matching lines...) |
| 663 | 663 |
| 664 // Called for inline allocation of arrays. | 664 // Called for inline allocation of arrays. |
| 665 // Input parameters: | 665 // Input parameters: |
| 666 // RA: return address. | 666 // RA: return address. |
| 667 // A1: Array length as Smi (must be preserved). | 667 // A1: Array length as Smi (must be preserved). |
| 668 // A0: array element type (either NULL or an instantiated type). | 668 // A0: array element type (either NULL or an instantiated type). |
| 669 // NOTE: A1 cannot be clobbered here as the caller relies on it being saved. | 669 // NOTE: A1 cannot be clobbered here as the caller relies on it being saved. |
| 670 // The newly allocated object is returned in V0. | 670 // The newly allocated object is returned in V0. |
| 671 void StubCode::GeneratePatchableAllocateArrayStub(Assembler* assembler, | 671 void StubCode::GeneratePatchableAllocateArrayStub(Assembler* assembler, |
| 672 uword* entry_patch_offset, uword* patch_code_pc_offset) { | 672 uword* entry_patch_offset, uword* patch_code_pc_offset) { |
| 673 __ TraceSimMsg("AllocateArrayStub"); | 673 __ Comment("AllocateArrayStub"); |
| 674 *entry_patch_offset = assembler->CodeSize(); | 674 *entry_patch_offset = assembler->CodeSize(); |
| 675 Label slow_case; | 675 Label slow_case; |
| 676 | 676 |
| 677 // Compute the size to be allocated, it is based on the array length | 677 // Compute the size to be allocated, it is based on the array length |
| 678 // and is computed as: | 678 // and is computed as: |
| 679 // RoundedAllocationSize((array_length * kwordSize) + sizeof(RawArray)). | 679 // RoundedAllocationSize((array_length * kwordSize) + sizeof(RawArray)). |
| 680 __ mov(T3, A1); // Array length. | 680 __ mov(T3, A1); // Array length. |
| 681 | 681 |
| 682 // Check that length is a positive Smi. | 682 // Check that length is a positive Smi. |
| 683 __ andi(CMPRES1, T3, Immediate(kSmiTagMask)); | 683 __ andi(CMPRES1, T3, Immediate(kSmiTagMask)); |
| (...skipping 102 matching lines...) |
| 786 // calling into the runtime. | 786 // calling into the runtime. |
| 787 __ EnterStubFrame(); | 787 __ EnterStubFrame(); |
| 788 // Setup space on stack for return value. | 788 // Setup space on stack for return value. |
| 789 // Push array length as Smi and element type. | 789 // Push array length as Smi and element type. |
| 790 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 790 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 791 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 791 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 792 __ sw(TMP, Address(SP, 2 * kWordSize)); | 792 __ sw(TMP, Address(SP, 2 * kWordSize)); |
| 793 __ sw(A1, Address(SP, 1 * kWordSize)); | 793 __ sw(A1, Address(SP, 1 * kWordSize)); |
| 794 __ sw(A0, Address(SP, 0 * kWordSize)); | 794 __ sw(A0, Address(SP, 0 * kWordSize)); |
| 795 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); | 795 __ CallRuntime(kAllocateArrayRuntimeEntry, 2); |
| 796 __ TraceSimMsg("AllocateArrayStub return"); | 796 __ Comment("AllocateArrayStub return"); |
| 797 // Pop arguments; result is popped in IP. | 797 // Pop arguments; result is popped in IP. |
| 798 __ lw(V0, Address(SP, 2 * kWordSize)); | 798 __ lw(V0, Address(SP, 2 * kWordSize)); |
| 799 __ lw(A1, Address(SP, 1 * kWordSize)); | 799 __ lw(A1, Address(SP, 1 * kWordSize)); |
| 800 __ lw(A0, Address(SP, 0 * kWordSize)); | 800 __ lw(A0, Address(SP, 0 * kWordSize)); |
| 801 __ addiu(SP, SP, Immediate(3 * kWordSize)); | 801 __ addiu(SP, SP, Immediate(3 * kWordSize)); |
| 802 | 802 |
| 803 __ LeaveStubFrameAndReturn(); | 803 __ LeaveStubFrameAndReturn(); |
| 804 *patch_code_pc_offset = assembler->CodeSize(); | 804 *patch_code_pc_offset = assembler->CodeSize(); |
| 805 StubCode* stub_code = Isolate::Current()->stub_code(); | 805 StubCode* stub_code = Isolate::Current()->stub_code(); |
| 806 __ BranchPatchable(&stub_code->FixAllocateArrayStubTargetLabel()); | 806 __ BranchPatchable(&stub_code->FixAllocateArrayStubTargetLabel()); |
| 807 } | 807 } |
| 808 | 808 |
| 809 | 809 |
| 810 // Called when invoking Dart code from C++ (VM code). | 810 // Called when invoking Dart code from C++ (VM code). |
| 811 // Input parameters: | 811 // Input parameters: |
| 812 // RA : points to return address. | 812 // RA : points to return address. |
| 813 // A0 : entrypoint of the Dart function to call. | 813 // A0 : entrypoint of the Dart function to call. |
| 814 // A1 : arguments descriptor array. | 814 // A1 : arguments descriptor array. |
| 815 // A2 : arguments array. | 815 // A2 : arguments array. |
| 816 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 816 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
| 817 // Save frame pointer coming in. | 817 // Save frame pointer coming in. |
| 818 __ TraceSimMsg("InvokeDartCodeStub"); | 818 __ Comment("InvokeDartCodeStub"); |
| 819 __ EnterFrame(); | 819 __ EnterFrame(); |
| 820 | 820 |
| 821 // Save new context and C++ ABI callee-saved registers. | 821 // Save new context and C++ ABI callee-saved registers. |
| 822 | 822 |
| 823 // The saved vm tag, top resource, and top exit frame info. | 823 // The saved vm tag, top resource, and top exit frame info. |
| 824 const intptr_t kPreservedSlots = 3; | 824 const intptr_t kPreservedSlots = 3; |
| 825 const intptr_t kPreservedRegSpace = | 825 const intptr_t kPreservedRegSpace = |
| 826 kWordSize * (kAbiPreservedCpuRegCount + kAbiPreservedFpuRegCount + | 826 kWordSize * (kAbiPreservedCpuRegCount + kAbiPreservedFpuRegCount + |
| 827 kPreservedSlots); | 827 kPreservedSlots); |
| 828 | 828 |
| (...skipping 65 matching lines...) |
| 894 __ addiu(A1, A1, Immediate(1)); | 894 __ addiu(A1, A1, Immediate(1)); |
| 895 __ BranchSignedLess(A1, T1, &push_arguments); | 895 __ BranchSignedLess(A1, T1, &push_arguments); |
| 896 __ delay_slot()->addiu(A2, A2, Immediate(kWordSize)); | 896 __ delay_slot()->addiu(A2, A2, Immediate(kWordSize)); |
| 897 | 897 |
| 898 __ Bind(&done_push_arguments); | 898 __ Bind(&done_push_arguments); |
| 899 | 899 |
| 900 // Call the Dart code entrypoint. | 900 // Call the Dart code entrypoint. |
| 901 // We are calling into Dart code, here, so there is no need to call through | 901 // We are calling into Dart code, here, so there is no need to call through |
| 902 // T9 to match the ABI. | 902 // T9 to match the ABI. |
| 903 __ jalr(A0); // S4 is the arguments descriptor array. | 903 __ jalr(A0); // S4 is the arguments descriptor array. |
| 904 __ TraceSimMsg("InvokeDartCodeStub return"); | 904 __ Comment("InvokeDartCodeStub return"); |
| 905 | 905 |
| 906 // Get rid of arguments pushed on the stack. | 906 // Get rid of arguments pushed on the stack. |
| 907 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); | 907 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); |
| 908 | 908 |
| 909 __ LoadIsolate(S6); | 909 __ LoadIsolate(S6); |
| 910 | 910 |
| 911 // Restore the current VMTag from the stack. | 911 // Restore the current VMTag from the stack. |
| 912 __ lw(T1, Address(SP, 2 * kWordSize)); | 912 __ lw(T1, Address(SP, 2 * kWordSize)); |
| 913 __ sw(T1, Address(S6, Isolate::vm_tag_offset())); | 913 __ sw(T1, Address(S6, Isolate::vm_tag_offset())); |
| 914 | 914 |
| (...skipping 26 matching lines...) |
| 941 __ LeaveFrameAndReturn(); | 941 __ LeaveFrameAndReturn(); |
| 942 } | 942 } |
| 943 | 943 |
| 944 | 944 |
| 945 // Called for inline allocation of contexts. | 945 // Called for inline allocation of contexts. |
| 946 // Input: | 946 // Input: |
| 947 // T1: number of context variables. | 947 // T1: number of context variables. |
| 948 // Output: | 948 // Output: |
| 949 // V0: new allocated RawContext object. | 949 // V0: new allocated RawContext object. |
| 950 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { | 950 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { |
| 951 __ TraceSimMsg("AllocateContext"); | 951 __ Comment("AllocateContext"); |
| 952 if (FLAG_inline_alloc) { | 952 if (FLAG_inline_alloc) { |
| 953 Label slow_case; | 953 Label slow_case; |
| 954 Heap* heap = Isolate::Current()->heap(); | 954 Heap* heap = Isolate::Current()->heap(); |
| 955 // First compute the rounded instance size. | 955 // First compute the rounded instance size. |
| 956 // T1: number of context variables. | 956 // T1: number of context variables. |
| 957 intptr_t fixed_size = sizeof(RawContext) + kObjectAlignment - 1; | 957 intptr_t fixed_size = sizeof(RawContext) + kObjectAlignment - 1; |
| 958 __ LoadImmediate(T2, fixed_size); | 958 __ LoadImmediate(T2, fixed_size); |
| 959 __ sll(T0, T1, 2); | 959 __ sll(T0, T1, 2); |
| 960 __ addu(T2, T2, T0); | 960 __ addu(T2, T2, T0); |
| 961 ASSERT(kSmiTagShift == 1); | 961 ASSERT(kSmiTagShift == 1); |
| (...skipping 98 matching lines...) |
| 1060 | 1060 |
| 1061 | 1061 |
| 1062 DECLARE_LEAF_RUNTIME_ENTRY(void, StoreBufferBlockProcess, Isolate* isolate); | 1062 DECLARE_LEAF_RUNTIME_ENTRY(void, StoreBufferBlockProcess, Isolate* isolate); |
| 1063 | 1063 |
| 1064 | 1064 |
| 1065 // Helper stub to implement Assembler::StoreIntoObject. | 1065 // Helper stub to implement Assembler::StoreIntoObject. |
| 1066 // Input parameters: | 1066 // Input parameters: |
| 1067 // T0: Address (i.e. object) being stored into. | 1067 // T0: Address (i.e. object) being stored into. |
| 1068 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { | 1068 void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) { |
| 1069 // Save values being destroyed. | 1069 // Save values being destroyed. |
| 1070 __ TraceSimMsg("UpdateStoreBufferStub"); | 1070 __ Comment("UpdateStoreBufferStub"); |
| 1071 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 1071 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 1072 __ sw(T3, Address(SP, 2 * kWordSize)); | 1072 __ sw(T3, Address(SP, 2 * kWordSize)); |
| 1073 __ sw(T2, Address(SP, 1 * kWordSize)); | 1073 __ sw(T2, Address(SP, 1 * kWordSize)); |
| 1074 __ sw(T1, Address(SP, 0 * kWordSize)); | 1074 __ sw(T1, Address(SP, 0 * kWordSize)); |
| 1075 | 1075 |
| 1076 Label add_to_buffer; | 1076 Label add_to_buffer; |
| 1077 // Check whether this object has already been remembered. Skip adding to the | 1077 // Check whether this object has already been remembered. Skip adding to the |
| 1078 // store buffer if the object is in the store buffer already. | 1078 // store buffer if the object is in the store buffer already. |
| 1079 // Spilled: T1, T2, T3. | 1079 // Spilled: T1, T2, T3. |
| 1080 // T0: Address being stored. | 1080 // T0: Address being stored. |
| (...skipping 39 matching lines...) |
| 1120 __ delay_slot()->addiu(SP, SP, Immediate(3 * kWordSize)); | 1120 __ delay_slot()->addiu(SP, SP, Immediate(3 * kWordSize)); |
| 1121 __ Ret(); | 1121 __ Ret(); |
| 1122 | 1122 |
| 1123 // Handle overflow: Call the runtime leaf function. | 1123 // Handle overflow: Call the runtime leaf function. |
| 1124 __ Bind(&L); | 1124 __ Bind(&L); |
| 1125 // Setup frame, push callee-saved registers. | 1125 // Setup frame, push callee-saved registers. |
| 1126 | 1126 |
| 1127 __ EnterCallRuntimeFrame(1 * kWordSize); | 1127 __ EnterCallRuntimeFrame(1 * kWordSize); |
| 1128 __ LoadIsolate(A0); | 1128 __ LoadIsolate(A0); |
| 1129 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); | 1129 __ CallRuntime(kStoreBufferBlockProcessRuntimeEntry, 1); |
| 1130 __ TraceSimMsg("UpdateStoreBufferStub return"); | 1130 __ Comment("UpdateStoreBufferStub return"); |
| 1131 // Restore callee-saved registers, tear down frame. | 1131 // Restore callee-saved registers, tear down frame. |
| 1132 __ LeaveCallRuntimeFrame(); | 1132 __ LeaveCallRuntimeFrame(); |
| 1133 __ Ret(); | 1133 __ Ret(); |
| 1134 } | 1134 } |
| 1135 | 1135 |
| 1136 | 1136 |
| 1137 // Called for inline allocation of objects. | 1137 // Called for inline allocation of objects. |
| 1138 // Input parameters: | 1138 // Input parameters: |
| 1139 // RA : return address. | 1139 // RA : return address. |
| 1140 // SP + 0 : type arguments object (only if class is parameterized). | 1140 // SP + 0 : type arguments object (only if class is parameterized). |
| 1141 // Returns patch_code_pc offset where patching code for disabling the stub | 1141 // Returns patch_code_pc offset where patching code for disabling the stub |
| 1142 // has been generated (similar to regularly generated Dart code). | 1142 // has been generated (similar to regularly generated Dart code). |
| 1143 void StubCode::GenerateAllocationStubForClass( | 1143 void StubCode::GenerateAllocationStubForClass( |
| 1144 Assembler* assembler, const Class& cls, | 1144 Assembler* assembler, const Class& cls, |
| 1145 uword* entry_patch_offset, uword* patch_code_pc_offset) { | 1145 uword* entry_patch_offset, uword* patch_code_pc_offset) { |
| 1146 __ TraceSimMsg("AllocationStubForClass"); | 1146 __ Comment("AllocationStubForClass"); |
| 1147 *entry_patch_offset = assembler->CodeSize(); | 1147 *entry_patch_offset = assembler->CodeSize(); |
| 1148 // The generated code is different if the class is parameterized. | 1148 // The generated code is different if the class is parameterized. |
| 1149 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; | 1149 const bool is_cls_parameterized = cls.NumTypeArguments() > 0; |
| 1150 ASSERT(!is_cls_parameterized || | 1150 ASSERT(!is_cls_parameterized || |
| 1151 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); | 1151 (cls.type_arguments_field_offset() != Class::kNoTypeArguments)); |
| 1152 // kInlineInstanceSize is a constant used as a threshold for determining | 1152 // kInlineInstanceSize is a constant used as a threshold for determining |
| 1153 // when the object initialization should be done as a loop or as | 1153 // when the object initialization should be done as a loop or as |
| 1154 // straight line code. | 1154 // straight line code. |
| 1155 const int kInlineInstanceSize = 12; | 1155 const int kInlineInstanceSize = 12; |
| 1156 const intptr_t instance_size = cls.instance_size(); | 1156 const intptr_t instance_size = cls.instance_size(); |
| (...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1252 __ sw(TMP, Address(SP, 1 * kWordSize)); // Class of object to be allocated. | 1252 __ sw(TMP, Address(SP, 1 * kWordSize)); // Class of object to be allocated. |
| 1253 | 1253 |
| 1254 if (is_cls_parameterized) { | 1254 if (is_cls_parameterized) { |
| 1255 // Push type arguments of object to be allocated and of instantiator. | 1255 // Push type arguments of object to be allocated and of instantiator. |
| 1256 __ sw(T1, Address(SP, 0 * kWordSize)); | 1256 __ sw(T1, Address(SP, 0 * kWordSize)); |
| 1257 } else { | 1257 } else { |
| 1258 // Push null type arguments. | 1258 // Push null type arguments. |
| 1259 __ sw(T7, Address(SP, 0 * kWordSize)); | 1259 __ sw(T7, Address(SP, 0 * kWordSize)); |
| 1260 } | 1260 } |
| 1261 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. | 1261 __ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object. |
| 1262 __ TraceSimMsg("AllocationStubForClass return"); | 1262 __ Comment("AllocationStubForClass return"); |
| 1263 // Pop result (newly allocated object). | 1263 // Pop result (newly allocated object). |
| 1264 __ lw(V0, Address(SP, 2 * kWordSize)); | 1264 __ lw(V0, Address(SP, 2 * kWordSize)); |
| 1265 __ addiu(SP, SP, Immediate(3 * kWordSize)); // Pop arguments. | 1265 __ addiu(SP, SP, Immediate(3 * kWordSize)); // Pop arguments. |
| 1266 // V0: new object | 1266 // V0: new object |
| 1267 // Restore the frame pointer and return. | 1267 // Restore the frame pointer and return. |
| 1268 __ LeaveStubFrameAndReturn(RA); | 1268 __ LeaveStubFrameAndReturn(RA); |
| 1269 *patch_code_pc_offset = assembler->CodeSize(); | 1269 *patch_code_pc_offset = assembler->CodeSize(); |
| 1270 StubCode* stub_code = Isolate::Current()->stub_code(); | 1270 StubCode* stub_code = Isolate::Current()->stub_code(); |
| 1271 __ BranchPatchable(&stub_code->FixAllocationStubTargetLabel()); | 1271 __ BranchPatchable(&stub_code->FixAllocationStubTargetLabel()); |
| 1272 } | 1272 } |
| (...skipping 32 matching lines...) |
| 1305 // noSuchMethod on closures always throws an error, so it will never return. | 1305 // noSuchMethod on closures always throws an error, so it will never return. |
| 1306 __ break_(0); | 1306 __ break_(0); |
| 1307 } | 1307 } |
| 1308 | 1308 |
| 1309 | 1309 |
| 1310 // T0: function object. | 1310 // T0: function object. |
| 1311 // S5: inline cache data object. | 1311 // S5: inline cache data object. |
| 1312 // Cannot use function object from ICData as it may be the inlined | 1312 // Cannot use function object from ICData as it may be the inlined |
| 1313 // function and not the top-scope function. | 1313 // function and not the top-scope function. |
| 1314 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { | 1314 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { |
| 1315 __ TraceSimMsg("OptimizedUsageCounterIncrement"); | 1315 __ Comment("OptimizedUsageCounterIncrement"); |
| 1316 Register ic_reg = S5; | 1316 Register ic_reg = S5; |
| 1317 Register func_reg = T0; | 1317 Register func_reg = T0; |
| 1318 if (FLAG_trace_optimized_ic_calls) { | 1318 if (FLAG_trace_optimized_ic_calls) { |
| 1319 __ EnterStubFrame(); | 1319 __ EnterStubFrame(); |
| 1320 __ addiu(SP, SP, Immediate(-4 * kWordSize)); | 1320 __ addiu(SP, SP, Immediate(-4 * kWordSize)); |
| 1321 __ sw(T0, Address(SP, 3 * kWordSize)); | 1321 __ sw(T0, Address(SP, 3 * kWordSize)); |
| 1322 __ sw(S5, Address(SP, 2 * kWordSize)); | 1322 __ sw(S5, Address(SP, 2 * kWordSize)); |
| 1323 __ sw(ic_reg, Address(SP, 1 * kWordSize)); // Argument. | 1323 __ sw(ic_reg, Address(SP, 1 * kWordSize)); // Argument. |
| 1324 __ sw(func_reg, Address(SP, 0 * kWordSize)); // Argument. | 1324 __ sw(func_reg, Address(SP, 0 * kWordSize)); // Argument. |
| 1325 __ CallRuntime(kTraceICCallRuntimeEntry, 2); | 1325 __ CallRuntime(kTraceICCallRuntimeEntry, 2); |
| 1326 __ lw(S5, Address(SP, 2 * kWordSize)); | 1326 __ lw(S5, Address(SP, 2 * kWordSize)); |
| 1327 __ lw(T0, Address(SP, 3 * kWordSize)); | 1327 __ lw(T0, Address(SP, 3 * kWordSize)); |
| 1328 __ addiu(SP, SP, Immediate(4 * kWordSize)); // Discard argument; | 1328 __ addiu(SP, SP, Immediate(4 * kWordSize)); // Discard argument; |
| 1329 __ LeaveStubFrame(); | 1329 __ LeaveStubFrame(); |
| 1330 } | 1330 } |
| 1331 __ lw(T7, FieldAddress(func_reg, Function::usage_counter_offset())); | 1331 __ lw(T7, FieldAddress(func_reg, Function::usage_counter_offset())); |
| 1332 __ addiu(T7, T7, Immediate(1)); | 1332 __ addiu(T7, T7, Immediate(1)); |
| 1333 __ sw(T7, FieldAddress(func_reg, Function::usage_counter_offset())); | 1333 __ sw(T7, FieldAddress(func_reg, Function::usage_counter_offset())); |
| 1334 } | 1334 } |
| 1335 | 1335 |
| 1336 | 1336 |
| 1337 // Loads function into 'temp_reg'. | 1337 // Loads function into 'temp_reg'. |
| 1338 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, | 1338 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, |
| 1339 Register temp_reg) { | 1339 Register temp_reg) { |
| 1340 __ TraceSimMsg("UsageCounterIncrement"); | 1340 __ Comment("UsageCounterIncrement"); |
| 1341 Register ic_reg = S5; | 1341 Register ic_reg = S5; |
| 1342 Register func_reg = temp_reg; | 1342 Register func_reg = temp_reg; |
| 1343 ASSERT(temp_reg == T0); | 1343 ASSERT(temp_reg == T0); |
| 1344 __ Comment("Increment function counter"); | 1344 __ Comment("Increment function counter"); |
| 1345 __ lw(func_reg, FieldAddress(ic_reg, ICData::owner_offset())); | 1345 __ lw(func_reg, FieldAddress(ic_reg, ICData::owner_offset())); |
| 1346 __ lw(T1, FieldAddress(func_reg, Function::usage_counter_offset())); | 1346 __ lw(T1, FieldAddress(func_reg, Function::usage_counter_offset())); |
| 1347 __ addiu(T1, T1, Immediate(1)); | 1347 __ addiu(T1, T1, Immediate(1)); |
| 1348 __ sw(T1, FieldAddress(func_reg, Function::usage_counter_offset())); | 1348 __ sw(T1, FieldAddress(func_reg, Function::usage_counter_offset())); |
| 1349 } | 1349 } |
| 1350 | 1350 |
| (...skipping 75 matching lines...) |
| 1426 // - If receiver is not-Smi -> load receiver's class. | 1426 // - If receiver is not-Smi -> load receiver's class. |
| 1427 // - Check if 'num_args' (including receiver) match any IC data group. | 1427 // - Check if 'num_args' (including receiver) match any IC data group. |
| 1428 // - Match found -> jump to target. | 1428 // - Match found -> jump to target. |
| 1429 // - Match not found -> jump to IC miss. | 1429 // - Match not found -> jump to IC miss. |
| 1430 void StubCode::GenerateNArgsCheckInlineCacheStub( | 1430 void StubCode::GenerateNArgsCheckInlineCacheStub( |
| 1431 Assembler* assembler, | 1431 Assembler* assembler, |
| 1432 intptr_t num_args, | 1432 intptr_t num_args, |
| 1433 const RuntimeEntry& handle_ic_miss, | 1433 const RuntimeEntry& handle_ic_miss, |
| 1434 Token::Kind kind, | 1434 Token::Kind kind, |
| 1435 RangeCollectionMode range_collection_mode) { | 1435 RangeCollectionMode range_collection_mode) { |
| 1436 __ TraceSimMsg("NArgsCheckInlineCacheStub"); | 1436 __ Comment("NArgsCheckInlineCacheStub"); |
| 1437 ASSERT(num_args > 0); | 1437 ASSERT(num_args > 0); |
| 1438 #if defined(DEBUG) | 1438 #if defined(DEBUG) |
| 1439 { Label ok; | 1439 { Label ok; |
| 1440 // Check that the IC data array has NumArgsTested() == num_args. | 1440 // Check that the IC data array has NumArgsTested() == num_args. |
| 1441 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 1441 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. |
| 1442 __ lw(T0, FieldAddress(S5, ICData::state_bits_offset())); | 1442 __ lw(T0, FieldAddress(S5, ICData::state_bits_offset())); |
| 1443 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. | 1443 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. |
| 1444 __ andi(T0, T0, Immediate(ICData::NumArgsTestedMask())); | 1444 __ andi(T0, T0, Immediate(ICData::NumArgsTestedMask())); |
| 1445 __ BranchEqual(T0, Immediate(num_args), &ok); | 1445 __ BranchEqual(T0, Immediate(num_args), &ok); |
| 1446 __ Stop("Incorrect stub for IC data"); | 1446 __ Stop("Incorrect stub for IC data"); |
| (...skipping 118 matching lines...) |
| 1565 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 1565 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 1566 __ sw(TMP, Address(SP, (num_slots - 3) * kWordSize)); | 1566 __ sw(TMP, Address(SP, (num_slots - 3) * kWordSize)); |
| 1567 // Push call arguments. | 1567 // Push call arguments. |
| 1568 for (intptr_t i = 0; i < num_args; i++) { | 1568 for (intptr_t i = 0; i < num_args; i++) { |
| 1569 __ lw(TMP, Address(T1, -i * kWordSize)); | 1569 __ lw(TMP, Address(T1, -i * kWordSize)); |
| 1570 __ sw(TMP, Address(SP, (num_slots - i - 4) * kWordSize)); | 1570 __ sw(TMP, Address(SP, (num_slots - i - 4) * kWordSize)); |
| 1571 } | 1571 } |
| 1572 // Pass IC data object. | 1572 // Pass IC data object. |
| 1573 __ sw(S5, Address(SP, (num_slots - num_args - 4) * kWordSize)); | 1573 __ sw(S5, Address(SP, (num_slots - num_args - 4) * kWordSize)); |
| 1574 __ CallRuntime(handle_ic_miss, num_args + 1); | 1574 __ CallRuntime(handle_ic_miss, num_args + 1); |
| 1575 __ TraceSimMsg("NArgsCheckInlineCacheStub return"); | 1575 __ Comment("NArgsCheckInlineCacheStub return"); |
| 1576 // Pop returned function object into T3. | 1576 // Pop returned function object into T3. |
| 1577 // Restore arguments descriptor array and IC data array. | 1577 // Restore arguments descriptor array and IC data array. |
| 1578 __ lw(T3, Address(SP, (num_slots - 3) * kWordSize)); | 1578 __ lw(T3, Address(SP, (num_slots - 3) * kWordSize)); |
| 1579 __ lw(S4, Address(SP, (num_slots - 2) * kWordSize)); | 1579 __ lw(S4, Address(SP, (num_slots - 2) * kWordSize)); |
| 1580 __ lw(S5, Address(SP, (num_slots - 1) * kWordSize)); | 1580 __ lw(S5, Address(SP, (num_slots - 1) * kWordSize)); |
| 1581 // Remove the call arguments pushed earlier, including the IC data object | 1581 // Remove the call arguments pushed earlier, including the IC data object |
| 1582 // and the arguments descriptor array. | 1582 // and the arguments descriptor array. |
| 1583 __ addiu(SP, SP, Immediate(num_slots * kWordSize)); | 1583 __ addiu(SP, SP, Immediate(num_slots * kWordSize)); |
| 1584 __ LeaveStubFrame(); | 1584 __ LeaveStubFrame(); |
| 1585 | 1585 |
| (...skipping 167 matching lines...) |
| 1753 kInlineCacheMissHandlerThreeArgsRuntimeEntry, Token::kILLEGAL, | 1753 kInlineCacheMissHandlerThreeArgsRuntimeEntry, Token::kILLEGAL, |
| 1754 kIgnoreRanges); | 1754 kIgnoreRanges); |
| 1755 } | 1755 } |
| 1756 | 1756 |
| 1757 | 1757 |
| 1758 // Intermediary stub between a static call and its target. ICData contains | 1758 // Intermediary stub between a static call and its target. ICData contains |
| 1759 // the target function and the call count. | 1759 // the target function and the call count. |
| 1760 // S5: ICData | 1760 // S5: ICData |
| 1761 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { | 1761 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) { |
| 1762 GenerateUsageCounterIncrement(assembler, T0); | 1762 GenerateUsageCounterIncrement(assembler, T0); |
| 1763 __ TraceSimMsg("UnoptimizedStaticCallStub"); | 1763 __ Comment("UnoptimizedStaticCallStub"); |
| 1764 #if defined(DEBUG) | 1764 #if defined(DEBUG) |
| 1765 { Label ok; | 1765 { Label ok; |
| 1766 // Check that the IC data array has NumArgsTested() == 0. | 1766 // Check that the IC data array has NumArgsTested() == 0. |
| 1767 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. | 1767 // 'NumArgsTested' is stored in the least significant bits of 'state_bits'. |
| 1768 __ lw(T0, FieldAddress(S5, ICData::state_bits_offset())); | 1768 __ lw(T0, FieldAddress(S5, ICData::state_bits_offset())); |
| 1769 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. | 1769 ASSERT(ICData::NumArgsTestedShift() == 0); // No shift needed. |
| 1770 __ andi(T0, T0, Immediate(ICData::NumArgsTestedMask())); | 1770 __ andi(T0, T0, Immediate(ICData::NumArgsTestedMask())); |
| 1771 __ beq(T0, ZR, &ok); | 1771 __ beq(T0, ZR, &ok); |
| 1772 __ Stop("Incorrect IC data for unoptimized static call"); | 1772 __ Stop("Incorrect IC data for unoptimized static call"); |
| 1773 __ Bind(&ok); | 1773 __ Bind(&ok); |
| (...skipping 171 matching lines...) |
| 1945 } | 1945 } |
| 1946 | 1946 |
| 1947 | 1947 |
| 1948 // Used to check class and type arguments. Arguments passed in registers: | 1948 // Used to check class and type arguments. Arguments passed in registers: |
| 1949 // RA: return address. | 1949 // RA: return address. |
| 1950 // A0: instance (must be preserved). | 1950 // A0: instance (must be preserved). |
| 1951 // A1: instantiator type arguments or NULL. | 1951 // A1: instantiator type arguments or NULL. |
| 1952 // A2: cache array. | 1952 // A2: cache array. |
| 1953 // Result in V0: null -> not found, otherwise result (true or false). | 1953 // Result in V0: null -> not found, otherwise result (true or false). |
| 1954 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { | 1954 static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { |
| 1955 __ TraceSimMsg("SubtypeNTestCacheStub"); | 1955 __ Comment("SubtypeNTestCacheStub"); |
| 1956 ASSERT((1 <= n) && (n <= 3)); | 1956 ASSERT((1 <= n) && (n <= 3)); |
| 1957 if (n > 1) { | 1957 if (n > 1) { |
| 1958 // Get instance type arguments. | 1958 // Get instance type arguments. |
| 1959 __ LoadClass(T0, A0); | 1959 __ LoadClass(T0, A0); |
| 1960 // Compute instance type arguments into T1. | 1960 // Compute instance type arguments into T1. |
| 1961 Label has_no_type_arguments; | 1961 Label has_no_type_arguments; |
| 1962 __ LoadImmediate(T1, reinterpret_cast<intptr_t>(Object::null())); | 1962 __ LoadImmediate(T1, reinterpret_cast<intptr_t>(Object::null())); |
| 1963 __ lw(T2, FieldAddress(T0, | 1963 __ lw(T2, FieldAddress(T0, |
| 1964 Class::type_arguments_field_offset_in_words_offset())); | 1964 Class::type_arguments_field_offset_in_words_offset())); |
| 1965 __ BranchEqual( | 1965 __ BranchEqual( |
| (...skipping 122 matching lines...) |
| 2088 | 2088 |
| 2089 __ jr(A0); // Jump to the exception handler code. | 2089 __ jr(A0); // Jump to the exception handler code. |
| 2090 __ delay_slot()->mov(SP, A1); // Stack pointer. | 2090 __ delay_slot()->mov(SP, A1); // Stack pointer. |
| 2091 } | 2091 } |
| 2092 | 2092 |
| 2093 | 2093 |
| 2094 // Calls to the runtime to optimize the given function. | 2094 // Calls to the runtime to optimize the given function. |
| 2095 // T0: function to be reoptimized. | 2095 // T0: function to be reoptimized. |
| 2096 // S4: argument descriptor (preserved). | 2096 // S4: argument descriptor (preserved). |
| 2097 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 2097 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
| 2098 __ TraceSimMsg("OptimizeFunctionStub"); | 2098 __ Comment("OptimizeFunctionStub"); |
| 2099 __ EnterStubFrame(); | 2099 __ EnterStubFrame(); |
| 2100 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 2100 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 2101 __ sw(S4, Address(SP, 2 * kWordSize)); | 2101 __ sw(S4, Address(SP, 2 * kWordSize)); |
| 2102 // Setup space on stack for return value. | 2102 // Setup space on stack for return value. |
| 2103 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); | 2103 __ LoadImmediate(TMP, reinterpret_cast<intptr_t>(Object::null())); |
| 2104 __ sw(TMP, Address(SP, 1 * kWordSize)); | 2104 __ sw(TMP, Address(SP, 1 * kWordSize)); |
| 2105 __ sw(T0, Address(SP, 0 * kWordSize)); | 2105 __ sw(T0, Address(SP, 0 * kWordSize)); |
| 2106 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 2106 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
| 2107 __ TraceSimMsg("OptimizeFunctionStub return"); | 2107 __ Comment("OptimizeFunctionStub return"); |
| 2108 __ lw(T0, Address(SP, 1 * kWordSize)); // Get Code object | 2108 __ lw(T0, Address(SP, 1 * kWordSize)); // Get Code object |
| 2109 __ lw(S4, Address(SP, 2 * kWordSize)); // Restore argument descriptor. | 2109 __ lw(S4, Address(SP, 2 * kWordSize)); // Restore argument descriptor. |
| 2110 __ addiu(SP, SP, Immediate(3 * kWordSize)); // Discard argument. | 2110 __ addiu(SP, SP, Immediate(3 * kWordSize)); // Discard argument. |
| 2111 | 2111 |
| 2112 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); | 2112 __ lw(T0, FieldAddress(T0, Code::instructions_offset())); |
| 2113 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); | 2113 __ AddImmediate(T0, Instructions::HeaderSize() - kHeapObjectTag); |
| 2114 __ LeaveStubFrameAndReturn(T0); | 2114 __ LeaveStubFrameAndReturn(T0); |
| 2115 __ break_(0); | 2115 __ break_(0); |
| 2116 } | 2116 } |
| 2117 | 2117 |
| 2118 | 2118 |
| 2119 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, | 2119 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, |
| 2120 BigintCompare, | 2120 BigintCompare, |
| 2121 RawBigint* left, | 2121 RawBigint* left, |
| 2122 RawBigint* right); | 2122 RawBigint* right); |
| 2123 | 2123 |
| 2124 | 2124 |
| 2125 // Does identical check (object references are equal or not equal) with special | 2125 // Does identical check (object references are equal or not equal) with special |
| 2126 // checks for boxed numbers. | 2126 // checks for boxed numbers. |
| 2127 // Returns: CMPRES1 is zero if equal, non-zero otherwise. | 2127 // Returns: CMPRES1 is zero if equal, non-zero otherwise. |
| 2128 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint | 2128 // Note: A Mint cannot contain a value that would fit in Smi, a Bigint |
| 2129 // cannot contain a value that fits in Mint or Smi. | 2129 // cannot contain a value that fits in Mint or Smi. |
| 2130 void StubCode::GenerateIdenticalWithNumberCheckStub(Assembler* assembler, | 2130 void StubCode::GenerateIdenticalWithNumberCheckStub(Assembler* assembler, |
| 2131 const Register left, | 2131 const Register left, |
| 2132 const Register right, | 2132 const Register right, |
| 2133 const Register temp1, | 2133 const Register temp1, |
| 2134 const Register temp2) { | 2134 const Register temp2) { |
| 2135 __ TraceSimMsg("IdenticalWithNumberCheckStub"); | |
| 2136 __ Comment("IdenticalWithNumberCheckStub"); | 2135 __ Comment("IdenticalWithNumberCheckStub"); |
| 2137 Label reference_compare, done, check_mint, check_bigint; | 2136 Label reference_compare, done, check_mint, check_bigint; |
| 2138 // If any of the arguments is Smi do reference compare. | 2137 // If any of the arguments is Smi do reference compare. |
| 2139 __ andi(temp1, left, Immediate(kSmiTagMask)); | 2138 __ andi(temp1, left, Immediate(kSmiTagMask)); |
| 2140 __ beq(temp1, ZR, &reference_compare); | 2139 __ beq(temp1, ZR, &reference_compare); |
| 2141 __ andi(temp1, right, Immediate(kSmiTagMask)); | 2140 __ andi(temp1, right, Immediate(kSmiTagMask)); |
| 2142 __ beq(temp1, ZR, &reference_compare); | 2141 __ beq(temp1, ZR, &reference_compare); |
| 2143 | 2142 |
| 2144 // Value compare for two doubles. | 2143 // Value compare for two doubles. |
| 2145 __ LoadImmediate(temp1, kDoubleCid); | 2144 __ LoadImmediate(temp1, kDoubleCid); |
| (...skipping 38 matching lines...) |
| 2184 __ subu(CMPRES1, temp1, temp2); | 2183 __ subu(CMPRES1, temp1, temp2); |
| 2185 __ bne(CMPRES1, ZR, &done); | 2184 __ bne(CMPRES1, ZR, &done); |
| 2186 | 2185 |
| 2187 __ EnterStubFrame(); | 2186 __ EnterStubFrame(); |
| 2188 __ ReserveAlignedFrameSpace(2 * kWordSize); | 2187 __ ReserveAlignedFrameSpace(2 * kWordSize); |
| 2189 __ sw(left, Address(SP, 1 * kWordSize)); | 2188 __ sw(left, Address(SP, 1 * kWordSize)); |
| 2190 __ sw(right, Address(SP, 0 * kWordSize)); | 2189 __ sw(right, Address(SP, 0 * kWordSize)); |
| 2191 __ mov(A0, left); | 2190 __ mov(A0, left); |
| 2192 __ mov(A1, right); | 2191 __ mov(A1, right); |
| 2193 __ CallRuntime(kBigintCompareRuntimeEntry, 2); | 2192 __ CallRuntime(kBigintCompareRuntimeEntry, 2); |
| 2194 __ TraceSimMsg("IdenticalWithNumberCheckStub return"); | 2193 __ Comment("IdenticalWithNumberCheckStub return"); |
| 2195 // Result in V0, 0 means equal. | 2194 // Result in V0, 0 means equal. |
| 2196 __ LeaveStubFrame(); | 2195 __ LeaveStubFrame(); |
| 2197 __ b(&done); | 2196 __ b(&done); |
| 2198 __ delay_slot()->mov(CMPRES1, V0); | 2197 __ delay_slot()->mov(CMPRES1, V0); |
| 2199 | 2198 |
| 2200 __ Bind(&reference_compare); | 2199 __ Bind(&reference_compare); |
| 2201 __ subu(CMPRES1, left, right); | 2200 __ subu(CMPRES1, left, right); |
| 2202 __ Bind(&done); | 2201 __ Bind(&done); |
| 2203 // A branch or test after this comparison will check CMPRES1 == ZR. | 2202 // A branch or test after this comparison will check CMPRES1 == ZR. |
| 2204 } | 2203 } |
| (...skipping 47 matching lines...) |
| 2252 const Register right = T0; | 2251 const Register right = T0; |
| 2253 __ lw(left, Address(SP, 1 * kWordSize)); | 2252 __ lw(left, Address(SP, 1 * kWordSize)); |
| 2254 __ lw(right, Address(SP, 0 * kWordSize)); | 2253 __ lw(right, Address(SP, 0 * kWordSize)); |
| 2255 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp1, temp2); | 2254 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp1, temp2); |
| 2256 __ Ret(); | 2255 __ Ret(); |
| 2257 } | 2256 } |
| 2258 | 2257 |
| 2259 } // namespace dart | 2258 } // namespace dart |
| 2260 | 2259 |
| 2261 #endif // defined TARGET_ARCH_MIPS | 2260 #endif // defined TARGET_ARCH_MIPS |