Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 MacroAssembler::MacroAssembler(void* buffer, int size) | 39 MacroAssembler::MacroAssembler(void* buffer, int size) |
| 40 : Assembler(buffer, size), | 40 : Assembler(buffer, size), |
| 41 unresolved_(0), | 41 unresolved_(0), |
| 42 generating_stub_(false), | 42 generating_stub_(false), |
| 43 allow_stub_calls_(true), | 43 allow_stub_calls_(true), |
| 44 code_object_(Heap::undefined_value()) { | 44 code_object_(Heap::undefined_value()) { |
| 45 } | 45 } |
|
William Hesse
2009/06/11 09:32:29
These are already implemented (using incl and addl).
| |
| 46 | 46 |
| 47 | 47 |
| 48 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) { | |
| 49 ASSERT(value > 0); | |
| 50 if (FLAG_native_code_counters && counter->Enabled()) { | |
| 51 movq(kScratchRegister, ExternalReference(counter)); | |
| 52 if (value == 1) { | |
| 53 inc(Operand(kScratchRegister, 0)); | |
| 54 } else { | |
| 55 add(Operand(kScratchRegister, 0), Immediate(value)); | |
| 56 } | |
| 57 } | |
| 58 } | |
| 59 | |
| 60 | |
| 61 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) { | |
| 62 ASSERT(value > 0); | |
| 63 if (FLAG_native_code_counters && counter->Enabled()) { | |
| 64 movq(kScratchRegister, ExternalReference(counter)); | |
| 65 if (value == 1) { | |
| 66 dec(Operand(kScratchRegister, 0)); | |
| 67 } else { | |
| 68 sub(Operand(kScratchRegister, 0), Immediate(value)); | |
| 69 } | |
| 70 } | |
| 71 } | |
| 72 | |
| 73 | |
| 48 void MacroAssembler::Assert(Condition cc, const char* msg) { | 74 void MacroAssembler::Assert(Condition cc, const char* msg) { |
| 49 if (FLAG_debug_code) Check(cc, msg); | 75 if (FLAG_debug_code) Check(cc, msg); |
| 50 } | 76 } |
| 51 | 77 |
| 52 | 78 |
| 53 void MacroAssembler::Check(Condition cc, const char* msg) { | 79 void MacroAssembler::Check(Condition cc, const char* msg) { |
| 54 Label L; | 80 Label L; |
| 55 j(cc, &L); | 81 j(cc, &L); |
| 56 Abort(msg); | 82 Abort(msg); |
| 57 // will not return here | 83 // will not return here |
| (...skipping 22 matching lines...) Expand all Loading... | |
| 80 push(kScratchRegister); | 106 push(kScratchRegister); |
| 81 movq(kScratchRegister, | 107 movq(kScratchRegister, |
| 82 reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)), | 108 reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)), |
| 83 RelocInfo::NONE); | 109 RelocInfo::NONE); |
| 84 push(kScratchRegister); | 110 push(kScratchRegister); |
| 85 CallRuntime(Runtime::kAbort, 2); | 111 CallRuntime(Runtime::kAbort, 2); |
| 86 // will not return here | 112 // will not return here |
| 87 } | 113 } |
| 88 | 114 |
| 89 | 115 |
| 90 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int argc) { | 116 void MacroAssembler::CallStub(CodeStub* stub) { |
| 91 UNIMPLEMENTED(); | 117 ASSERT(allow_stub_calls()); // calls are not allowed in some stubs |
| 118 movq(kScratchRegister, stub->GetCode(), RelocInfo::CODE_TARGET); | |
| 119 call(kScratchRegister); | |
| 92 } | 120 } |
| 93 | 121 |
| 94 | 122 |
| 95 void MacroAssembler::TailCallRuntime(ExternalReference const& a, int b) { | 123 void MacroAssembler::StubReturn(int argc) { |
| 96 UNIMPLEMENTED(); | 124 ASSERT(argc >= 1 && generating_stub()); |
| 125 ret((argc - 1) * kPointerSize); | |
| 97 } | 126 } |
| 98 | 127 |
| 99 | 128 |
| 129 void MacroAssembler::IllegalOperation(int num_arguments) { | |
| 130 if (num_arguments > 0) { | |
| 131 add(rsp, Immediate(num_arguments * kPointerSize)); | |
| 132 } | |
| 133 movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT); | |
| 134 } | |
| 135 | |
| 136 | |
| 137 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { | |
| 138 CallRuntime(Runtime::FunctionForId(id), num_arguments); | |
| 139 } | |
| 140 | |
| 141 | |
| 142 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | |
| 143 // If the expected number of arguments of the runtime function is | |
| 144 // constant, we check that the actual number of arguments match the | |
| 145 // expectation. | |
| 146 if (f->nargs >= 0 && f->nargs != num_arguments) { | |
| 147 IllegalOperation(num_arguments); | |
| 148 return; | |
| 149 } | |
| 150 | |
| 151 Runtime::FunctionId function_id = | |
| 152 static_cast<Runtime::FunctionId>(f->stub_id); | |
| 153 RuntimeStub stub(function_id, num_arguments); | |
| 154 CallStub(&stub); | |
| 155 } | |
| 156 | |
| 157 | |
| 158 void MacroAssembler::TailCallRuntime(ExternalReference const& ext, | |
| 159 int num_arguments) { | |
| 160 // TODO(1236192): Most runtime routines don't need the number of | |
| 161 // arguments passed in because it is constant. At some point we | |
| 162 // should remove this need and make the runtime routine entry code | |
| 163 // smarter. | |
| 164 movq(rax, Immediate(num_arguments)); | |
| 165 JumpToBuiltin(ext); | |
| 166 } | |
| 167 | |
| 168 | |
| 169 void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) { | |
| 170 // Set the entry point and jump to the C entry runtime stub. | |
| 171 movq(rbx, ext); | |
| 172 CEntryStub ces; | |
| 173 movq(kScratchRegister, ces.GetCode(), RelocInfo::CODE_TARGET); | |
| 174 jmp(kScratchRegister); | |
| 175 } | |
| 176 | |
| 177 | |
| 100 void MacroAssembler::Set(Register dst, int64_t x) { | 178 void MacroAssembler::Set(Register dst, int64_t x) { |
| 101 if (is_int32(x)) { | 179 if (is_int32(x)) { |
| 102 movq(dst, Immediate(x)); | 180 movq(dst, Immediate(x)); |
| 103 } else if (is_uint32(x)) { | 181 } else if (is_uint32(x)) { |
| 104 movl(dst, Immediate(x)); | 182 movl(dst, Immediate(x)); |
| 105 } else { | 183 } else { |
| 106 movq(dst, x, RelocInfo::NONE); | 184 movq(dst, x, RelocInfo::NONE); |
| 107 } | 185 } |
| 108 } | 186 } |
| 109 | 187 |
| (...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 238 movq(kScratchRegister, reg_addr); | 316 movq(kScratchRegister, reg_addr); |
| 239 movq(Operand(kScratchRegister, 0), scratch); | 317 movq(Operand(kScratchRegister, 0), scratch); |
| 240 lea(base, Operand(base, kPointerSize)); | 318 lea(base, Operand(base, kPointerSize)); |
| 241 } | 319 } |
| 242 } | 320 } |
| 243 } | 321 } |
| 244 | 322 |
| 245 #endif // ENABLE_DEBUGGER_SUPPORT | 323 #endif // ENABLE_DEBUGGER_SUPPORT |
| 246 | 324 |
| 247 | 325 |
| 326 void MacroAssembler::InvokePrologue(const ParameterCount& expected, | |
| 327 const ParameterCount& actual, | |
| 328 Handle<Code> code_constant, | |
| 329 Register code_register, | |
| 330 Label* done, | |
| 331 InvokeFlag flag) { | |
| 332 bool definitely_matches = false; | |
| 333 Label invoke; | |
| 334 if (expected.is_immediate()) { | |
| 335 ASSERT(actual.is_immediate()); | |
| 336 if (expected.immediate() == actual.immediate()) { | |
| 337 definitely_matches = true; | |
| 338 } else { | |
| 339 movq(rax, Immediate(actual.immediate())); | |
| 340 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel; | |
|
William Hesse
2009/06/11 09:32:29
This is only used once. It is no more constant than the other immediate values used here.
| |
| 341 if (expected.immediate() == sentinel) { | |
| 342 // Don't worry about adapting arguments for built-ins that | |
| 343 // don't want that done. Skip adaption code by making it look | |
| 344 // like we have a match between expected and actual number of | |
| 345 // arguments. | |
| 346 definitely_matches = true; | |
| 347 } else { | |
| 348 movq(rbx, Immediate(expected.immediate())); | |
| 349 } | |
| 350 } | |
| 351 } else { | |
| 352 if (actual.is_immediate()) { | |
| 353 // Expected is in register, actual is immediate. This is the | |
| 354 // case when we invoke function values without going through the | |
| 355 // IC mechanism. | |
| 356 cmp(expected.reg(), Immediate(actual.immediate())); | |
| 357 j(equal, &invoke); | |
| 358 ASSERT(expected.reg().is(rbx)); | |
| 359 movq(rax, Immediate(actual.immediate())); | |
| 360 } else if (!expected.reg().is(actual.reg())) { | |
| 361 // Both expected and actual are in (different) registers. This | |
| 362 // is the case when we invoke functions using call and apply. | |
| 363 cmp(expected.reg(), actual.reg()); | |
| 364 j(equal, &invoke); | |
| 365 ASSERT(actual.reg().is(rax)); | |
| 366 ASSERT(expected.reg().is(rbx)); | |
| 367 } | |
| 368 } | |
| 369 | |
| 370 if (!definitely_matches) { | |
| 371 Handle<Code> adaptor = | |
| 372 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); | |
| 373 if (!code_constant.is_null()) { | |
| 374 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT); | |
| 375 add(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag)); | |
| 376 } else if (!code_register.is(rdx)) { | |
| 377 movq(rdx, code_register); | |
| 378 } | |
| 379 | |
| 380 movq(kScratchRegister, adaptor, RelocInfo::CODE_TARGET); | |
| 381 if (flag == CALL_FUNCTION) { | |
| 382 call(kScratchRegister); | |
| 383 jmp(done); | |
| 384 } else { | |
| 385 jmp(kScratchRegister); | |
| 386 } | |
| 387 bind(&invoke); | |
| 388 } | |
| 389 } | |
| 390 | |
| 391 | |
| 392 | |
| 393 | |
| 394 void MacroAssembler::InvokeCode(Register code, | |
| 395 const ParameterCount& expected, | |
| 396 const ParameterCount& actual, | |
| 397 InvokeFlag flag) { | |
| 398 Label done; | |
| 399 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag); | |
| 400 if (flag == CALL_FUNCTION) { | |
| 401 call(code); | |
| 402 } else { | |
| 403 ASSERT(flag == JUMP_FUNCTION); | |
| 404 jmp(code); | |
| 405 } | |
| 406 bind(&done); | |
| 407 } | |
| 408 | |
| 409 | |
| 410 void MacroAssembler::InvokeCode(Handle<Code> code, | |
| 411 const ParameterCount& expected, | |
| 412 const ParameterCount& actual, | |
| 413 RelocInfo::Mode rmode, | |
| 414 InvokeFlag flag) { | |
| 415 Label done; | |
| 416 Register dummy = rax; | |
| 417 InvokePrologue(expected, actual, code, dummy, &done, flag); | |
| 418 movq(kScratchRegister, code, rmode); | |
| 419 if (flag == CALL_FUNCTION) { | |
| 420 call(kScratchRegister); | |
| 421 } else { | |
| 422 ASSERT(flag == JUMP_FUNCTION); | |
| 423 jmp(kScratchRegister); | |
| 424 } | |
| 425 bind(&done); | |
| 426 } | |
| 248 | 427 |
| 249 | 428 |
| 250 void MacroAssembler::InvokeFunction(Register fun, | 429 void MacroAssembler::InvokeFunction(Register fun, |
| 251 const ParameterCount& actual, | 430 const ParameterCount& actual, |
| 252 InvokeFlag flag) { | 431 InvokeFlag flag) { |
| 253 UNIMPLEMENTED(); | 432 ASSERT(fun.is(rdi)); |
|
William Hesse
2009/06/11 09:32:29
Can we use function instead of fun as the parameter name?
| |
| 433 movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | |
| 434 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | |
| 435 movq(rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); | |
| 436 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); | |
| 437 lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); | |
| 438 | |
| 439 ParameterCount expected(rbx); | |
| 440 InvokeCode(rdx, expected, actual, flag); | |
| 254 } | 441 } |
| 255 | 442 |
| 256 | 443 |
| 257 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 444 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
| 258 push(rbp); | 445 push(rbp); |
| 259 movq(rbp, rsp); | 446 movq(rbp, rsp); |
| 260 push(rsi); // Context. | 447 push(rsi); // Context. |
| 261 push(Immediate(Smi::FromInt(type))); | 448 push(Immediate(Smi::FromInt(type))); |
| 262 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); | 449 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); |
| 263 push(kScratchRegister); | 450 push(kScratchRegister); |
| (...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 376 push(rcx); | 563 push(rcx); |
| 377 | 564 |
| 378 // Clear the top frame. | 565 // Clear the top frame. |
| 379 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); | 566 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); |
| 380 movq(kScratchRegister, c_entry_fp_address); | 567 movq(kScratchRegister, c_entry_fp_address); |
| 381 movq(Operand(kScratchRegister, 0), Immediate(0)); | 568 movq(Operand(kScratchRegister, 0), Immediate(0)); |
| 382 } | 569 } |
| 383 | 570 |
| 384 | 571 |
| 385 } } // namespace v8::internal | 572 } } // namespace v8::internal |
| OLD | NEW |