// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X87_LITHIUM_CODEGEN_X87_H_
#define V8_X87_LITHIUM_CODEGEN_X87_H_

#include <map>

#include "src/x87/lithium-x87.h"

#include "src/base/logging.h"
#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"
#include "src/x87/lithium-gap-resolver-x87.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class LGapNode;
class SafepointGenerator;

class LCodeGen : public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        dynamic_frame_alignment_(false),
        support_aligned_spilled_doubles_(false),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        x87_stack_(assembler),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
           info()->is_non_deferred_calling() ||
           !info()->IsStub() ||
           info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  X87Register ToX87Register(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  // Support for non-SSE2 (x87) floating point stack handling.
  // These functions maintain the mapping of physical stack registers to our
  // virtual registers between instructions.
  enum X87OperandType { kX87DoubleOperand, kX87FloatOperand, kX87IntOperand };

  void X87Mov(X87Register reg, Operand src,
              X87OperandType operand = kX87DoubleOperand);
  void X87Mov(Operand dst, X87Register reg,
              X87OperandType operand = kX87DoubleOperand);
  void X87Mov(X87Register reg, X87Register src,
              X87OperandType operand = kX87DoubleOperand);
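  // Usage sketch (editorial, not part of the original interface; the operand
  // names are hypothetical). Each X87Mov overload emits the move and updates
  // the virtual-to-physical stack mapping described above, roughly:
  //   X87Mov(result_reg, Operand(ebp, offset));  // memory -> virtual reg
  //   X87Mov(Operand(esp, 0), result_reg);       // virtual reg -> memory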
| 89 | |
| 90 void X87PrepareBinaryOp( | |
| 91 X87Register left, X87Register right, X87Register result); | |
| 92 | |
| 93 void X87LoadForUsage(X87Register reg); | |
| 94 void X87LoadForUsage(X87Register reg1, X87Register reg2); | |
| 95 void X87PrepareToWrite(X87Register reg) { x87_stack_.PrepareToWrite(reg); } | |
| 96 void X87CommitWrite(X87Register reg) { x87_stack_.CommitWrite(reg); } | |
| 97 | |
| 98 void X87Fxch(X87Register reg, int other_slot = 0) { | |
| 99 x87_stack_.Fxch(reg, other_slot); | |
| 100 } | |
| 101 void X87Free(X87Register reg) { | |
| 102 x87_stack_.Free(reg); | |
| 103 } | |
| 104 | |
| 105 | |
| 106 bool X87StackEmpty() { | |
| 107 return x87_stack_.depth() == 0; | |
| 108 } | |
| 109 | |
| 110 Handle<Object> ToHandle(LConstantOperand* op) const; | |
| 111 | |
| 112 // The operand denoting the second word (the one with a higher address) of | |
| 113 // a double stack slot. | |
| 114 Operand HighOperand(LOperand* op); | |
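  // For illustration (editorial; the exact offsets are an assumption): if
  // ToOperand(op) yields the word at [ebp + offset], HighOperand(op) yields
  // the word at [ebp + offset + kPointerSize], i.e. the upper half of the
  // 8-byte double.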
| 115 | |
| 116 // Try to generate code for the entire chunk, but it may fail if the | |
| 117 // chunk contains constructs we cannot handle. Returns true if the | |
| 118 // code generation attempt succeeded. | |
| 119 bool GenerateCode(); | |
| 120 | |
| 121 // Finish the code by setting stack height, safepoint, and bailout | |
| 122 // information on it. | |
| 123 void FinishCode(Handle<Code> code); | |
| 124 | |
| 125 // Deferred code support. | |
| 126 void DoDeferredNumberTagD(LNumberTagD* instr); | |
| 127 | |
| 128 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; | |
| 129 void DoDeferredNumberTagIU(LInstruction* instr, | |
| 130 LOperand* value, | |
| 131 LOperand* temp, | |
| 132 IntegerSignedness signedness); | |
| 133 | |
| 134 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done); | |
| 135 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); | |
| 136 void DoDeferredStackCheck(LStackCheck* instr); | |
| 137 void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr); | |
| 138 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); | |
| 139 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); | |
| 140 void DoDeferredAllocate(LAllocate* instr); | |
| 141 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); | |
| 142 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | |
| 143 Register object, | |
| 144 Register index); | |
| 145 | |
| 146 // Parallel move support. | |
| 147 void DoParallelMove(LParallelMove* move); | |
| 148 void DoGap(LGap* instr); | |
| 149 | |
| 150 // Emit frame translation commands for an environment. | |
| 151 void WriteTranslation(LEnvironment* environment, Translation* translation); | |
| 152 | |
| 153 void EnsureRelocSpaceForDeoptimization(); | |
| 154 | |
| 155 // Declare methods that deal with the individual node types. | |
| 156 #define DECLARE_DO(type) void Do##type(L##type* node); | |
| 157 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) | |
| 158 #undef DECLARE_DO | |
| 159 | |
| 160 private: | |
| 161 LanguageMode language_mode() const { return info()->language_mode(); } | |
| 162 | |
| 163 Scope* scope() const { return scope_; } | |
| 164 | |
| 165 void EmitClassOfTest(Label* if_true, | |
| 166 Label* if_false, | |
| 167 Handle<String> class_name, | |
| 168 Register input, | |
| 169 Register temporary, | |
| 170 Register temporary2); | |
| 171 | |
| 172 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } | |
| 173 | |
| 174 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } | |
| 175 | |
| 176 // Code generation passes. Returns true if code generation should | |
| 177 // continue. | |
| 178 void GenerateBodyInstructionPre(LInstruction* instr) override; | |
| 179 void GenerateBodyInstructionPost(LInstruction* instr) override; | |
| 180 bool GeneratePrologue(); | |
| 181 bool GenerateDeferredCode(); | |
| 182 bool GenerateJumpTable(); | |
| 183 bool GenerateSafepointTable(); | |
| 184 | |
| 185 // Generates the custom OSR entrypoint and sets the osr_pc_offset. | |
| 186 void GenerateOsrPrologue(); | |
| 187 | |
| 188 enum SafepointMode { | |
| 189 RECORD_SIMPLE_SAFEPOINT, | |
| 190 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS | |
| 191 }; | |
| 192 | |
| 193 void CallCode(Handle<Code> code, | |
| 194 RelocInfo::Mode mode, | |
| 195 LInstruction* instr); | |
| 196 | |
| 197 void CallCodeGeneric(Handle<Code> code, | |
| 198 RelocInfo::Mode mode, | |
| 199 LInstruction* instr, | |
| 200 SafepointMode safepoint_mode); | |
| 201 | |
| 202 void CallRuntime(const Runtime::Function* fun, int argc, LInstruction* instr, | |
| 203 SaveFPRegsMode save_doubles = kDontSaveFPRegs); | |
| 204 | |
| 205 void CallRuntime(Runtime::FunctionId id, | |
| 206 int argc, | |
| 207 LInstruction* instr) { | |
| 208 const Runtime::Function* function = Runtime::FunctionForId(id); | |
| 209 CallRuntime(function, argc, instr); | |
| 210 } | |
| 211 | |
| 212 void CallRuntimeFromDeferred(Runtime::FunctionId id, | |
| 213 int argc, | |
| 214 LInstruction* instr, | |
| 215 LOperand* context); | |
| 216 | |
| 217 void LoadContextFromDeferred(LOperand* context); | |
| 218 | |
| 219 // Generate a direct call to a known function. Expects the function | |
| 220 // to be in edi. | |
| 221 void CallKnownFunction(Handle<JSFunction> function, | |
| 222 int formal_parameter_count, int arity, | |
| 223 LInstruction* instr); | |
| 224 | |
| 225 void RecordSafepointWithLazyDeopt(LInstruction* instr, | |
| 226 SafepointMode safepoint_mode); | |
| 227 | |
| 228 void RegisterEnvironmentForDeoptimization(LEnvironment* environment, | |
| 229 Safepoint::DeoptMode mode); | |
| 230 void DeoptimizeIf(Condition cc, LInstruction* instr, | |
| 231 Deoptimizer::DeoptReason deopt_reason, | |
| 232 Deoptimizer::BailoutType bailout_type); | |
| 233 void DeoptimizeIf(Condition cc, LInstruction* instr, | |
| 234 Deoptimizer::DeoptReason deopt_reason); | |
| 235 | |
| 236 bool DeoptEveryNTimes() { | |
| 237 return FLAG_deopt_every_n_times != 0 && !info()->IsStub(); | |
| 238 } | |
| 239 | |
| 240 void AddToTranslation(LEnvironment* environment, | |
| 241 Translation* translation, | |
| 242 LOperand* op, | |
| 243 bool is_tagged, | |
| 244 bool is_uint32, | |
| 245 int* object_index_pointer, | |
| 246 int* dematerialized_index_pointer); | |
| 247 void PopulateDeoptimizationData(Handle<Code> code); | |
| 248 | |
| 249 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | |
| 250 | |
| 251 Register ToRegister(int index) const; | |
| 252 X87Register ToX87Register(int index) const; | |
| 253 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const; | |
| 254 int32_t ToInteger32(LConstantOperand* op) const; | |
| 255 ExternalReference ToExternalReference(LConstantOperand* op) const; | |
| 256 | |
| 257 Operand BuildFastArrayOperand(LOperand* elements_pointer, | |
| 258 LOperand* key, | |
| 259 Representation key_representation, | |
| 260 ElementsKind elements_kind, | |
| 261 uint32_t base_offset); | |
| 262 | |
| 263 Operand BuildSeqStringOperand(Register string, | |
| 264 LOperand* index, | |
| 265 String::Encoding encoding); | |
| 266 | |
| 267 void EmitIntegerMathAbs(LMathAbs* instr); | |
| 268 | |
| 269 // Support for recording safepoint and position information. | |
| 270 void RecordSafepoint(LPointerMap* pointers, | |
| 271 Safepoint::Kind kind, | |
| 272 int arguments, | |
| 273 Safepoint::DeoptMode mode); | |
| 274 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); | |
| 275 void RecordSafepoint(Safepoint::DeoptMode mode); | |
| 276 void RecordSafepointWithRegisters(LPointerMap* pointers, | |
| 277 int arguments, | |
| 278 Safepoint::DeoptMode mode); | |
| 279 | |
| 280 void RecordAndWritePosition(int position) override; | |
| 281 | |
| 282 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | |
| 283 void EmitGoto(int block); | |
| 284 | |
| 285 // EmitBranch expects to be the last instruction of a block. | |
| 286 template<class InstrType> | |
| 287 void EmitBranch(InstrType instr, Condition cc); | |
| 288 template <class InstrType> | |
| 289 void EmitTrueBranch(InstrType instr, Condition cc); | |
| 290 template <class InstrType> | |
| 291 void EmitFalseBranch(InstrType instr, Condition cc); | |
| 292 void EmitNumberUntagDNoSSE2(LNumberUntagD* instr, Register input, | |
| 293 Register temp, X87Register res_reg, | |
| 294 NumberUntagDMode mode); | |
| 295 | |
| 296 // Emits optimized code for typeof x == "y". Modifies input register. | |
| 297 // Returns the condition on which a final split to | |
| 298 // true and false label should be made, to optimize fallthrough. | |
| 299 Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input); | |
| 300 | |
| 301 // Emits optimized code for %_IsString(x). Preserves input register. | |
| 302 // Returns the condition on which a final split to | |
| 303 // true and false label should be made, to optimize fallthrough. | |
| 304 Condition EmitIsString(Register input, | |
| 305 Register temp1, | |
| 306 Label* is_not_string, | |
| 307 SmiCheck check_needed); | |
| 308 | |
| 309 // Emits optimized code for %_IsConstructCall(). | |
| 310 // Caller should branch on equal condition. | |
| 311 void EmitIsConstructCall(Register temp); | |
| 312 | |
| 313 // Emits optimized code to deep-copy the contents of statically known | |
| 314 // object graphs (e.g. object literal boilerplate). | |
| 315 void EmitDeepCopy(Handle<JSObject> object, | |
| 316 Register result, | |
| 317 Register source, | |
| 318 int* offset, | |
| 319 AllocationSiteMode mode); | |
| 320 | |
| 321 void EnsureSpaceForLazyDeopt(int space_needed) override; | |
| 322 void DoLoadKeyedExternalArray(LLoadKeyed* instr); | |
| 323 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); | |
| 324 void DoLoadKeyedFixedArray(LLoadKeyed* instr); | |
| 325 void DoStoreKeyedExternalArray(LStoreKeyed* instr); | |
| 326 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); | |
| 327 void DoStoreKeyedFixedArray(LStoreKeyed* instr); | |
| 328 | |
| 329 template <class T> | |
| 330 void EmitVectorLoadICRegisters(T* instr); | |
| 331 template <class T> | |
| 332 void EmitVectorStoreICRegisters(T* instr); | |
| 333 | |
| 334 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment); | |
| 335 | |
| 336 // Emits code for pushing either a tagged constant, a (non-double) | |
| 337 // register, or a stack slot operand. | |
| 338 void EmitPushTaggedOperand(LOperand* operand); | |
| 339 | |
| 340 void X87Fld(Operand src, X87OperandType opts); | |
| 341 | |
| 342 void EmitFlushX87ForDeopt(); | |
| 343 void FlushX87StackIfNecessary(LInstruction* instr) { | |
| 344 x87_stack_.FlushIfNecessary(instr, this); | |
| 345 } | |
| 346 friend class LGapResolver; | |
| 347 | |
#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in the range
  // esp + offset - page_size .. esp, in turn.
  void MakeSureStackPagesMapped(int offset);
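  // A minimal sketch of the implied probing loop (editorial; the real body
  // lives in lithium-codegen-x87.cc, and the page size and scratch register
  // used here are assumptions):
  //   const int kPageSize = 4 * KB;
  //   for (int ofs = offset - kPageSize; ofs > 0; ofs -= kPageSize) {
  //     __ mov(Operand(esp, ofs), eax);  // touch one page per iteration
  //   }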
#endif

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  bool dynamic_frame_alignment_;
  bool support_aligned_spilled_doubles_;
  int osr_pc_offset_;
  bool frame_is_built_;

  class X87Stack : public ZoneObject {
   public:
    explicit X87Stack(MacroAssembler* masm)
        : stack_depth_(0), is_mutable_(true), masm_(masm) {}
    explicit X87Stack(const X87Stack& other)
        : stack_depth_(other.stack_depth_), is_mutable_(false),
          masm_(other.masm_) {
      for (int i = 0; i < stack_depth_; i++) {
        stack_[i] = other.stack_[i];
      }
    }
    bool operator==(const X87Stack& other) const {
      if (stack_depth_ != other.stack_depth_) return false;
      for (int i = 0; i < stack_depth_; i++) {
        if (!stack_[i].is(other.stack_[i])) return false;
      }
      return true;
    }
    X87Stack& operator=(const X87Stack& other) {
      stack_depth_ = other.stack_depth_;
      for (int i = 0; i < stack_depth_; i++) {
        stack_[i] = other.stack_[i];
      }
      return *this;
    }
    bool Contains(X87Register reg);
    void Fxch(X87Register reg, int other_slot = 0);
    void Free(X87Register reg);
    void PrepareToWrite(X87Register reg);
    void CommitWrite(X87Register reg);
    void FlushIfNecessary(LInstruction* instr, LCodeGen* cgen);
    void LeavingBlock(int current_block_id, LGoto* goto_instr, LCodeGen* cgen);
    int depth() const { return stack_depth_; }
    int GetLayout();
    int st(X87Register reg) { return st2idx(ArrayIndex(reg)); }
    void pop() {
      DCHECK(is_mutable_);
      USE(is_mutable_);
      stack_depth_--;
    }
    void push(X87Register reg) {
      DCHECK(is_mutable_);
      DCHECK(stack_depth_ < X87Register::kMaxNumAllocatableRegisters);
      stack_[stack_depth_] = reg;
      stack_depth_++;
    }

    MacroAssembler* masm() const { return masm_; }
    Isolate* isolate() const { return masm_->isolate(); }

   private:
    int ArrayIndex(X87Register reg);
    int st2idx(int pos);

    X87Register stack_[X87Register::kMaxNumAllocatableRegisters];
    int stack_depth_;
    bool is_mutable_;
    MacroAssembler* masm_;
  };
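  // The live x87 FPU stack model for the instruction currently being
  // generated. Deferred code receives an immutable snapshot of it (see the
  // X87Stack copy constructor above and LDeferredCode below).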
  X87Stack x87_stack_;
  // block_id -> X87Stack*
  typedef std::map<int, X87Stack*> X87StackMap;
  X87StackMap x87_stack_map_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      DCHECK(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
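
  // Usage sketch (editorial, not from the original header): deferred paths
  // that call into the runtime typically wrap the call in this scope, e.g.
  //
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr,
  //                             instr->context());
  //   }  // Registers popped; expected_safepoint_kind_ back to kSimple.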

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  friend class X87Stack;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen,
                         const LCodeGen::X87Stack& x87_stack)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_),
        x87_stack_(x87_stack) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }
  const LCodeGen::X87Stack& x87_stack() const { return x87_stack_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
  LCodeGen::X87Stack x87_stack_;
};
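
// A concrete subclass, for illustration (editorial sketch; the real deferred
// classes are defined in lithium-codegen-x87.cc and follow this shape):
//
//   class DeferredStackCheck final : public LDeferredCode {
//    public:
//     DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr,
//                        const LCodeGen::X87Stack& x87_stack)
//         : LDeferredCode(codegen, x87_stack), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredStackCheck(instr_); }
//     LInstruction* instr() override { return instr_; }
//
//    private:
//     LStackCheck* instr_;
//   };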

}  // namespace internal
}  // namespace v8

#endif  // V8_X87_LITHIUM_CODEGEN_X87_H_