Index: src/x64/code-stubs-x64.cc |
=================================================================== |
--- src/x64/code-stubs-x64.cc (revision 8025) |
+++ src/x64/code-stubs-x64.cc (working copy) |
@@ -406,32 +406,31 @@ |
} |
-Handle<Code> GetTypeRecordingUnaryOpStub(int key, |
- TRUnaryOpIC::TypeInfo type_info) { |
- TypeRecordingUnaryOpStub stub(key, type_info); |
+Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) { |
+ UnaryOpStub stub(key, type_info); |
return stub.GetCode(); |
} |
-void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { |
+void UnaryOpStub::Generate(MacroAssembler* masm) { |
switch (operand_type_) { |
- case TRUnaryOpIC::UNINITIALIZED: |
+ case UnaryOpIC::UNINITIALIZED: |
GenerateTypeTransition(masm); |
break; |
- case TRUnaryOpIC::SMI: |
+ case UnaryOpIC::SMI: |
GenerateSmiStub(masm); |
break; |
- case TRUnaryOpIC::HEAP_NUMBER: |
+ case UnaryOpIC::HEAP_NUMBER: |
GenerateHeapNumberStub(masm); |
break; |
- case TRUnaryOpIC::GENERIC: |
+ case UnaryOpIC::GENERIC: |
GenerateGenericStub(masm); |
break; |
} |
} |
-void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
__ pop(rcx); // Save return address. |
__ push(rax); |
// Left and right arguments are now on top. |
@@ -446,7 +445,7 @@ |
// Patch the caller to an appropriate specialized stub and return the |
// operation result to the caller of the stub. |
__ TailCallExternalReference( |
- ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), |
+ ExternalReference(IC_Utility(IC::kUnaryOp_Patch), |
masm->isolate()), |
4, |
1); |
@@ -454,7 +453,7 @@ |
// TODO(svenpanne): Use virtual functions instead of switch. |
-void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
switch (op_) { |
case Token::SUB: |
GenerateSmiStubSub(masm); |
@@ -468,7 +467,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
Label slow; |
GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); |
__ bind(&slow); |
@@ -476,7 +475,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
Label non_smi; |
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
__ bind(&non_smi); |
@@ -484,11 +483,11 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
- Label* non_smi, |
- Label* slow, |
- Label::Distance non_smi_near, |
- Label::Distance slow_near) { |
+void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
+ Label* non_smi, |
+ Label* slow, |
+ Label::Distance non_smi_near, |
+ Label::Distance slow_near) { |
Label done; |
__ JumpIfNotSmi(rax, non_smi, non_smi_near); |
__ SmiNeg(rax, rax, &done, Label::kNear); |
@@ -498,10 +497,9 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( |
- MacroAssembler* masm, |
- Label* non_smi, |
- Label::Distance non_smi_near) { |
+void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, |
+ Label* non_smi, |
+ Label::Distance non_smi_near) { |
__ JumpIfNotSmi(rax, non_smi, non_smi_near); |
__ SmiNot(rax, rax); |
__ ret(0); |
@@ -509,7 +507,7 @@ |
// TODO(svenpanne): Use virtual functions instead of switch. |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
switch (op_) { |
case Token::SUB: |
GenerateHeapNumberStubSub(masm); |
@@ -523,7 +521,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
Label non_smi, slow, call_builtin; |
GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); |
__ bind(&non_smi); |
@@ -535,7 +533,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( |
+void UnaryOpStub::GenerateHeapNumberStubBitNot( |
MacroAssembler* masm) { |
Label non_smi, slow; |
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
@@ -546,8 +544,8 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
- Label* slow) { |
+void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
+ Label* slow) { |
// Check if the operand is a heap number. |
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
Heap::kHeapNumberMapRootIndex); |
@@ -587,9 +585,8 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( |
- MacroAssembler* masm, |
- Label* slow) { |
+void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm, |
+ Label* slow) { |
// Check if the operand is a heap number. |
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
Heap::kHeapNumberMapRootIndex); |
@@ -606,7 +603,7 @@ |
// TODO(svenpanne): Use virtual functions instead of switch. |
-void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
switch (op_) { |
case Token::SUB: |
GenerateGenericStubSub(masm); |
@@ -620,7 +617,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
Label non_smi, slow; |
GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); |
__ bind(&non_smi); |
@@ -630,7 +627,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
Label non_smi, slow; |
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
__ bind(&non_smi); |
@@ -640,8 +637,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( |
- MacroAssembler* masm) { |
+void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) { |
// Handle the slow case by jumping to the JavaScript builtin. |
__ pop(rcx); // pop return address |
__ push(rax); |
@@ -659,7 +655,7 @@ |
} |
-const char* TypeRecordingUnaryOpStub::GetName() { |
+const char* UnaryOpStub::GetName() { |
if (name_ != NULL) return name_; |
const int kMaxNameLength = 100; |
name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
@@ -673,23 +669,23 @@ |
} |
OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
- "TypeRecordingUnaryOpStub_%s_%s_%s", |
+ "UnaryOpStub_%s_%s_%s", |
op_name, |
overwrite_name, |
- TRUnaryOpIC::GetName(operand_type_)); |
+ UnaryOpIC::GetName(operand_type_)); |
return name_; |
} |
-Handle<Code> GetTypeRecordingBinaryOpStub(int key, |
- TRBinaryOpIC::TypeInfo type_info, |
- TRBinaryOpIC::TypeInfo result_type_info) { |
- TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); |
+Handle<Code> GetBinaryOpStub(int key, |
+ BinaryOpIC::TypeInfo type_info, |
+ BinaryOpIC::TypeInfo result_type_info) { |
+ BinaryOpStub stub(key, type_info, result_type_info); |
return stub.GetCode(); |
} |
-void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
__ pop(rcx); // Save return address. |
__ push(rdx); |
__ push(rax); |
@@ -705,39 +701,39 @@ |
// Patch the caller to an appropriate specialized stub and return the |
// operation result to the caller of the stub. |
__ TailCallExternalReference( |
- ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), |
+ ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
masm->isolate()), |
5, |
1); |
} |
-void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { |
+void BinaryOpStub::Generate(MacroAssembler* masm) { |
switch (operands_type_) { |
- case TRBinaryOpIC::UNINITIALIZED: |
+ case BinaryOpIC::UNINITIALIZED: |
GenerateTypeTransition(masm); |
break; |
- case TRBinaryOpIC::SMI: |
+ case BinaryOpIC::SMI: |
GenerateSmiStub(masm); |
break; |
- case TRBinaryOpIC::INT32: |
+ case BinaryOpIC::INT32: |
UNREACHABLE(); |
// The int32 case is identical to the Smi case. We avoid creating this |
// ic state on x64. |
break; |
- case TRBinaryOpIC::HEAP_NUMBER: |
+ case BinaryOpIC::HEAP_NUMBER: |
GenerateHeapNumberStub(masm); |
break; |
- case TRBinaryOpIC::ODDBALL: |
+ case BinaryOpIC::ODDBALL: |
GenerateOddballStub(masm); |
break; |
- case TRBinaryOpIC::BOTH_STRING: |
+ case BinaryOpIC::BOTH_STRING: |
GenerateBothStringStub(masm); |
break; |
- case TRBinaryOpIC::STRING: |
+ case BinaryOpIC::STRING: |
GenerateStringStub(masm); |
break; |
- case TRBinaryOpIC::GENERIC: |
+ case BinaryOpIC::GENERIC: |
GenerateGeneric(masm); |
break; |
default: |
@@ -746,7 +742,7 @@ |
} |
-const char* TypeRecordingBinaryOpStub::GetName() { |
+const char* BinaryOpStub::GetName() { |
if (name_ != NULL) return name_; |
const int kMaxNameLength = 100; |
name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
@@ -762,19 +758,20 @@ |
} |
OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
- "TypeRecordingBinaryOpStub_%s_%s_%s", |
+ "BinaryOpStub_%s_%s_%s", |
op_name, |
overwrite_name, |
- TRBinaryOpIC::GetName(operands_type_)); |
+ BinaryOpIC::GetName(operands_type_)); |
return name_; |
} |
-void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, |
+void BinaryOpStub::GenerateSmiCode( |
+ MacroAssembler* masm, |
Label* slow, |
SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { |
- // Arguments to TypeRecordingBinaryOpStub are in rdx and rax. |
+ // Arguments to BinaryOpStub are in rdx and rax. |
Register left = rdx; |
Register right = rax; |
@@ -920,10 +917,9 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateFloatingPointCode( |
- MacroAssembler* masm, |
- Label* allocation_failure, |
- Label* non_numeric_failure) { |
+void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm, |
+ Label* allocation_failure, |
+ Label* non_numeric_failure) { |
switch (op_) { |
case Token::ADD: |
case Token::SUB: |
@@ -1022,12 +1018,12 @@ |
// No fall-through from this generated code. |
if (FLAG_debug_code) { |
__ Abort("Unexpected fall-through in " |
- "TypeRecordingBinaryStub::GenerateFloatingPointCode."); |
             "BinaryOpStub::GenerateFloatingPointCode."); |
} |
} |
-void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
ASSERT(op_ == Token::ADD); |
Label left_not_string, call_runtime; |
@@ -1058,7 +1054,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { |
GenerateRegisterArgsPush(masm); |
switch (op_) { |
case Token::ADD: |
@@ -1100,10 +1096,10 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
Label call_runtime; |
- if (result_type_ == TRBinaryOpIC::UNINITIALIZED || |
- result_type_ == TRBinaryOpIC::SMI) { |
+ if (result_type_ == BinaryOpIC::UNINITIALIZED || |
+ result_type_ == BinaryOpIC::SMI) { |
// Only allow smi results. |
GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); |
} else { |
@@ -1123,19 +1119,19 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
- ASSERT(operands_type_ == TRBinaryOpIC::STRING); |
+void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
+ ASSERT(operands_type_ == BinaryOpIC::STRING); |
ASSERT(op_ == Token::ADD); |
GenerateStringAddCode(masm); |
// Try to add arguments as strings, otherwise, transition to the generic |
- // TRBinaryOpIC type. |
+ // BinaryOpIC type. |
GenerateTypeTransition(masm); |
} |
-void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
Label call_runtime; |
- ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); |
+ ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); |
ASSERT(op_ == Token::ADD); |
// If both arguments are strings, call the string add stub. |
// Otherwise, do a transition. |
@@ -1163,7 +1159,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
Label call_runtime; |
if (op_ == Token::ADD) { |
@@ -1196,7 +1192,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
Label gc_required, not_number; |
GenerateFloatingPointCode(masm, &gc_required, ¬_number); |
@@ -1208,7 +1204,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
Label call_runtime, call_string_add_or_runtime; |
GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); |
@@ -1225,9 +1221,8 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( |
- MacroAssembler* masm, |
- Label* alloc_failure) { |
+void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm, |
+ Label* alloc_failure) { |
Label skip_allocation; |
OverwriteMode mode = mode_; |
switch (mode) { |
@@ -1265,7 +1260,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
__ pop(rcx); |
__ push(rdx); |
__ push(rax); |