Index: src/x64/code-stubs-x64.cc |
=================================================================== |
--- src/x64/code-stubs-x64.cc (revision 8025) |
+++ src/x64/code-stubs-x64.cc (working copy) |
@@ -406,32 +406,32 @@ |
} |
-Handle<Code> GetTypeRecordingUnaryOpStub(int key, |
- TRUnaryOpIC::TypeInfo type_info) { |
- TypeRecordingUnaryOpStub stub(key, type_info); |
+Handle<Code> GetUnaryOpStub(int key, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Indentation.
fschneider
2011/05/24 12:16:41
Done.
|
+ UnaryOpIC::TypeInfo type_info) { |
+ UnaryOpStub stub(key, type_info); |
return stub.GetCode(); |
} |
-void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { |
+void UnaryOpStub::Generate(MacroAssembler* masm) { |
switch (operand_type_) { |
- case TRUnaryOpIC::UNINITIALIZED: |
+ case UnaryOpIC::UNINITIALIZED: |
GenerateTypeTransition(masm); |
break; |
- case TRUnaryOpIC::SMI: |
+ case UnaryOpIC::SMI: |
GenerateSmiStub(masm); |
break; |
- case TRUnaryOpIC::HEAP_NUMBER: |
+ case UnaryOpIC::HEAP_NUMBER: |
GenerateHeapNumberStub(masm); |
break; |
- case TRUnaryOpIC::GENERIC: |
+ case UnaryOpIC::GENERIC: |
GenerateGenericStub(masm); |
break; |
} |
} |
-void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
__ pop(rcx); // Save return address. |
__ push(rax); |
// Left and right arguments are now on top. |
@@ -446,7 +446,7 @@ |
// Patch the caller to an appropriate specialized stub and return the |
// operation result to the caller of the stub. |
__ TailCallExternalReference( |
- ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), |
+ ExternalReference(IC_Utility(IC::kUnaryOp_Patch), |
masm->isolate()), |
4, |
1); |
@@ -454,7 +454,7 @@ |
// TODO(svenpanne): Use virtual functions instead of switch. |
-void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
switch (op_) { |
case Token::SUB: |
GenerateSmiStubSub(masm); |
@@ -468,7 +468,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
Label slow; |
GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); |
__ bind(&slow); |
@@ -476,7 +476,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
Label non_smi; |
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
__ bind(&non_smi); |
@@ -484,7 +484,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
+void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Indentation.
fschneider
2011/05/24 12:16:41
Done.
|
Label* non_smi, |
Label* slow, |
Label::Distance non_smi_near, |
@@ -498,7 +498,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( |
+void UnaryOpStub::GenerateSmiCodeBitNot( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
|
MacroAssembler* masm, |
Label* non_smi, |
Label::Distance non_smi_near) { |
@@ -509,7 +509,7 @@ |
// TODO(svenpanne): Use virtual functions instead of switch. |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
switch (op_) { |
case Token::SUB: |
GenerateHeapNumberStubSub(masm); |
@@ -523,7 +523,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
Label non_smi, slow, call_builtin; |
GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); |
__ bind(&non_smi); |
@@ -535,7 +535,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( |
+void UnaryOpStub::GenerateHeapNumberStubBitNot( |
MacroAssembler* masm) { |
Label non_smi, slow; |
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
@@ -546,7 +546,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
+void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Indentation.
fschneider
2011/05/24 12:16:41
Done.
|
Label* slow) { |
// Check if the operand is a heap number. |
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
@@ -587,7 +587,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( |
+void UnaryOpStub::GenerateHeapNumberCodeBitNot( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
|
MacroAssembler* masm, |
Label* slow) { |
// Check if the operand is a heap number. |
@@ -606,7 +606,7 @@ |
// TODO(svenpanne): Use virtual functions instead of switch. |
-void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
switch (op_) { |
case Token::SUB: |
GenerateGenericStubSub(masm); |
@@ -620,7 +620,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
Label non_smi, slow; |
GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); |
__ bind(&non_smi); |
@@ -630,7 +630,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
+void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
Label non_smi, slow; |
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
__ bind(&non_smi); |
@@ -640,7 +640,7 @@ |
} |
-void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( |
+void UnaryOpStub::GenerateGenericCodeFallback( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits one line?
fschneider
2011/05/24 12:16:41
Done.
|
MacroAssembler* masm) { |
// Handle the slow case by jumping to the JavaScript builtin. |
__ pop(rcx); // pop return address |
@@ -659,7 +659,7 @@ |
} |
-const char* TypeRecordingUnaryOpStub::GetName() { |
+const char* UnaryOpStub::GetName() { |
if (name_ != NULL) return name_; |
const int kMaxNameLength = 100; |
name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
@@ -673,23 +673,23 @@ |
} |
OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
- "TypeRecordingUnaryOpStub_%s_%s_%s", |
+ "UnaryOpStub_%s_%s_%s", |
op_name, |
overwrite_name, |
- TRUnaryOpIC::GetName(operand_type_)); |
+ UnaryOpIC::GetName(operand_type_)); |
return name_; |
} |
-Handle<Code> GetTypeRecordingBinaryOpStub(int key, |
- TRBinaryOpIC::TypeInfo type_info, |
- TRBinaryOpIC::TypeInfo result_type_info) { |
- TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); |
+Handle<Code> GetBinaryOpStub(int key, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Full indentation or int key on separate line.
fschneider
2011/05/24 12:16:41
Done.
|
+ BinaryOpIC::TypeInfo type_info, |
+ BinaryOpIC::TypeInfo result_type_info) { |
+ BinaryOpStub stub(key, type_info, result_type_info); |
return stub.GetCode(); |
} |
-void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
__ pop(rcx); // Save return address. |
__ push(rdx); |
__ push(rax); |
@@ -705,39 +705,39 @@ |
// Patch the caller to an appropriate specialized stub and return the |
// operation result to the caller of the stub. |
__ TailCallExternalReference( |
- ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), |
+ ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
masm->isolate()), |
5, |
1); |
} |
-void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { |
+void BinaryOpStub::Generate(MacroAssembler* masm) { |
switch (operands_type_) { |
- case TRBinaryOpIC::UNINITIALIZED: |
+ case BinaryOpIC::UNINITIALIZED: |
GenerateTypeTransition(masm); |
break; |
- case TRBinaryOpIC::SMI: |
+ case BinaryOpIC::SMI: |
GenerateSmiStub(masm); |
break; |
- case TRBinaryOpIC::INT32: |
+ case BinaryOpIC::INT32: |
UNREACHABLE(); |
// The int32 case is identical to the Smi case. We avoid creating this |
// ic state on x64. |
break; |
- case TRBinaryOpIC::HEAP_NUMBER: |
+ case BinaryOpIC::HEAP_NUMBER: |
GenerateHeapNumberStub(masm); |
break; |
- case TRBinaryOpIC::ODDBALL: |
+ case BinaryOpIC::ODDBALL: |
GenerateOddballStub(masm); |
break; |
- case TRBinaryOpIC::BOTH_STRING: |
+ case BinaryOpIC::BOTH_STRING: |
GenerateBothStringStub(masm); |
break; |
- case TRBinaryOpIC::STRING: |
+ case BinaryOpIC::STRING: |
GenerateStringStub(masm); |
break; |
- case TRBinaryOpIC::GENERIC: |
+ case BinaryOpIC::GENERIC: |
GenerateGeneric(masm); |
break; |
default: |
@@ -746,7 +746,7 @@ |
} |
-const char* TypeRecordingBinaryOpStub::GetName() { |
+const char* BinaryOpStub::GetName() { |
if (name_ != NULL) return name_; |
const int kMaxNameLength = 100; |
name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
@@ -762,19 +762,19 @@ |
} |
OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
- "TypeRecordingBinaryOpStub_%s_%s_%s", |
+ "BinaryOpStub_%s_%s_%s", |
op_name, |
overwrite_name, |
- TRBinaryOpIC::GetName(operands_type_)); |
+ BinaryOpIC::GetName(operands_type_)); |
return name_; |
} |
-void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, |
+void BinaryOpStub::GenerateSmiCode(MacroAssembler* masm, |
Søren Thygesen Gjesse
2011/05/24 11:33:11
MacroAssembler* masm, on separate line.
fschneider
2011/05/24 12:16:41
Done.
|
Label* slow, |
SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { |
- // Arguments to TypeRecordingBinaryOpStub are in rdx and rax. |
+ // Arguments to BinaryOpStub are in rdx and rax. |
Register left = rdx; |
Register right = rax; |
@@ -920,7 +920,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateFloatingPointCode( |
+void BinaryOpStub::GenerateFloatingPointCode( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
|
MacroAssembler* masm, |
Label* allocation_failure, |
Label* non_numeric_failure) { |
@@ -1022,12 +1022,12 @@ |
// No fall-through from this generated code. |
if (FLAG_debug_code) { |
__ Abort("Unexpected fall-through in " |
- "TypeRecordingBinaryStub::GenerateFloatingPointCode."); |
+ "BinaryStub::GenerateFloatingPointCode."); |
} |
} |
-void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
ASSERT(op_ == Token::ADD); |
Label left_not_string, call_runtime; |
@@ -1058,7 +1058,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { |
GenerateRegisterArgsPush(masm); |
switch (op_) { |
case Token::ADD: |
@@ -1100,10 +1100,10 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
Label call_runtime; |
- if (result_type_ == TRBinaryOpIC::UNINITIALIZED || |
- result_type_ == TRBinaryOpIC::SMI) { |
+ if (result_type_ == BinaryOpIC::UNINITIALIZED || |
+ result_type_ == BinaryOpIC::SMI) { |
// Only allow smi results. |
GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); |
} else { |
@@ -1123,19 +1123,19 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
- ASSERT(operands_type_ == TRBinaryOpIC::STRING); |
+void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
+ ASSERT(operands_type_ == BinaryOpIC::STRING); |
ASSERT(op_ == Token::ADD); |
GenerateStringAddCode(masm); |
// Try to add arguments as strings, otherwise, transition to the generic |
- // TRBinaryOpIC type. |
+ // BinaryOpIC type. |
GenerateTypeTransition(masm); |
} |
-void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
Label call_runtime; |
- ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); |
+ ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); |
ASSERT(op_ == Token::ADD); |
// If both arguments are strings, call the string add stub. |
// Otherwise, do a transition. |
@@ -1163,7 +1163,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
Label call_runtime; |
if (op_ == Token::ADD) { |
@@ -1196,7 +1196,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
Label gc_required, not_number; |
GenerateFloatingPointCode(masm, &gc_required, ¬_number); |
@@ -1208,7 +1208,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
Label call_runtime, call_string_add_or_runtime; |
GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); |
@@ -1225,7 +1225,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( |
+void BinaryOpStub::GenerateHeapResultAllocation( |
Søren Thygesen Gjesse
2011/05/24 11:33:11
Fits full indentation?
fschneider
2011/05/24 12:16:41
Done.
|
MacroAssembler* masm, |
Label* alloc_failure) { |
Label skip_allocation; |
@@ -1265,7 +1265,7 @@ |
} |
-void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
+void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
__ pop(rcx); |
__ push(rdx); |
__ push(rax); |