OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1931 matching lines...)
1942 __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq); | 1942 __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq); |
1943 } | 1943 } |
1944 | 1944 |
1945 | 1945 |
1946 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { | 1946 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { |
1947 Abort("DoInstanceOfAndBranch unimplemented."); | 1947 Abort("DoInstanceOfAndBranch unimplemented."); |
1948 } | 1948 } |
1949 | 1949 |
1950 | 1950 |
1951 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 1951 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
1952 Abort("DoInstanceOfKnownGlobal unimplemented."); | 1952 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
| 1953 public: |
| 1954 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
| 1955 LInstanceOfKnownGlobal* instr) |
| 1956 : LDeferredCode(codegen), instr_(instr) { } |
| 1957 virtual void Generate() { |
| 1958 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); |
| 1959 } |
| 1960 |
| 1961 Label* map_check() { return &map_check_; } |
| 1962 |
| 1963 private: |
| 1964 LInstanceOfKnownGlobal* instr_; |
| 1965 Label map_check_; |
| 1966 }; |
| 1967 |
| 1968 DeferredInstanceOfKnownGlobal* deferred = |
| 1969 new DeferredInstanceOfKnownGlobal(this, instr); |
| 1970 |
| 1971 Label done, false_result; |
| 1972 Register object = ToRegister(instr->input()); |
| 1973 Register temp = ToRegister(instr->temp()); |
| 1974 Register result = ToRegister(instr->result()); |
| 1975 |
| 1976 ASSERT(object.is(r0)); |
| 1977 ASSERT(result.is(r0)); |
| 1978 |
| 1979 // A Smi is not an instance of anything. |
| 1980 __ BranchOnSmi(object, &false_result); |
| 1981 |
| 1982 // This is the inlined call site instanceof cache. The two occurrences of the |
| 1983 // hole value will be patched to the last map/result pair generated by the |
| 1984 // instanceof stub. |
| 1985 Label cache_miss; |
| 1986 Register map = temp; |
| 1987 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 1988 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 1989 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 1990 // root array to force relocation, so that this instruction can later be |
| 1991 // patched with the cached map. |
| 1992 __ mov(ip, Operand(Factory::the_hole_value())); |
| 1993 __ cmp(map, Operand(ip)); |
| 1994 __ b(ne, &cache_miss); |
| 1995 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 1996 // root array to force relocation, so that this instruction can later be |
| 1997 // patched with the true or false object. |
| 1998 __ mov(result, Operand(Factory::the_hole_value())); |
| 1999 __ b(&done); |
| 2000 |
| 2001 // The inlined call site cache did not match. Check null and string before |
| 2002 // calling the deferred code. |
| 2003 __ bind(&cache_miss); |
| 2004 // Null is not an instance of anything. |
| 2005 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 2006 __ cmp(object, Operand(ip)); |
| 2007 __ b(eq, &false_result); |
| 2008 |
| 2009 // String values are not instances of anything. |
| 2010 Condition is_string = masm_->IsObjectStringType(object, temp); |
| 2011 __ b(is_string, &false_result); |
| 2012 |
| 2013 // Go to the deferred code. |
| 2014 __ b(deferred->entry()); |
| 2015 |
| 2016 __ bind(&false_result); |
| 2017 __ LoadRoot(result, Heap::kFalseValueRootIndex); |
| 2018 |
| 2019 // Here result holds either the true or the false object. The deferred code |
| 2020 // also produces a true or false object. |
| 2021 __ bind(deferred->exit()); |
| 2022 __ bind(&done); |
| 2023 } |
| 2024 |
| 2025 |
| 2026 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
| 2027 Label* map_check) { |
| 2028 Register result = ToRegister(instr->result()); |
| 2029 ASSERT(result.is(r0)); |
| 2030 |
| 2031 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
| 2032 flags = static_cast<InstanceofStub::Flags>( |
| 2033 flags | InstanceofStub::kArgsInRegisters); |
| 2034 flags = static_cast<InstanceofStub::Flags>( |
| 2035 flags | InstanceofStub::kCallSiteInlineCheck); |
| 2036 flags = static_cast<InstanceofStub::Flags>( |
| 2037 flags | InstanceofStub::kReturnTrueFalseObject); |
| 2038 InstanceofStub stub(flags); |
| 2039 |
| 2040 __ PushSafepointRegisters(); |
| 2041 |
| 2042 // Get the temp register reserved by the instruction. This needs to be r4 as |
| 2043 // its safepoint register slot (pushed above) is used to communicate the |
| 2044 // offset to the location of the map check. |
| 2045 Register temp = ToRegister(instr->temp()); |
| 2046 ASSERT(temp.is(r4)); |
| 2047 __ mov(InstanceofStub::right(), Operand(instr->function())); |
| 2048 static const int kAdditionalDelta = 4; |
| 2049 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
| 2050 Label before_push_delta; |
| 2051 __ bind(&before_push_delta); |
| 2052 __ BlockConstPoolFor(kAdditionalDelta); |
| 2053 __ mov(temp, Operand(delta * kPointerSize)); |
| 2054 __ StoreToSafepointRegisterSlot(temp); |
| 2055 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 2056 ASSERT_EQ(kAdditionalDelta, |
| 2057 masm_->InstructionsGeneratedSince(&before_push_delta)); |
| 2058 RecordSafepointWithRegisters( |
| 2059 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 2060 // Put the result value into the result register slot and |
| 2061 // restore all registers. |
| 2062 __ StoreToSafepointRegisterSlot(result); |
| 2063 |
| 2064 __ PopSafepointRegisters(); |
1953 } | 2065 } |
1954 | 2066 |
1955 | 2067 |
1956 static Condition ComputeCompareCondition(Token::Value op) { | 2068 static Condition ComputeCompareCondition(Token::Value op) { |
1957 switch (op) { | 2069 switch (op) { |
1958 case Token::EQ_STRICT: | 2070 case Token::EQ_STRICT: |
1959 case Token::EQ: | 2071 case Token::EQ: |
1960 return eq; | 2072 return eq; |
1961 case Token::LT: | 2073 case Token::LT: |
1962 return lt; | 2074 return lt; |
(...skipping 1289 matching lines...)
3252 | 3364 |
3253 | 3365 |
3254 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 3366 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
3255 Abort("DoOsrEntry unimplemented."); | 3367 Abort("DoOsrEntry unimplemented."); |
3256 } | 3368 } |
3257 | 3369 |
3258 | 3370 |
3259 #undef __ | 3371 #undef __ |
3260 | 3372 |
3261 } } // namespace v8::internal | 3373 } } // namespace v8::internal |