Chromium Code Reviews

Unified Diff: src/sh4/builtins-sh4.cc

Issue 11275184: First draft of the sh4 port
Base URL: http://github.com/v8/v8.git@master
Patch Set: Use GYP and fix some typos. Created 8 years, 1 month ago
Index: src/sh4/builtins-sh4.cc
diff --git a/src/arm/builtins-arm.cc b/src/sh4/builtins-sh4.cc
similarity index 90%
copy from src/arm/builtins-arm.cc
copy to src/sh4/builtins-sh4.cc
index 2d1d7b1199712556880ac857eec03ab7facc2aa6..f8c34cebb6a1d56d9bf0f2886e4f23ab4054b07c 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/sh4/builtins-sh4.cc
@@ -1,4 +1,4 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
+// Copyright 2011-2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,7 +27,7 @@
#include "v8.h"
-#if defined(V8_TARGET_ARCH_ARM)
+#if defined(V8_TARGET_ARCH_SH4)
#include "codegen.h"
#include "debug.h"
@@ -42,6 +42,10 @@ namespace internal {
#define __ ACCESS_MASM(masm)
+// Define register map
+#include "map-sh4.h"
+
+
void Builtins::Generate_Adaptor(MacroAssembler* masm,
CFunctionId id,
BuiltinExtraArguments extra_args) {
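The ported code keeps the ARM register names (r0-r7, ip, cp, sp) throughout; map-sh4.h presumably aliases those names onto SH4 machine registers so the copied ARM source compiles unchanged, with the sh4_-prefixed names (sh4_r5, sh4_r8, ...) used where the mapping must be bypassed. The header itself is not shown in this file; a purely hypothetical sketch of the idea:

  // map-sh4.h (hypothetical sketch; the real header belongs to this patch
  // set but is not in this file): route ARM-style names to SH4 registers.
  #define r0 sh4_r0
  #define r1 sh4_r1
  #define ip sh4_ip   // scratch used for expanded shift operands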
@@ -185,8 +189,8 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
__ bind(&loop);
__ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
__ bind(&entry);
- __ cmp(scratch1, scratch2);
- __ b(lt, &loop);
+ __ cmpge(scratch1, scratch2);
+ __ bf(&loop);
}
}
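This first hunk shows the branch rewrite applied all through the file. ARM compares set condition flags that a later b(lt, ...) can test; SH4 has a single T bit, written by directed compares (cmpge, cmpgt, cmpeq) and read by bt (branch if T set) or bf (branch if T clear). A loop bound that ARM checks with cmp + b(lt) therefore becomes, as a sketch of the pattern:

  // ARM: loop while scratch1 < scratch2.
  __ cmp(scratch1, scratch2);
  __ b(lt, &loop);
  // SH4: T = (scratch1 >= scratch2); loop while T is clear.
  __ cmpge(scratch1, scratch2);
  __ bf(&loop);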
@@ -223,9 +227,10 @@ static void AllocateJSArray(MacroAssembler* masm,
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ mov(elements_array_end,
Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
+ __ asr(scratch1, array_size, Operand(kSmiTagSize));
__ add(elements_array_end,
elements_array_end,
- Operand(array_size, ASR, kSmiTagSize));
+ scratch1);
__ AllocateInNewSpace(
elements_array_end,
result,
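A second pattern that recurs throughout the port: ARM ALU instructions accept a shifted register as their second operand (Operand(rm, ASR, n)); SH4 has no such addressing mode, so each one becomes an explicit shift into a scratch register followed by the plain register-register op. Sketched with the registers from this hunk:

  // ARM: shift folded into the add.
  __ add(elements_array_end, elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  // SH4: materialize the shifted value first.
  __ asr(scratch1, array_size, Operand(kSmiTagSize));
  __ add(elements_array_end, elements_array_end, scratch1);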
@@ -277,9 +282,11 @@ static void AllocateJSArray(MacroAssembler* masm,
// elements_array_storage: elements array element storage
// array_size: smi-tagged size of elements array
STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ __ lsl(elements_array_end, array_size,
+ Operand(kPointerSizeLog2 - kSmiTagSize));
__ add(elements_array_end,
elements_array_storage,
- Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));
+ elements_array_end);
// Fill the allocated FixedArray with the hole value if requested.
// result: JSObject
@@ -288,13 +295,13 @@ static void AllocateJSArray(MacroAssembler* masm,
if (fill_with_hole) {
Label loop, entry;
__ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
- __ jmp(&entry);
+ __ jmp_near(&entry);
__ bind(&loop);
__ str(scratch1,
MemOperand(elements_array_storage, kPointerSize, PostIndex));
__ bind(&entry);
- __ cmp(elements_array_storage, elements_array_end);
- __ b(lt, &loop);
+ __ cmpge(elements_array_storage, elements_array_end);
+ __ bf(&loop);
}
}
@@ -335,7 +342,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
// Set up return value, remove receiver from stack and return.
__ mov(r0, r2);
__ add(sp, sp, Operand(kPointerSize));
- __ Jump(lr);
+ __ Ret();
// Check for one argument. Bail out if argument is not smi or if it is
// negative.
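ARM returns by branching to the link register (lr); on SH4 the return address is kept in pr and the return instruction is rts. The port goes through the Ret() macro here (and calls rts() directly later in the file), which presumably expands to rts plus its branch delay slot:

  __ Jump(lr);   // ARM: branch to the return address in lr
  __ Ret();      // SH4: return via pr (presumably rts + delay slot)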
@@ -351,14 +358,15 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ b(&empty_array);
__ bind(&not_empty_array);
- __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
+ __ land(r3, r2, Operand(kIntptrSignBit | kSmiTagMask));
+ __ cmp(r3, Operand(0));
__ b(ne, call_generic_code);
// Handle construction of an empty array of a certain size. Bail out if size
// is too large to actually allocate an elements array.
STATIC_ASSERT(kSmiTag == 0);
- __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
- __ b(ge, call_generic_code);
+ __ cmpge(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
+ __ bt(call_generic_code);
// r0: argc
// r1: constructor
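Two more recurring rewrites appear here. ARM ALU instructions can update the flags as a side effect (the SetCC variant); SH4 ALU instructions never touch T, so the port follows the operation with an explicit compare against zero. And cmp + b(ge) becomes the directed cmpge plus bt. As used in this hunk:

  // ARM: and + flag update in one instruction.
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);
  // SH4: compute, then test the result explicitly (T = (r3 == 0)).
  __ land(r3, r2, Operand(kIntptrSignBit | kSmiTagMask));
  __ cmp(r3, Operand(0));
  __ b(ne, call_generic_code);   // presumably taken when T is clear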
@@ -375,14 +383,14 @@ static void ArrayNativeCode(MacroAssembler* masm,
true,
call_generic_code);
__ IncrementCounter(counters->array_function_native(), 1, r2, r4);
// Set up return value, remove receiver and argument from stack and return.
__ mov(r0, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
- __ Jump(lr);
+ __ Ret();
// Handle construction of an array from a list of arguments.
__ bind(&argc_two_or_more);
- __ mov(r2, Operand(r0, LSL, kSmiTagSize)); // Convet argc to a smi.
+ __ lsl(r2, r0, Operand(kSmiTagSize)); // Convert argc to a smi.
// r0: argc
// r1: constructor
@@ -419,8 +427,8 @@ static void ArrayNativeCode(MacroAssembler* masm,
}
__ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
__ bind(&entry);
- __ cmp(r4, r5);
- __ b(lt, &loop);
+ __ cmpge(r4, r5);
+ __ bf(&loop);
__ bind(&finish);
__ mov(sp, r7);
@@ -432,7 +440,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
// sp[0]: receiver
__ add(sp, sp, Operand(kPointerSize));
__ mov(r0, r3);
- __ Jump(lr);
+ __ Ret();
__ bind(&has_non_smi_element);
// Double values are handled by the runtime.
@@ -465,8 +473,8 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ bind(&loop2);
__ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
__ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
- __ cmp(r4, r5);
- __ b(lt, &loop2);
+ __ cmpge(r4, r5);
+ __ bf(&loop2);
__ b(&finish);
}
@@ -487,7 +495,7 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ tst(r2, Operand(kSmiTagMask));
__ Assert(ne, "Unexpected initial map for InternalArray function");
- __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
__ Assert(eq, "Unexpected initial map for InternalArray function");
}
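The SH4 flavor of CompareObjectType takes an extra condition argument (eq here) naming the relation to compute into the T bit, because the following Assert or branch can only test T set or clear, not an arbitrary ARM condition code. The semantics below are inferred from the call sites in this file:

  // ARM: the compare leaves flags; Assert may test any condition.
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
  __ Assert(eq, "Unexpected initial map for InternalArray function");
  // SH4: request T = (object type == MAP_TYPE), then test T.
  __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
  __ Assert(eq, "Unexpected initial map for InternalArray function");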
@@ -521,7 +529,7 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ tst(r2, Operand(kSmiTagMask));
__ Assert(ne, "Unexpected initial map for Array function");
- __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
__ Assert(eq, "Unexpected initial map for Array function");
}
@@ -554,7 +562,7 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ tst(r2, Operand(kSmiTagMask));
__ Assert(ne, "Unexpected initial map for Array function");
- __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
__ Assert(eq, "Unexpected initial map for Array function");
}
@@ -584,7 +592,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
Register function = r1;
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
- __ cmp(function, Operand(r2));
+ __ cmp(function, r2);
__ Assert(eq, "Unexpected String function");
}
@@ -594,7 +602,8 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ b(eq, &no_arguments);
// First args = sp[(argc - 1) * 4].
__ sub(r0, r0, Operand(1));
- __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
+ __ lsl(r0, r0, Operand(kPointerSizeLog2));
+ __ ldr(r0, MemOperand(sp, r0));
// sp now points to args[0], drop args[0] + receiver.
__ Drop(2);
@@ -703,7 +712,7 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
__ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ mov(pc, r2);
+ __ jmp(r2);
}
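ARM exposes the program counter as an ordinary register, so a computed tail call can be written as a move into pc; SH4's pc is not a general register, and the port uses an indirect jump instead:

  __ mov(pc, r2);   // ARM: writing pc is a branch
  __ jmp(r2);       // SH4: indirect jump through a general register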
@@ -756,7 +765,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
FrameScope scope(masm, StackFrame::CONSTRUCT);
// Preserve the two incoming parameters on the stack.
- __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+ __ lsl(r0, r0, Operand(kSmiTagSize));
__ push(r0); // Smi-tagged arguments count.
__ push(r1); // Constructor function.
@@ -774,20 +783,20 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ b(ne, &rt_call);
#endif
- // Load the initial map and verify that it is in fact a map.
- // r1: constructor function
- __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
- __ JumpIfSmi(r2, &rt_call);
- __ CompareObjectType(r2, r3, r4, MAP_TYPE);
- __ b(ne, &rt_call);
+ // Load the initial map and verify that it is in fact a map.
+ // r1: constructor function
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+ __ JumpIfSmi(r2, &rt_call);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
+ __ b(ne, &rt_call);
- // Check that the constructor is not constructing a JSFunction (see
- // comments in Runtime_NewObject in runtime.cc). In which case the
- // initial map's instance type would be JS_FUNCTION_TYPE.
- // r1: constructor function
- // r2: initial map
- __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
- __ b(eq, &rt_call);
+ // Check that the constructor is not constructing a JSFunction (see comments
+ // in Runtime_NewObject in runtime.cc); in that case the initial map's
+ // instance type would be JS_FUNCTION_TYPE.
+ // r1: constructor function
+ // r2: initial map
+ __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE, eq);
+ __ b(eq, &rt_call);
if (count_constructions) {
Label allocate;
@@ -796,7 +805,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
MemOperand constructor_count =
FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
__ ldrb(r4, constructor_count);
- __ sub(r4, r4, Operand(1), SetCC);
+ __ sub(r4, r4, Operand(1));
+ __ cmpeq(r4, Operand(0));
__ strb(r4, constructor_count);
__ b(ne, &allocate);
@@ -839,18 +849,20 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r3: object size (in words)
// r4: JSObject (not tagged)
// r5: First in-object property of JSObject (not tagged)
- __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ __ lsl(r6, r3, Operand(kPointerSizeLog2));
+ __ add(r6, r4, r6); // End of object.
ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
__ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
if (count_constructions) {
__ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
__ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
kBitsPerByte);
- __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
+ __ lsl(r0, r0, Operand(kPointerSizeLog2));
+ __ add(r0, r5, r0);
// r0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
- __ cmp(r0, r6);
- __ Assert(le, "Unexpected number of pre-allocated property fields.");
+ __ cmpgt(r0, r6);
+ __ Assert(ne, "Unexpected number of pre-allocated property fields.");
}
__ InitializeFieldsWithFiller(r5, r0, r7);
// To allow for truncation.
@@ -875,14 +887,16 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
__ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
kBitsPerByte);
- __ add(r3, r3, Operand(r6));
+ __ add(r3, r3, r6);
__ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
kBitsPerByte);
- __ sub(r3, r3, Operand(r6), SetCC);
+ __ sub(r3, r3, r6);
+ __ cmpeq(r6, Operand(0));
// Done if no extra properties are to be allocated.
__ b(eq, &allocated);
- __ Assert(pl, "Property allocation count failed.");
+ __ cmpge(r6, Operand(0));
+ __ Assert(eq, "Property allocation count failed.");
// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
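Note how assertion conditions are inverted by the port: after an SH4 compare only eq/ne (T set / T clear) can be tested, so ARM's Assert(le, ...) becomes cmpgt followed by Assert(ne, ...), i.e. assert that "greater than" did not hold. Same check, opposite phrasing:

  // ARM: assert r0 <= r6.
  __ cmp(r0, r6);
  __ Assert(le, "Unexpected number of pre-allocated property fields.");
  // SH4: T = (r0 > r6); assert T is clear, which means r0 <= r6.
  __ cmpgt(r0, r6);
  __ Assert(ne, "Unexpected number of pre-allocated property fields.");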
@@ -909,7 +923,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
__ str(r6, MemOperand(r2, kPointerSize, PostIndex));
ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
- __ mov(r0, Operand(r3, LSL, kSmiTagSize));
+ __ lsl(r0, r3, Operand(kSmiTagSize));
__ str(r0, MemOperand(r2, kPointerSize, PostIndex));
// Initialize the fields to undefined.
@@ -918,22 +932,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r3: number of elements in properties array
// r4: JSObject
// r5: FixedArray (not tagged)
- __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ __ lsl(r6, r3, Operand(kPointerSizeLog2));
+ __ add(r6, r2, r6); // End of object.
ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
{ Label loop, entry;
if (count_constructions) {
__ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
} else if (FLAG_debug_code) {
- __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
- __ cmp(r7, r8);
+ __ LoadRoot(cp, Heap::kUndefinedValueRootIndex);
+ __ cmp(r7, cp);
__ Assert(eq, "Undefined value not loaded.");
}
__ b(&entry);
__ bind(&loop);
__ str(r7, MemOperand(r2, kPointerSize, PostIndex));
__ bind(&entry);
- __ cmp(r2, r6);
- __ b(lt, &loop);
+ __ cmpge(r2, r6);
+ __ bf(&loop);
}
// Store the initialized FixedArray into the properties field of
@@ -982,7 +997,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
// Set up number of arguments for function call below
- __ mov(r0, Operand(r3, LSR, kSmiTagSize));
+ __ lsr(r0, r3, Operand(kSmiTagSize));
// Copy arguments and receiver to the expression stack.
// r0: number of arguments
@@ -996,11 +1011,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
Label loop, entry;
__ b(&entry);
__ bind(&loop);
- __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
+ __ lsl(ip, r3, Operand(kPointerSizeLog2 - 1));
+ __ ldr(ip, MemOperand(r2, ip));
__ push(ip);
__ bind(&entry);
- __ sub(r3, r3, Operand(2), SetCC);
- __ b(ge, &loop);
+ __ sub(r3, r3, Operand(2));
+ __ cmpge(r3, Operand(0));
+ __ bt(&loop);
// Call the function.
// r0: number of arguments
@@ -1044,8 +1061,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// If the type of the result (stored in its map) is less than
// FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
- __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
- __ b(ge, &exit);
+ __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE, ge);
+ __ bt(&exit);
// Throw away the result of the constructor invocation and use the
// on-stack receiver as the result.
@@ -1064,10 +1081,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Leave construct frame.
}
- __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
+ __ lsl(ip, r1, Operand(kPointerSizeLog2 - 1));
+ __ add(sp, sp, ip);
__ add(sp, sp, Operand(kPointerSize));
__ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
- __ Jump(lr);
+ __ rts();
}
@@ -1093,7 +1111,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// r1: function
// r2: receiver
// r3: argc
- // r4: argv
+ // r4: argv (set up by JSEntryStub)
// r5-r7, cp may be clobbered
// Clear the context before we push it when entering the internal frame.
@@ -1117,9 +1135,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// r3: argc
// r4: argv, i.e. points to first arg
Label loop, entry;
- __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
+ __ lsl(r2, r3, Operand(kPointerSizeLog2));
+ __ add(r2, r4, r2);
// r2 points past last arg.
- __ b(&entry);
+ __ b_near(&entry);
__ bind(&loop);
__ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
__ ldr(r0, MemOperand(r0)); // dereference handle
@@ -1131,15 +1150,17 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
- __ mov(r5, Operand(r4));
- __ mov(r6, Operand(r4));
- __ mov(r7, Operand(r4));
- if (kR9Available == 1) {
- __ mov(r9, Operand(r4));
- }
+ __ mov(sh4_r5, r4);
+ __ mov(sh4_r6, r4);
+ __ mov(sh4_r7, r4);
+ __ mov(sh4_r8, r4);
+ __ mov(sh4_r9, r4);
// Invoke the code and pass argc as r0.
- __ mov(r0, Operand(r3));
+ __ mov(r0, r3);
+
+ // r0: argc
+ // r1: function
if (is_construct) {
CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
__ CallStub(&stub);
@@ -1152,7 +1173,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// return.
// Respect ABI stack constraint.
}
- __ Jump(lr);
+ __ rts();
// r0: result
}
@@ -1242,14 +1263,14 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
// Switch on the state.
Label with_tos_register, unknown_state;
__ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
- __ b(ne, &with_tos_register);
+ __ b(ne, &with_tos_register, Label::kNear);
__ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
__ Ret();
__ bind(&with_tos_register);
__ ldr(r0, MemOperand(sp, 1 * kPointerSize));
__ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
- __ b(ne, &unknown_state);
+ __ b(ne, &unknown_state, Label::kNear);
__ add(sp, sp, Operand(2 * kPointerSize)); // Remove state.
__ Ret();
@@ -1273,48 +1294,20 @@ void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
// doesn't do any garbage collection which allows us to save/restore
// the registers without worrying about which of them contain
// pointers. This seems a bit fragile.
- __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
+ __ Push(pr, fp);
+ __ pushm(kJSCallerSaved | kCalleeSaved);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kNotifyOSR, 0);
}
- __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
+ __ popm(kJSCallerSaved | kCalleeSaved);
+ __ Pop(pr, fp);
__ Ret();
}
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
- CpuFeatures::TryForceFeatureScope scope(VFP3);
- if (!CPU::SupportsCrankshaft()) {
- __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
- return;
- }
-
- // Lookup the function in the JavaScript frame and push it as an
- // argument to the on-stack replacement function.
- __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(r0);
- __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
- }
-
- // If the result was -1 it means that we couldn't optimize the
- // function. Just return and continue in the unoptimized version.
- Label skip;
- __ cmp(r0, Operand(Smi::FromInt(-1)));
- __ b(ne, &skip);
- __ Ret();
-
- __ bind(&skip);
- // Untag the AST id and push it on the stack.
- __ SmiUntag(r0);
- __ push(r0);
-
- // Generate the code for doing the frame-to-frame translation using
- // the deoptimizer infrastructure.
- Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
- generator.Generate();
+ __ UNIMPLEMENTED_BREAK();
}
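ARM saves and restores register sets with single stm/ldm (store/load multiple) instructions; SH4 has no direct equivalent, so the port brackets the runtime call with Push/Pop for pr and fp plus pushm/popm helpers, which presumably expand to one store or load per register in the list:

  __ Push(pr, fp);                          // return address and frame pointer
  __ pushm(kJSCallerSaved | kCalleeSaved);  // presumably a store per register
  // ... runtime call ...
  __ popm(kJSCallerSaved | kCalleeSaved);
  __ Pop(pr, fp);

Generate_OnStackReplacement, above, is reduced to UNIMPLEMENTED_BREAK(): on-stack replacement depends on Crankshaft, which this first draft of the port does not support.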
@@ -1322,8 +1315,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 1. Make sure we have at least one argument.
// r0: actual number of arguments
{ Label done;
- __ cmp(r0, Operand(0));
- __ b(ne, &done);
+ __ tst(r0, r0);
+ __ b(ne, &done, Label::kNear);
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ push(r2);
__ add(r0, r0, Operand(1));
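The zero test also changes shape: tst(r0, r0) ANDs the register with itself and sets T when the result is zero, a one-instruction replacement for ARM's compare against an immediate zero:

  // ARM: flags from r0 - 0.
  __ cmp(r0, Operand(0));
  __ b(ne, &done);
  // SH4: T = ((r0 & r0) == 0); branch when T is clear, i.e. r0 != 0.
  __ tst(r0, r0);
  __ b(ne, &done, Label::kNear);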
@@ -1334,9 +1327,10 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// if it is a function.
// r0: actual number of arguments
Label slow, non_function;
- __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
+ __ lsl(r1, r0, Operand(kPointerSizeLog2));
+ __ ldr(r1, MemOperand(sp, r1));
__ JumpIfSmi(r1, &non_function);
- __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
+ __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE, eq);
__ b(ne, &slow);
// 3a. Patch the first argument if necessary when calling a function.
@@ -1360,12 +1354,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ b(ne, &shift_arguments);
// Compute the receiver in non-strict mode.
- __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
+ __ lsl(r2, r0, Operand(kPointerSizeLog2));
+ __ add(r2, sp, r2);
__ ldr(r2, MemOperand(r2, -kPointerSize));
// r0: actual number of arguments
// r1: function
// r2: first argument
- __ JumpIfSmi(r2, &convert_to_object);
+ __ JumpIfSmi(r2, &convert_to_object, Label::kNear);
__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
__ cmp(r2, r3);
@@ -1375,15 +1370,15 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ b(eq, &use_global_receiver);
STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
- __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
- __ b(ge, &shift_arguments);
+ __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE, ge);
+ __ bt(&shift_arguments);
__ bind(&convert_to_object);
{
// Enter an internal frame in order to preserve argument count.
FrameScope scope(masm, StackFrame::INTERNAL);
- __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
+ __ lsl(r0, r0, Operand(kSmiTagSize)); // Smi-tagged.
__ push(r0);
__ push(r2);
@@ -1391,15 +1386,16 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ mov(r2, r0);
__ pop(r0);
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ __ asr(r0, r0, Operand(kSmiTagSize));
// Exit the internal frame.
}
- // Restore the function to r1, and the flag to r4.
- __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
+ // Restore the function to r1.
+ __ lsl(r1, r0, Operand(kPointerSizeLog2));
+ __ ldr(r1, MemOperand(sp, r1));
__ mov(r4, Operand(0, RelocInfo::NONE));
- __ jmp(&patch_receiver);
+ __ jmp_near(&patch_receiver);
// Use the global receiver object from the called function as the
// receiver.
@@ -1412,7 +1408,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
__ bind(&patch_receiver);
- __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
+ __ lsl(r3, r0, Operand(kPointerSizeLog2));
+ __ add(r3, sp, r3);
__ str(r2, MemOperand(r3, -kPointerSize));
__ jmp(&shift_arguments);
@@ -1433,7 +1430,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// r0: actual number of arguments
// r1: function
// r4: call type (0: JS function, 1: function proxy, 2: non-function)
- __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
+ __ lsl(r2, r0, Operand(kPointerSizeLog2));
+ __ add(r2, sp, r2);
__ str(r1, MemOperand(r2, -kPointerSize));
// 4. Shift arguments and return address one slot down on the stack
@@ -1445,7 +1443,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ bind(&shift_arguments);
{ Label loop;
// Calculate the copy start address (destination). Copy end address is sp.
- __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
+ __ lsl(r2, r0, Operand(kPointerSizeLog2));
+ __ add(r2, sp, r2);
__ bind(&loop);
__ ldr(ip, MemOperand(r2, -kPointerSize));
@@ -1466,7 +1465,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// r4: call type (0: JS function, 1: function proxy, 2: non-function)
{ Label function, non_proxy;
__ tst(r4, r4);
- __ b(eq, &function);
+ __ b(eq, &function, Label::kNear);
// Expected number of arguments is 0 for CALL_NON_FUNCTION.
__ mov(r2, Operand(0, RelocInfo::NONE));
__ SetCallKind(r5, CALL_AS_METHOD);
@@ -1481,7 +1480,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ bind(&non_proxy);
__ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ bind(&function);
}
@@ -1491,17 +1490,18 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// (tail-call) to the code in register edx without checking arguments.
// r0: actual number of arguments
// r1: function
+ Label end;
__ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2,
FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
- __ mov(r2, Operand(r2, ASR, kSmiTagSize));
+ __ asr(r2, r2, Operand(kSmiTagSize));
__ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
__ SetCallKind(r5, CALL_AS_METHOD);
__ cmp(r2, r0); // Check formal and actual parameter counts.
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
- RelocInfo::CODE_TARGET,
- ne);
-
+ __ bt_near(&end);
+ __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ RelocInfo::CODE_TARGET);
+ __ bind(&end);
ParameterCount expected(0);
__ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
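ARM can attach a condition to a tail call into another code object (Jump(..., ne)); SH4 cannot predicate a relocated jump, so the port inverts the test and hops over the unconditional jump with a near branch. A sketch of the rewrite in this hunk:

  // ARM: jump to the adaptor only when the counts differ.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne);
  // SH4: after cmp(r2, r0), T is set when the counts match; skip then.
  __ bt_near(&end);
  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
         RelocInfo::CODE_TARGET);
  __ bind(&end);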
@@ -1533,8 +1533,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// here which will cause r2 to become negative.
__ sub(r2, sp, r2);
// Check if the arguments will overflow the stack.
- __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ b(gt, &okay); // Signed comparison.
+ __ lsl(ip, r0, Operand(kPointerSizeLog2 - kSmiTagSize));
+ __ cmpgt(r2, ip);
+ __ bt_near(&okay); // Signed comparison.
// Out of stack space.
__ ldr(r1, MemOperand(fp, kFunctionOffset));
@@ -1555,8 +1556,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Check that the function is a JS function (otherwise it must be a proxy).
Label push_receiver;
__ ldr(r1, MemOperand(fp, kFunctionOffset));
- __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
- __ b(ne, &push_receiver);
+ __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE, eq);
+ __ bf(&push_receiver);
// Change context eagerly to get the right global object if necessary.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
@@ -1587,8 +1588,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Check if the receiver is already a JavaScript object.
// r0: receiver
STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
- __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
- __ b(ge, &push_receiver);
+ __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE, ge);
+ __ bt(&push_receiver);
// Convert the receiver to a regular object.
// r0: receiver
@@ -1602,7 +1603,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
const int kGlobalOffset =
Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
__ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
__ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
@@ -1614,7 +1615,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Copy all arguments from the array to the stack.
Label entry, loop;
__ ldr(r0, MemOperand(fp, kIndexOffset));
- __ b(&entry);
+ __ b_near(&entry);
// Load the current argument from the arguments array and push it to the
// stack.
@@ -1643,16 +1644,16 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Invoke the function.
Label call_proxy;
ParameterCount actual(r0);
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ __ asr(r0, r0, Operand(kSmiTagSize));
__ ldr(r1, MemOperand(fp, kFunctionOffset));
- __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
- __ b(ne, &call_proxy);
+ __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE, eq);
+ __ bf(&call_proxy);
__ InvokeFunction(r1, actual, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
frame_scope.GenerateLeaveFrame();
__ add(sp, sp, Operand(3 * kPointerSize));
- __ Jump(lr);
+ __ rts();
// Invoke the function proxy.
__ bind(&call_proxy);
@@ -1667,14 +1668,15 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Tear down the internal frame and remove function, receiver and args.
}
__ add(sp, sp, Operand(3 * kPointerSize));
- __ Jump(lr);
+ __ rts();
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
- __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+ __ lsl(r0, r0, Operand(kSmiTagSize));
__ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
+ __ push(pr);
+ __ Push(fp, r4, r1, r0);
__ add(fp, sp, Operand(3 * kPointerSize));
}
@@ -1687,8 +1689,9 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
// then tear down the parameters.
__ ldr(r1, MemOperand(fp, -3 * kPointerSize));
__ mov(sp, fp);
- __ ldm(ia_w, sp, fp.bit() | lr.bit());
- __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ Pop(pr, fp);
+ __ lsl(ip, r1, Operand(kPointerSizeLog2 - kSmiTagSize));
+ __ add(sp, sp, ip);
__ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
}
@@ -1705,8 +1708,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label invoke, dont_adapt_arguments;
Label enough, too_few;
- __ cmp(r0, r2);
- __ b(lt, &too_few);
+ __ cmpge(r0, r2);
+ __ bf(&too_few);
__ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
__ b(eq, &dont_adapt_arguments);
@@ -1719,10 +1722,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r1: function
// r2: expected number of arguments
// r3: code entry to call
- __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ lsl(r0, r0, Operand(kPointerSizeLog2 - kSmiTagSize));
+ __ add(r0, fp, r0);
// adjust for return address and receiver
__ add(r0, r0, Operand(2 * kPointerSize));
- __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
+ __ lsl(r2, r2, Operand(kPointerSizeLog2));
+ __ sub(r2, r0, r2);
// Copy the arguments (including the receiver) to the new stack frame.
// r0: copy start address
@@ -1750,7 +1755,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r1: function
// r2: expected number of arguments
// r3: code entry to call
- __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ lsl(r0, r0, Operand(kPointerSizeLog2 - kSmiTagSize));
+ __ add(r0, fp, r0);
// Copy the arguments (including the receiver) to the new stack frame.
// r0: copy start address
@@ -1771,7 +1777,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r2: expected number of arguments
// r3: code entry to call
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
+ __ lsl(r2, r2, Operand(kPointerSizeLog2));
+ __ sub(r2, fp, r2);
__ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.
Label fill;
@@ -1783,21 +1790,21 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Call the entry point.
__ bind(&invoke);
- __ Call(r3);
+ __ jsr(r3);
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
// Exit frame and return.
LeaveArgumentsAdaptorFrame(masm);
- __ Jump(lr);
+ __ rts();
// -------------------------------------------
// Dont adapt arguments.
// -------------------------------------------
__ bind(&dont_adapt_arguments);
- __ Jump(r3);
+ __ jmp(r3);
}
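The control-transfer rewrites in this final hunk round out the mapping used across the file. As a summary, with SH4 semantics as this port appears to use them:

  __ jsr(r3);   // call: saves the return address in pr (ARM: Call(r3), via lr)
  __ rts();     // return through pr (ARM: Jump(lr))
  __ jmp(r3);   // tail call, pr untouched (ARM: Jump(r3))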
@@ -1805,4 +1812,4 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
} } // namespace v8::internal
-#endif // V8_TARGET_ARCH_ARM
+#endif // V8_TARGET_ARCH_SH4