Chromium Code Reviews

Side by Side Diff: src/arm/codegen-arm.cc

Issue 655002: Merge revisions 3777-3813 from bleeding_edge to partial snapshots ... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/partial_snapshots/
Patch Set: Created 10 years, 10 months ago
OLD | NEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 103 matching lines...)
114 114
115 CodeGenState::~CodeGenState() { 115 CodeGenState::~CodeGenState() {
116 ASSERT(owner_->state() == this); 116 ASSERT(owner_->state() == this);
117 owner_->set_state(previous_); 117 owner_->set_state(previous_);
118 } 118 }
119 119
120 120
121 // ------------------------------------------------------------------------- 121 // -------------------------------------------------------------------------
122 // CodeGenerator implementation 122 // CodeGenerator implementation
123 123
124 CodeGenerator::CodeGenerator(MacroAssembler* masm, 124 CodeGenerator::CodeGenerator(MacroAssembler* masm)
125 Handle<Script> script, 125 : deferred_(8),
126 bool is_eval)
127 : is_eval_(is_eval),
128 script_(script),
129 deferred_(8),
130 masm_(masm), 126 masm_(masm),
131 scope_(NULL), 127 info_(NULL),
132 frame_(NULL), 128 frame_(NULL),
133 allocator_(NULL), 129 allocator_(NULL),
134 cc_reg_(al), 130 cc_reg_(al),
135 state_(NULL), 131 state_(NULL),
136 function_return_is_shadowed_(false) { 132 function_return_is_shadowed_(false) {
137 } 133 }
138 134
139 135
136 Scope* CodeGenerator::scope() { return info_->function()->scope(); }
137
138
140 // Calling conventions: 139 // Calling conventions:
141 // fp: caller's frame pointer 140 // fp: caller's frame pointer
142 // sp: stack pointer 141 // sp: stack pointer
143 // r1: called JS function 142 // r1: called JS function
144 // cp: callee's context 143 // cp: callee's context
145 144
146 void CodeGenerator::Generate(FunctionLiteral* fun, 145 void CodeGenerator::Generate(CompilationInfo* info, Mode mode) {
147 Mode mode,
148 CompilationInfo* info) {
149 // Record the position for debugging purposes. 146 // Record the position for debugging purposes.
150 CodeForFunctionPosition(fun); 147 CodeForFunctionPosition(info->function());
151
152 ZoneList<Statement*>* body = fun->body();
153 148
154 // Initialize state. 149 // Initialize state.
155 ASSERT(scope_ == NULL); 150 info_ = info;
156 scope_ = fun->scope();
157 ASSERT(allocator_ == NULL); 151 ASSERT(allocator_ == NULL);
158 RegisterAllocator register_allocator(this); 152 RegisterAllocator register_allocator(this);
159 allocator_ = &register_allocator; 153 allocator_ = &register_allocator;
160 ASSERT(frame_ == NULL); 154 ASSERT(frame_ == NULL);
161 frame_ = new VirtualFrame(); 155 frame_ = new VirtualFrame();
162 cc_reg_ = al; 156 cc_reg_ = al;
163 { 157 {
164 CodeGenState state(this); 158 CodeGenState state(this);
165 159
166 // Entry: 160 // Entry:
167 // Stack: receiver, arguments 161 // Stack: receiver, arguments
168 // lr: return address 162 // lr: return address
169 // fp: caller's frame pointer 163 // fp: caller's frame pointer
170 // sp: stack pointer 164 // sp: stack pointer
171 // r1: called JS function 165 // r1: called JS function
172 // cp: callee's context 166 // cp: callee's context
173 allocator_->Initialize(); 167 allocator_->Initialize();
174 168
175 #ifdef DEBUG 169 #ifdef DEBUG
176 if (strlen(FLAG_stop_at) > 0 && 170 if (strlen(FLAG_stop_at) > 0 &&
177 fun->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 171 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
178 frame_->SpillAll(); 172 frame_->SpillAll();
179 __ stop("stop-at"); 173 __ stop("stop-at");
180 } 174 }
181 #endif 175 #endif
182 176
183 if (mode == PRIMARY) { 177 if (mode == PRIMARY) {
184 frame_->Enter(); 178 frame_->Enter();
185 // tos: code slot 179 // tos: code slot
186 180
187 // Allocate space for locals and initialize them. This also checks 181 // Allocate space for locals and initialize them. This also checks
188 // for stack overflow. 182 // for stack overflow.
189 frame_->AllocateStackSlots(); 183 frame_->AllocateStackSlots();
190 184
191 VirtualFrame::SpilledScope spilled_scope; 185 VirtualFrame::SpilledScope spilled_scope;
192 int heap_slots = scope_->num_heap_slots(); 186 int heap_slots = scope()->num_heap_slots();
193 if (heap_slots > 0) { 187 if (heap_slots > 0) {
194 // Allocate local context. 188 // Allocate local context.
195 // Get outer context and create a new context based on it. 189 // Get outer context and create a new context based on it.
196 __ ldr(r0, frame_->Function()); 190 __ ldr(r0, frame_->Function());
197 frame_->EmitPush(r0); 191 frame_->EmitPush(r0);
198 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 192 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
199 FastNewContextStub stub(heap_slots); 193 FastNewContextStub stub(heap_slots);
200 frame_->CallStub(&stub, 1); 194 frame_->CallStub(&stub, 1);
201 } else { 195 } else {
202 frame_->CallRuntime(Runtime::kNewContext, 1); 196 frame_->CallRuntime(Runtime::kNewContext, 1);
203 } 197 }
204 198
205 #ifdef DEBUG 199 #ifdef DEBUG
206 JumpTarget verified_true; 200 JumpTarget verified_true;
207 __ cmp(r0, Operand(cp)); 201 __ cmp(r0, Operand(cp));
208 verified_true.Branch(eq); 202 verified_true.Branch(eq);
209 __ stop("NewContext: r0 is expected to be the same as cp"); 203 __ stop("NewContext: r0 is expected to be the same as cp");
210 verified_true.Bind(); 204 verified_true.Bind();
211 #endif 205 #endif
212 // Update context local. 206 // Update context local.
213 __ str(cp, frame_->Context()); 207 __ str(cp, frame_->Context());
214 } 208 }
215 209
216 // TODO(1241774): Improve this code: 210 // TODO(1241774): Improve this code:
217 // 1) only needed if we have a context 211 // 1) only needed if we have a context
218 // 2) no need to recompute context ptr every single time 212 // 2) no need to recompute context ptr every single time
219 // 3) don't copy parameter operand code from SlotOperand! 213 // 3) don't copy parameter operand code from SlotOperand!
220 { 214 {
221 Comment cmnt2(masm_, "[ copy context parameters into .context"); 215 Comment cmnt2(masm_, "[ copy context parameters into .context");
222
223 // Note that iteration order is relevant here! If we have the same 216 // Note that iteration order is relevant here! If we have the same
224 // parameter twice (e.g., function (x, y, x)), and that parameter 217 // parameter twice (e.g., function (x, y, x)), and that parameter
225 // needs to be copied into the context, it must be the last argument 218 // needs to be copied into the context, it must be the last argument
226 // passed to the parameter that needs to be copied. This is a rare 219 // passed to the parameter that needs to be copied. This is a rare
227 // case so we don't check for it, instead we rely on the copying 220 // case so we don't check for it, instead we rely on the copying
228 // order: such a parameter is copied repeatedly into the same 221 // order: such a parameter is copied repeatedly into the same
229 // context location and thus the last value is what is seen inside 222 // context location and thus the last value is what is seen inside
230 // the function. 223 // the function.
231 for (int i = 0; i < scope_->num_parameters(); i++) { 224 for (int i = 0; i < scope()->num_parameters(); i++) {
232 Variable* par = scope_->parameter(i); 225 Variable* par = scope()->parameter(i);
233 Slot* slot = par->slot(); 226 Slot* slot = par->slot();
234 if (slot != NULL && slot->type() == Slot::CONTEXT) { 227 if (slot != NULL && slot->type() == Slot::CONTEXT) {
235 // No parameters in global scope. 228 ASSERT(!scope()->is_global_scope()); // No params in global scope.
236 ASSERT(!scope_->is_global_scope());
237 __ ldr(r1, frame_->ParameterAt(i)); 229 __ ldr(r1, frame_->ParameterAt(i));
238 // Loads r2 with context; used below in RecordWrite. 230 // Loads r2 with context; used below in RecordWrite.
239 __ str(r1, SlotOperand(slot, r2)); 231 __ str(r1, SlotOperand(slot, r2));
240 // Load the offset into r3. 232 // Load the offset into r3.
241 int slot_offset = 233 int slot_offset =
242 FixedArray::kHeaderSize + slot->index() * kPointerSize; 234 FixedArray::kHeaderSize + slot->index() * kPointerSize;
243 __ mov(r3, Operand(slot_offset)); 235 __ mov(r3, Operand(slot_offset));
244 __ RecordWrite(r2, r3, r1); 236 __ RecordWrite(r2, r3, r1);
245 } 237 }
246 } 238 }
247 } 239 }
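The iteration-order argument above, in plain C++ terms: a later copy into the same context slot simply overwrites the earlier one, which matches how JavaScript binds a duplicated parameter name. A minimal sketch (hypothetical names, not V8 API):

  #include <map>
  #include <string>
  #include <utility>
  #include <vector>

  int main() {
    // function (x, y, x) called as f(1, 2, 3): parameters in source order.
    std::vector<std::pair<std::string, int>> params = {{"x", 1}, {"y", 2}, {"x", 3}};
    std::map<std::string, int> context_slots;
    for (const auto& p : params) {
      context_slots[p.first] = p.second;  // repeated copy into the same slot
    }
    // context_slots["x"] == 3: the last copy wins, as the comment describes.
    return 0;
  }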
248 240
249 // Store the arguments object. This must happen after context 241 // Store the arguments object. This must happen after context
250 // initialization because the arguments object may be stored in the 242 // initialization because the arguments object may be stored in the
251 // context. 243 // context.
252 if (scope_->arguments() != NULL) { 244 if (scope()->arguments() != NULL) {
253 Comment cmnt(masm_, "[ allocate arguments object"); 245 Comment cmnt(masm_, "[ allocate arguments object");
254 ASSERT(scope_->arguments_shadow() != NULL); 246 ASSERT(scope()->arguments_shadow() != NULL);
255 Variable* arguments = scope_->arguments()->var(); 247 Variable* arguments = scope()->arguments()->var();
256 Variable* shadow = scope_->arguments_shadow()->var(); 248 Variable* shadow = scope()->arguments_shadow()->var();
257 ASSERT(arguments != NULL && arguments->slot() != NULL); 249 ASSERT(arguments != NULL && arguments->slot() != NULL);
258 ASSERT(shadow != NULL && shadow->slot() != NULL); 250 ASSERT(shadow != NULL && shadow->slot() != NULL);
259 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); 251 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
260 __ ldr(r2, frame_->Function()); 252 __ ldr(r2, frame_->Function());
261 // The receiver is below the arguments, the return address, and the 253 // The receiver is below the arguments, the return address, and the
262 // frame pointer on the stack. 254 // frame pointer on the stack.
263 const int kReceiverDisplacement = 2 + scope_->num_parameters(); 255 const int kReceiverDisplacement = 2 + scope()->num_parameters();
264 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); 256 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
265 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); 257 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
266 frame_->Adjust(3); 258 frame_->Adjust(3);
267 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit()); 259 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit());
268 frame_->CallStub(&stub, 3); 260 frame_->CallStub(&stub, 3);
269 frame_->EmitPush(r0); 261 frame_->EmitPush(r0);
270 StoreToSlot(arguments->slot(), NOT_CONST_INIT); 262 StoreToSlot(arguments->slot(), NOT_CONST_INIT);
271 StoreToSlot(shadow->slot(), NOT_CONST_INIT); 263 StoreToSlot(shadow->slot(), NOT_CONST_INIT);
272 frame_->Drop(); // Value is no longer needed. 264 frame_->Drop(); // Value is no longer needed.
273 } 265 }
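A sketch of the displacement arithmetic used to compute r1 above, assuming the layout in the comment (receiver below the arguments, the return address, and the frame pointer); the helper name is hypothetical, not V8 API:

  #include <cstdint>

  // With lr and the caller's fp already pushed, the receiver sits
  // num_parameters argument slots plus those two words above fp,
  // hence kReceiverDisplacement = 2 + num_parameters.
  uint32_t* ReceiverAddress(uint32_t* fp, int num_parameters) {
    const int kReceiverDisplacement = 2 + num_parameters;
    return fp + kReceiverDisplacement;  // mirrors add(r1, fp, ... * kPointerSize)
  }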
274 266
275 // Initialize ThisFunction reference if present. 267 // Initialize ThisFunction reference if present.
276 if (scope_->is_function_scope() && scope_->function() != NULL) { 268 if (scope()->is_function_scope() && scope()->function() != NULL) {
277 __ mov(ip, Operand(Factory::the_hole_value())); 269 __ mov(ip, Operand(Factory::the_hole_value()));
278 frame_->EmitPush(ip); 270 frame_->EmitPush(ip);
279 StoreToSlot(scope_->function()->slot(), NOT_CONST_INIT); 271 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
280 } 272 }
281 } else { 273 } else {
282 // When used as the secondary compiler for splitting, r1, cp, 274 // When used as the secondary compiler for splitting, r1, cp,
283 // fp, and lr have been pushed on the stack. Adjust the virtual 275 // fp, and lr have been pushed on the stack. Adjust the virtual
284 // frame to match this state. 276 // frame to match this state.
285 frame_->Adjust(4); 277 frame_->Adjust(4);
286 allocator_->Unuse(r1); 278 allocator_->Unuse(r1);
287 allocator_->Unuse(lr); 279 allocator_->Unuse(lr);
288 } 280 }
289 281
290 // Initialize the function return target after the locals are set 282 // Initialize the function return target after the locals are set
291 // up, because it needs the expected frame height from the frame. 283 // up, because it needs the expected frame height from the frame.
292 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); 284 function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
293 function_return_is_shadowed_ = false; 285 function_return_is_shadowed_ = false;
294 286
295 // Generate code to 'execute' declarations and initialize functions 287 // Generate code to 'execute' declarations and initialize functions
296 // (source elements). In case of an illegal redeclaration we need to 288 // (source elements). In case of an illegal redeclaration we need to
297 // handle that instead of processing the declarations. 289 // handle that instead of processing the declarations.
298 if (scope_->HasIllegalRedeclaration()) { 290 if (scope()->HasIllegalRedeclaration()) {
299 Comment cmnt(masm_, "[ illegal redeclarations"); 291 Comment cmnt(masm_, "[ illegal redeclarations");
300 scope_->VisitIllegalRedeclaration(this); 292 scope()->VisitIllegalRedeclaration(this);
301 } else { 293 } else {
302 Comment cmnt(masm_, "[ declarations"); 294 Comment cmnt(masm_, "[ declarations");
303 ProcessDeclarations(scope_->declarations()); 295 ProcessDeclarations(scope()->declarations());
304 // Bail out if a stack-overflow exception occurred when processing 296 // Bail out if a stack-overflow exception occurred when processing
305 // declarations. 297 // declarations.
306 if (HasStackOverflow()) return; 298 if (HasStackOverflow()) return;
307 } 299 }
308 300
309 if (FLAG_trace) { 301 if (FLAG_trace) {
310 frame_->CallRuntime(Runtime::kTraceEnter, 0); 302 frame_->CallRuntime(Runtime::kTraceEnter, 0);
311 // Ignore the return value. 303 // Ignore the return value.
312 } 304 }
313 305
314 // Compile the body of the function in a vanilla state. Don't 306 // Compile the body of the function in a vanilla state. Don't
315 // bother compiling all the code if the scope has an illegal 307 // bother compiling all the code if the scope has an illegal
316 // redeclaration. 308 // redeclaration.
317 if (!scope_->HasIllegalRedeclaration()) { 309 if (!scope()->HasIllegalRedeclaration()) {
318 Comment cmnt(masm_, "[ function body"); 310 Comment cmnt(masm_, "[ function body");
319 #ifdef DEBUG 311 #ifdef DEBUG
320 bool is_builtin = Bootstrapper::IsActive(); 312 bool is_builtin = Bootstrapper::IsActive();
321 bool should_trace = 313 bool should_trace =
322 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; 314 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
323 if (should_trace) { 315 if (should_trace) {
324 frame_->CallRuntime(Runtime::kDebugTrace, 0); 316 frame_->CallRuntime(Runtime::kDebugTrace, 0);
325 // Ignore the return value. 317 // Ignore the return value.
326 } 318 }
327 #endif 319 #endif
328 VisitStatementsAndSpill(body); 320 VisitStatementsAndSpill(info->function()->body());
329 } 321 }
330 } 322 }
331 323
332 // Generate the return sequence if necessary. 324 // Generate the return sequence if necessary.
333 if (has_valid_frame() || function_return_.is_linked()) { 325 if (has_valid_frame() || function_return_.is_linked()) {
334 if (!function_return_.is_linked()) { 326 if (!function_return_.is_linked()) {
335 CodeForReturnPosition(fun); 327 CodeForReturnPosition(info->function());
336 } 328 }
337 // exit 329 // exit
338 // r0: result 330 // r0: result
339 // sp: stack pointer 331 // sp: stack pointer
340 // fp: frame pointer 332 // fp: frame pointer
341 // cp: callee's context 333 // cp: callee's context
342 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 334 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
343 335
344 function_return_.Bind(); 336 function_return_.Bind();
345 if (FLAG_trace) { 337 if (FLAG_trace) {
346 // Push the return value on the stack as the parameter. 338 // Push the return value on the stack as the parameter.
347 // Runtime::TraceExit returns the parameter as it is. 339 // Runtime::TraceExit returns the parameter as it is.
348 frame_->EmitPush(r0); 340 frame_->EmitPush(r0);
349 frame_->CallRuntime(Runtime::kTraceExit, 1); 341 frame_->CallRuntime(Runtime::kTraceExit, 1);
350 } 342 }
351 343
352 // Add a label for checking the size of the code used for returning. 344 // Add a label for checking the size of the code used for returning.
353 Label check_exit_codesize; 345 Label check_exit_codesize;
354 masm_->bind(&check_exit_codesize); 346 masm_->bind(&check_exit_codesize);
355 347
356 // Calculate the exact length of the return sequence and make sure that 348 // Calculate the exact length of the return sequence and make sure that
357 // the constant pool is not emitted inside of the return sequence. 349 // the constant pool is not emitted inside of the return sequence.
358 int32_t sp_delta = (scope_->num_parameters() + 1) * kPointerSize; 350 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
359 int return_sequence_length = Assembler::kJSReturnSequenceLength; 351 int return_sequence_length = Assembler::kJSReturnSequenceLength;
360 if (!masm_->ImmediateFitsAddrMode1Instruction(sp_delta)) { 352 if (!masm_->ImmediateFitsAddrMode1Instruction(sp_delta)) {
361 // Additional mov instruction generated. 353 // Additional mov instruction generated.
362 return_sequence_length++; 354 return_sequence_length++;
363 } 355 }
364 masm_->BlockConstPoolFor(return_sequence_length); 356 masm_->BlockConstPoolFor(return_sequence_length);
365 357
366 // Tear down the frame which will restore the caller's frame pointer and 358 // Tear down the frame which will restore the caller's frame pointer and
367 // the link register. 359 // the link register.
368 frame_->Exit(); 360 frame_->Exit();
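The sp_delta test above depends on ARM's operand-2 immediate encoding: an 8-bit value rotated right by an even amount. A sketch of what a check like ImmediateFitsAddrMode1Instruction has to decide (plain C++; the exact V8 implementation is assumed, not quoted):

  #include <cstdint>

  bool FitsAddrMode1Immediate(uint32_t imm) {
    // Encodable iff some even left-rotation brings the value into 0..255.
    for (int rot = 0; rot < 32; rot += 2) {
      uint32_t v = (rot == 0) ? imm : ((imm << rot) | (imm >> (32 - rot)));
      if (v <= 0xffu) return true;
    }
    return false;
  }

If sp_delta does not fit, the return sequence needs an extra mov to materialize the constant, hence return_sequence_length++.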
(...skipping 19 matching lines...)
388 ASSERT(!function_return_is_shadowed_); 380 ASSERT(!function_return_is_shadowed_);
389 function_return_.Unuse(); 381 function_return_.Unuse();
390 DeleteFrame(); 382 DeleteFrame();
391 383
392 // Process any deferred code using the register allocator. 384 // Process any deferred code using the register allocator.
393 if (!HasStackOverflow()) { 385 if (!HasStackOverflow()) {
394 ProcessDeferred(); 386 ProcessDeferred();
395 } 387 }
396 388
397 allocator_ = NULL; 389 allocator_ = NULL;
398 scope_ = NULL;
399 } 390 }
400 391
401 392
402 MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { 393 MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
403 // Currently, this assertion will fail if we try to assign to 394 // Currently, this assertion will fail if we try to assign to
404 // a constant variable that is constant because it is read-only 395 // a constant variable that is constant because it is read-only
405 // (such as the variable referring to a named function expression). 396 // (such as the variable referring to a named function expression).
406 // We need to implement assignments to read-only variables. 397 // We need to implement assignments to read-only variables.
407 // Ideally, we should do this during AST generation (by converting 398 // Ideally, we should do this during AST generation (by converting
408 // such assignments into expression statements); however, in general 399 // such assignments into expression statements); however, in general
(...skipping 1886 matching lines...)
2295 2286
2296 2287
2297 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) { 2288 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
2298 #ifdef DEBUG 2289 #ifdef DEBUG
2299 int original_height = frame_->height(); 2290 int original_height = frame_->height();
2300 #endif 2291 #endif
2301 VirtualFrame::SpilledScope spilled_scope; 2292 VirtualFrame::SpilledScope spilled_scope;
2302 Comment cmnt(masm_, "[ DebuggerStatement"); 2293 Comment cmnt(masm_, "[ DebuggerStatement");
2303 CodeForStatementPosition(node); 2294 CodeForStatementPosition(node);
2304 #ifdef ENABLE_DEBUGGER_SUPPORT 2295 #ifdef ENABLE_DEBUGGER_SUPPORT
2305 DebugerStatementStub ces; 2296 DebuggerStatementStub ces;
2306 frame_->CallStub(&ces, 0); 2297 frame_->CallStub(&ces, 0);
2307 #endif 2298 #endif
2308 // Ignore the return value. 2299 // Ignore the return value.
2309 ASSERT(frame_->height() == original_height); 2300 ASSERT(frame_->height() == original_height);
2310 } 2301 }
2311 2302
2312 2303
2313 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) { 2304 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
2314 VirtualFrame::SpilledScope spilled_scope; 2305 VirtualFrame::SpilledScope spilled_scope;
2315 ASSERT(boilerplate->IsBoilerplate()); 2306 ASSERT(boilerplate->IsBoilerplate());
(...skipping 18 matching lines...)
2334 2325
2335 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { 2326 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
2336 #ifdef DEBUG 2327 #ifdef DEBUG
2337 int original_height = frame_->height(); 2328 int original_height = frame_->height();
2338 #endif 2329 #endif
2339 VirtualFrame::SpilledScope spilled_scope; 2330 VirtualFrame::SpilledScope spilled_scope;
2340 Comment cmnt(masm_, "[ FunctionLiteral"); 2331 Comment cmnt(masm_, "[ FunctionLiteral");
2341 2332
2342 // Build the function boilerplate and instantiate it. 2333 // Build the function boilerplate and instantiate it.
2343 Handle<JSFunction> boilerplate = 2334 Handle<JSFunction> boilerplate =
2344 Compiler::BuildBoilerplate(node, script_, this); 2335 Compiler::BuildBoilerplate(node, script(), this);
2345 // Check for stack-overflow exception. 2336 // Check for stack-overflow exception.
2346 if (HasStackOverflow()) { 2337 if (HasStackOverflow()) {
2347 ASSERT(frame_->height() == original_height); 2338 ASSERT(frame_->height() == original_height);
2348 return; 2339 return;
2349 } 2340 }
2350 InstantiateBoilerplate(boilerplate); 2341 InstantiateBoilerplate(boilerplate);
2351 ASSERT(frame_->height() == original_height + 1); 2342 ASSERT(frame_->height() == original_height + 1);
2352 } 2343 }
2353 2344
2354 2345
(...skipping 1157 matching lines...)
3512 cc_reg_ = eq; 3503 cc_reg_ = eq;
3513 } 3504 }
3514 3505
3515 3506
3516 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { 3507 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
3517 VirtualFrame::SpilledScope spilled_scope; 3508 VirtualFrame::SpilledScope spilled_scope;
3518 ASSERT(args->length() == 0); 3509 ASSERT(args->length() == 0);
3519 3510
3520 // Seed the result with the formal parameters count, which will be used 3511 // Seed the result with the formal parameters count, which will be used
3521 // in case no arguments adaptor frame is found below the current frame. 3512 // in case no arguments adaptor frame is found below the current frame.
3522 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); 3513 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
3523 3514
3524 // Call the shared stub to get to the arguments.length. 3515 // Call the shared stub to get to the arguments.length.
3525 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH); 3516 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH);
3526 frame_->CallStub(&stub, 0); 3517 frame_->CallStub(&stub, 0);
3527 frame_->EmitPush(r0); 3518 frame_->EmitPush(r0);
3528 } 3519 }
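A minimal sketch of the tagging behind Smi::FromInt as used here (assuming the 32-bit layout this file relies on: kSmiTag == 0, kSmiTagSize == 1):

  #include <cstdint>

  int32_t SmiFromInt(int32_t value) { return value * 2; }  // low bit 0 is the tag
  int32_t SmiToInt(int32_t smi) { return smi >> 1; }       // ASR by one un-smi-tags
  bool IsSmi(int32_t value) { return (value & 1) == 0; }   // tst(reg, kSmiTagMask)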
3529 3520
3530 3521
3531 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { 3522 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
3532 VirtualFrame::SpilledScope spilled_scope; 3523 VirtualFrame::SpilledScope spilled_scope;
3533 ASSERT(args->length() == 1); 3524 ASSERT(args->length() == 1);
3534 3525
3535 // Satisfy contract with ArgumentsAccessStub: 3526 // Satisfy contract with ArgumentsAccessStub:
3536 // Load the key into r1 and the formal parameters count into r0. 3527 // Load the key into r1 and the formal parameters count into r0.
3537 LoadAndSpill(args->at(0)); 3528 LoadAndSpill(args->at(0));
3538 frame_->EmitPop(r1); 3529 frame_->EmitPop(r1);
3539 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); 3530 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
3540 3531
3541 // Call the shared stub to get to arguments[key]. 3532 // Call the shared stub to get to arguments[key].
3542 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); 3533 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3543 frame_->CallStub(&stub, 0); 3534 frame_->CallStub(&stub, 0);
3544 frame_->EmitPush(r0); 3535 frame_->EmitPush(r0);
3545 } 3536 }
3546 3537
3547 3538
3548 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { 3539 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) {
3549 VirtualFrame::SpilledScope spilled_scope; 3540 VirtualFrame::SpilledScope spilled_scope;
3550 ASSERT(args->length() == 0); 3541 ASSERT(args->length() == 0);
3551 __ Call(ExternalReference::random_positive_smi_function().address(), 3542 __ Call(ExternalReference::random_positive_smi_function().address(),
3552 RelocInfo::RUNTIME_ENTRY); 3543 RelocInfo::RUNTIME_ENTRY);
3553 frame_->EmitPush(r0); 3544 frame_->EmitPush(r0);
3554 } 3545 }
3555 3546
3556 3547
3557 void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) { 3548 void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
3558 ASSERT_EQ(2, args->length()); 3549 ASSERT_EQ(2, args->length());
3559 3550
3560 Load(args->at(0)); 3551 Load(args->at(0));
3561 Load(args->at(1)); 3552 Load(args->at(1));
3562 3553
3563 frame_->CallRuntime(Runtime::kStringAdd, 2); 3554 StringAddStub stub(NO_STRING_ADD_FLAGS);
3555 frame_->CallStub(&stub, 2);
3564 frame_->EmitPush(r0); 3556 frame_->EmitPush(r0);
3565 } 3557 }
3566 3558
3567 3559
3568 void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) { 3560 void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
3569 ASSERT_EQ(3, args->length()); 3561 ASSERT_EQ(3, args->length());
3570 3562
3571 Load(args->at(0)); 3563 Load(args->at(0));
3572 Load(args->at(1)); 3564 Load(args->at(1));
3573 Load(args->at(2)); 3565 Load(args->at(2));
3574 3566
3575 frame_->CallRuntime(Runtime::kSubString, 3); 3567 SubStringStub stub;
3568 frame_->CallStub(&stub, 3);
3576 frame_->EmitPush(r0); 3569 frame_->EmitPush(r0);
3577 } 3570 }
3578 3571
3579 3572
3580 void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) { 3573 void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
3581 ASSERT_EQ(2, args->length()); 3574 ASSERT_EQ(2, args->length());
3582 3575
3583 Load(args->at(0)); 3576 Load(args->at(0));
3584 Load(args->at(1)); 3577 Load(args->at(1));
3585 3578
(...skipping 1747 matching lines...)
5333 5326
5334 // Push arguments to the stack 5327 // Push arguments to the stack
5335 __ push(r1); 5328 __ push(r1);
5336 __ push(r0); 5329 __ push(r0);
5337 5330
5338 if (Token::ADD == operation) { 5331 if (Token::ADD == operation) {
5339 // Test for string arguments before calling runtime. 5332 // Test for string arguments before calling runtime.
5340 // r1 : first argument 5333 // r1 : first argument
5341 // r0 : second argument 5334 // r0 : second argument
5342 // sp[0] : second argument 5335 // sp[0] : second argument
5343 // sp[1] : first argument 5336 // sp[4] : first argument
5344 5337
5345 Label not_strings, not_string1, string1; 5338 Label not_strings, not_string1, string1;
5346 __ tst(r1, Operand(kSmiTagMask)); 5339 __ tst(r1, Operand(kSmiTagMask));
5347 __ b(eq, &not_string1); 5340 __ b(eq, &not_string1);
5348 __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE); 5341 __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE);
5349 __ b(ge, &not_string1); 5342 __ b(ge, &not_string1);
5350 5343
5351 // First argument is a string, test second. 5344 // First argument is a string, test second.
5352 __ tst(r0, Operand(kSmiTagMask)); 5345 __ tst(r0, Operand(kSmiTagMask));
5353 __ b(eq, &string1); 5346 __ b(eq, &string1);
5354 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE); 5347 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE);
5355 __ b(ge, &string1); 5348 __ b(ge, &string1);
5356 5349
5357 // First and second argument are strings. 5350 // First and second argument are strings.
5358 __ TailCallRuntime(ExternalReference(Runtime::kStringAdd), 2, 1); 5351 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
5352 __ TailCallStub(&stub);
5359 5353
5360 // Only first argument is a string. 5354 // Only first argument is a string.
5361 __ bind(&string1); 5355 __ bind(&string1);
5362 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_JS); 5356 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_JS);
5363 5357
5364 // First argument was not a string, test second. 5358 // First argument was not a string, test second.
5365 __ bind(&not_string1); 5359 __ bind(&not_string1);
5366 __ tst(r0, Operand(kSmiTagMask)); 5360 __ tst(r0, Operand(kSmiTagMask));
5367 __ b(eq, &not_strings); 5361 __ b(eq, &not_strings);
5368 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE); 5362 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE);
5369 __ b(ge, &not_strings); 5363 __ b(ge, &not_strings);
5370 5364
5371 // Only second argument is a string. 5365 // Only second argument is a string.
5372 __ b(&not_strings);
5373 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_JS); 5366 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_JS);
5374 5367
5375 __ bind(&not_strings); 5368 __ bind(&not_strings);
5376 } 5369 }
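The string tests above combine a smi check with an instance-type compare. A sketch of the predicate being computed (FIRST_NONSTRING_TYPE is the real V8 name, but its numeric value below is an illustrative assumption):

  #include <cstdint>

  bool IsHeapString(uint32_t value, uint32_t instance_type) {
    const uint32_t kSmiTagMask = 1;
    const uint32_t kFirstNonstringType = 0x80;  // illustrative, not the real value
    if ((value & kSmiTagMask) == 0) return false;  // b(eq, ...): smis are not strings
    return instance_type < kFirstNonstringType;    // b(ge, ...) rejects non-strings
  }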
5377 5370
5378 __ InvokeBuiltin(builtin, JUMP_JS); // Tail call. No return. 5371 __ InvokeBuiltin(builtin, JUMP_JS); // Tail call. No return.
5379 5372
5380 // We branch here if at least one of r0 and r1 is not a Smi. 5373 // We branch here if at least one of r0 and r1 is not a Smi.
5381 __ bind(not_smi); 5374 __ bind(not_smi);
5382 if (mode == NO_OVERWRITE) { 5375 if (mode == NO_OVERWRITE) {
(...skipping 461 matching lines...)
5844 5837
5845 OS::SNPrintF(Vector<char>(name_, len), 5838 OS::SNPrintF(Vector<char>(name_, len),
5846 "GenericBinaryOpStub_%s_%s%s", 5839 "GenericBinaryOpStub_%s_%s%s",
5847 op_name, 5840 op_name,
5848 overwrite_name, 5841 overwrite_name,
5849 specialized_on_rhs_ ? "_ConstantRhs" : 0); 5842 specialized_on_rhs_ ? "_ConstantRhs" : 0);
5850 return name_; 5843 return name_;
5851 } 5844 }
5852 5845
5853 5846
5847
5854 void GenericBinaryOpStub::Generate(MacroAssembler* masm) { 5848 void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
5855 // r1 : x 5849 // r1 : x
5856 // r0 : y 5850 // r0 : y
5857 // result : r0 5851 // result : r0
5858 5852
5859 // All ops need to know whether we are dealing with two Smis. Set up r2 to 5853 // All ops need to know whether we are dealing with two Smis. Set up r2 to
5860 // tell us that. 5854 // tell us that.
5861 __ orr(r2, r1, Operand(r0)); // r2 = x | y; 5855 __ orr(r2, r1, Operand(r0)); // r2 = x | y;
5862 5856
5863 switch (op_) { 5857 switch (op_) {
(...skipping 172 matching lines...)
6036 Label slow; 6030 Label slow;
6037 ASSERT(kSmiTag == 0); // adjust code below 6031 ASSERT(kSmiTag == 0); // adjust code below
6038 __ tst(r2, Operand(kSmiTagMask)); 6032 __ tst(r2, Operand(kSmiTagMask));
6039 __ b(ne, &slow); 6033 __ b(ne, &slow);
6040 switch (op_) { 6034 switch (op_) {
6041 case Token::BIT_OR: __ orr(r0, r0, Operand(r1)); break; 6035 case Token::BIT_OR: __ orr(r0, r0, Operand(r1)); break;
6042 case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break; 6036 case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break;
6043 case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break; 6037 case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break;
6044 case Token::SAR: 6038 case Token::SAR:
6045 // Remove tags from right operand. 6039 // Remove tags from right operand.
6046 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y 6040 __ GetLeastBitsFromSmi(r2, r0, 5);
6047 // Use only the 5 least significant bits of the shift count.
6048 __ and_(r2, r2, Operand(0x1f));
6049 __ mov(r0, Operand(r1, ASR, r2)); 6041 __ mov(r0, Operand(r1, ASR, r2));
6050 // Smi tag result. 6042 // Smi tag result.
6051 __ bic(r0, r0, Operand(kSmiTagMask)); 6043 __ bic(r0, r0, Operand(kSmiTagMask));
6052 break; 6044 break;
6053 case Token::SHR: 6045 case Token::SHR:
6054 // Remove tags from operands. We can't do this on a 31 bit number 6046 // Remove tags from operands. We can't do this on a 31 bit number
6055 // because then the 0s get shifted into bit 30 instead of bit 31. 6047 // because then the 0s get shifted into bit 30 instead of bit 31.
6056 __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x 6048 __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x
6057 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y 6049 __ GetLeastBitsFromSmi(r2, r0, 5);
6058 // Use only the 5 least significant bits of the shift count.
6059 __ and_(r2, r2, Operand(0x1f));
6060 __ mov(r3, Operand(r3, LSR, r2)); 6050 __ mov(r3, Operand(r3, LSR, r2));
6061 // Unsigned shift is not allowed to produce a negative number, so 6051 // Unsigned shift is not allowed to produce a negative number, so
6062 // check the sign bit and the sign bit after Smi tagging. 6052 // check the sign bit and the sign bit after Smi tagging.
6063 __ tst(r3, Operand(0xc0000000)); 6053 __ tst(r3, Operand(0xc0000000));
6064 __ b(ne, &slow); 6054 __ b(ne, &slow);
6065 // Smi tag result. 6055 // Smi tag result.
6066 __ mov(r0, Operand(r3, LSL, kSmiTagSize)); 6056 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
6067 break; 6057 break;
6068 case Token::SHL: 6058 case Token::SHL:
6069 // Remove tags from operands. 6059 // Remove tags from operands.
6070 __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x 6060 __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x
6071 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y 6061 __ GetLeastBitsFromSmi(r2, r0, 5);
6072 // Use only the 5 least significant bits of the shift count.
6073 __ and_(r2, r2, Operand(0x1f));
6074 __ mov(r3, Operand(r3, LSL, r2)); 6062 __ mov(r3, Operand(r3, LSL, r2));
6075 // Check that the signed result fits in a Smi. 6063 // Check that the signed result fits in a Smi.
6076 __ add(r2, r3, Operand(0x40000000), SetCC); 6064 __ add(r2, r3, Operand(0x40000000), SetCC);
6077 __ b(mi, &slow); 6065 __ b(mi, &slow);
6078 __ mov(r0, Operand(r3, LSL, kSmiTagSize)); 6066 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
6079 break; 6067 break;
6080 default: UNREACHABLE(); 6068 default: UNREACHABLE();
6081 } 6069 }
6082 __ Ret(); 6070 __ Ret();
6083 __ bind(&slow); 6071 __ bind(&slow);
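A sketch of the guards in the shift cases above (assuming 31-bit Smi payloads): JS shift counts use only the low five bits, SHR must not produce a value with bit 31 or 30 set, and SHL is range-checked by biasing with 0x40000000:

  #include <cstdint>

  uint32_t ShiftCountFromSmi(uint32_t smi) {
    return (smi >> 1) & 0x1f;  // what GetLeastBitsFromSmi(r2, r0, 5) extracts
  }

  bool ShrResultFitsSmi(uint32_t r) {
    return (r & 0xc0000000u) == 0;  // tst(r3, 0xc0000000); b(ne, &slow)
  }

  bool ShlResultFitsSmi(uint32_t r) {
    // Biasing by 2^30 goes negative (mi) exactly when r is outside
    // [-2^30, 2^30 - 1], mirroring add(..., SetCC); b(mi, &slow).
    return ((r + 0x40000000u) & 0x80000000u) == 0;
  }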
(...skipping 387 matching lines...)
6471 // Called from C, so do not pop argc and args on exit (preserve sp) 6459 // Called from C, so do not pop argc and args on exit (preserve sp)
6472 // No need to save register-passed args 6460 // No need to save register-passed args
6473 // Save callee-saved registers (incl. cp and fp), sp, and lr 6461 // Save callee-saved registers (incl. cp and fp), sp, and lr
6474 __ stm(db_w, sp, kCalleeSaved | lr.bit()); 6462 __ stm(db_w, sp, kCalleeSaved | lr.bit());
6475 6463
6476 // Get address of argv, see stm above. 6464 // Get address of argv, see stm above.
6477 // r0: code entry 6465 // r0: code entry
6478 // r1: function 6466 // r1: function
6479 // r2: receiver 6467 // r2: receiver
6480 // r3: argc 6468 // r3: argc
6481 __ add(r4, sp, Operand((kNumCalleeSaved + 1)*kPointerSize)); 6469 __ ldr(r4, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize)); // argv
6482 __ ldr(r4, MemOperand(r4)); // argv
6483 6470
6484 // Push a frame with special values set up to mark it as an entry frame. 6471 // Push a frame with special values set up to mark it as an entry frame.
6485 // r0: code entry 6472 // r0: code entry
6486 // r1: function 6473 // r1: function
6487 // r2: receiver 6474 // r2: receiver
6488 // r3: argc 6475 // r3: argc
6489 // r4: argv 6476 // r4: argv
6490 __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used. 6477 __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
6491 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 6478 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
6492 __ mov(r7, Operand(Smi::FromInt(marker))); 6479 __ mov(r7, Operand(Smi::FromInt(marker)));
(...skipping 337 matching lines...)
6830 6817
6831 int CompareStub::MinorKey() { 6818 int CompareStub::MinorKey() {
6832 // Encode the three parameters in a unique 16 bit value. 6819 // Encode the three parameters in a unique 16 bit value.
6833 ASSERT((static_cast<unsigned>(cc_) >> 26) < (1 << 16)); 6820 ASSERT((static_cast<unsigned>(cc_) >> 26) < (1 << 16));
6834 int nnn_value = (never_nan_nan_ ? 2 : 0); 6821 int nnn_value = (never_nan_nan_ ? 2 : 0);
6835 if (cc_ != eq) nnn_value = 0; // Avoid duplicate stubs. 6822 if (cc_ != eq) nnn_value = 0; // Avoid duplicate stubs.
6836 return (static_cast<unsigned>(cc_) >> 26) | nnn_value | (strict_ ? 1 : 0); 6823 return (static_cast<unsigned>(cc_) >> 26) | nnn_value | (strict_ ? 1 : 0);
6837 } 6824 }
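A sketch of the packing above: ARM condition codes live in the top four bits of an instruction word, so cc_ >> 26 yields an integer whose low two bits are zero, leaving bits 1 and 0 free for the flags:

  #include <cstdint>

  int CompareMinorKey(uint32_t cc, bool never_nan_nan, bool strict, bool is_eq) {
    int nnn = (is_eq && never_nan_nan) ? 2 : 0;  // only eq stubs vary on NaN
    return static_cast<int>(cc >> 26) | nnn | (strict ? 1 : 0);
  }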
6838 6825
6839 6826
6827 void StringStubBase::GenerateCopyCharacters(MacroAssembler* masm,
6828 Register dest,
6829 Register src,
6830 Register count,
6831 Register scratch,
6832 bool ascii) {
6833 Label loop;
6834 Label done;
6835 // This loop just copies one character at a time, as it is only used for very
6836 // short strings.
6837 if (!ascii) {
6838 __ add(count, count, Operand(count), SetCC);
6839 } else {
6840 __ cmp(count, Operand(0));
6841 }
6842 __ b(eq, &done);
6843
6844 __ bind(&loop);
6845 __ ldrb(scratch, MemOperand(src, 1, PostIndex));
6846 // Perform sub between load and dependent store to get the load time to
6847 // complete.
6848 __ sub(count, count, Operand(1), SetCC);
6849 __ strb(scratch, MemOperand(dest, 1, PostIndex));
6850 // Loop back unless that was the last iteration.
6851 __ b(gt, &loop);
6852
6853 __ bind(&done);
6854 }
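In plain C++, the loop above amounts to the following; the two-byte case doubles the character count up front via add(count, count, count):

  #include <cstddef>
  #include <cstdint>

  void CopyCharacters(uint8_t* dest, const uint8_t* src, size_t count, bool ascii) {
    size_t bytes = ascii ? count : count * 2;  // two-byte strings move 2*count bytes
    while (bytes-- > 0) {
      *dest++ = *src++;  // ldrb/strb with PostIndex, one byte per iteration
    }
  }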
6855
6856
6857 enum CopyCharactersFlags {
6858 COPY_ASCII = 1,
6859 DEST_ALWAYS_ALIGNED = 2
6860 };
6861
6862
6863 void StringStubBase::GenerateCopyCharactersLong(MacroAssembler* masm,
6864 Register dest,
6865 Register src,
6866 Register count,
6867 Register scratch1,
6868 Register scratch2,
6869 Register scratch3,
6870 Register scratch4,
6871 Register scratch5,
6872 int flags) {
6873 bool ascii = (flags & COPY_ASCII) != 0;
6874 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
6875
6876 if (dest_always_aligned && FLAG_debug_code) {
6877 // Check that destination is actually word aligned if the flag says
6878 // that it is.
6879 __ tst(dest, Operand(kPointerAlignmentMask));
6880 __ Check(eq, "Destination of copy not aligned.");
6881 }
6882
6883 const int kReadAlignment = 4;
6884 const int kReadAlignmentMask = kReadAlignment - 1;
6885 // Ensure that reading an entire aligned word containing the last character
6886 // of a string will not read outside the allocated area (because we pad up
6887 // to kObjectAlignment).
6888 ASSERT(kObjectAlignment >= kReadAlignment);
6889 // Assumes word reads and writes are little endian.
6890 // Nothing to do for zero characters.
6891 Label done;
6892 if (!ascii) {
6893 __ add(count, count, Operand(count), SetCC);
6894 } else {
6895 __ cmp(count, Operand(0));
6896 }
6897 __ b(eq, &done);
6898
6899 // Assume that you cannot read (or write) unaligned.
6900 Label byte_loop;
6901 // Must copy at least eight bytes, otherwise just do it one byte at a time.
6902 __ cmp(count, Operand(8));
6903 __ add(count, dest, Operand(count));
6904 Register limit = count; // Read until src equals this.
6905 __ b(lt, &byte_loop);
6906
6907 if (!dest_always_aligned) {
6908 // Align dest by byte copying. Copies between zero and three bytes.
6909 __ and_(scratch4, dest, Operand(kReadAlignmentMask), SetCC);
6910 Label dest_aligned;
6911 __ b(eq, &dest_aligned);
6912 __ cmp(scratch4, Operand(2));
6913 __ ldrb(scratch1, MemOperand(src, 1, PostIndex));
6914 __ ldrb(scratch2, MemOperand(src, 1, PostIndex), le);
6915 __ ldrb(scratch3, MemOperand(src, 1, PostIndex), lt);
6916 __ strb(scratch1, MemOperand(dest, 1, PostIndex));
6917 __ strb(scratch2, MemOperand(dest, 1, PostIndex), le);
6918 __ strb(scratch3, MemOperand(dest, 1, PostIndex), lt);
6919 __ bind(&dest_aligned);
6920 }
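The predicated sequence above copies the 0-3 bytes needed to word-align dest without branching: after cmp(scratch4, 2), the first ldrb/strb pair always runs, the second only on le (misalignment <= 2), the third only on lt (misalignment == 1). Equivalent branchy C++:

  #include <cstdint>

  void AlignDest(uint8_t*& dest, const uint8_t*& src) {
    uint32_t misalign = reinterpret_cast<uintptr_t>(dest) & 3;
    if (misalign == 0) return;        // b(eq, &dest_aligned)
    uint32_t to_copy = 4 - misalign;  // 1, 2, or 3 bytes
    while (to_copy-- > 0) *dest++ = *src++;
  }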
6921
6922 Label simple_loop;
6923
6924 __ sub(scratch4, dest, Operand(src));
6925 __ and_(scratch4, scratch4, Operand(0x03), SetCC);
6926 __ b(eq, &simple_loop);
6927 // Shift register is number of bits in a source word that
6928 // must be combined with bits in the next source word in order
6929 // to create a destination word.
6930
6931 // Complex loop for src/dst that are not aligned the same way.
6932 {
6933 Label loop;
6934 __ mov(scratch4, Operand(scratch4, LSL, 3));
6935 Register left_shift = scratch4;
6936 __ and_(src, src, Operand(~3)); // Round down to load previous word.
6937 __ ldr(scratch1, MemOperand(src, 4, PostIndex));
6938 // Store the "shift" most significant bits of scratch in the least
6939 // significant bits (i.e., shift down by (32-shift)).
6940 __ rsb(scratch2, left_shift, Operand(32));
6941 Register right_shift = scratch2;
6942 __ mov(scratch1, Operand(scratch1, LSR, right_shift));
6943
6944 __ bind(&loop);
6945 __ ldr(scratch3, MemOperand(src, 4, PostIndex));
6946 __ sub(scratch5, limit, Operand(dest));
6947 __ orr(scratch1, scratch1, Operand(scratch3, LSL, left_shift));
6948 __ str(scratch1, MemOperand(dest, 4, PostIndex));
6949 __ mov(scratch1, Operand(scratch3, LSR, right_shift));
6950 // Loop if four or more bytes left to copy.
6951 // Compare to eight, because we did the subtract before increasing dst.
6952 __ sub(scratch5, scratch5, Operand(8), SetCC);
6953 __ b(ge, &loop);
6954 }
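A sketch of the shift-combine step inside that loop (little-endian, as the code assumes; offset_bytes is the relative misalignment 1..3, so the shifts are 8..24 and never 0 or 32):

  #include <cstdint>

  uint32_t CombineWords(uint32_t prev, uint32_t next, int offset_bytes) {
    int left_shift = offset_bytes * 8;  // mov(scratch4, Operand(scratch4, LSL, 3))
    int right_shift = 32 - left_shift;  // rsb(scratch2, left_shift, Operand(32))
    // orr(scratch1, scratch1, Operand(scratch3, LSL, left_shift))
    return (prev >> right_shift) | (next << left_shift);
  }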
6955 // There is now between zero and three bytes left to copy (negative that
6956 // number is in scratch5), and between one and three bytes already read into
6957 // scratch1 (eight times that number in scratch4). We may have read past
6958 // the end of the string, but because objects are aligned, we have not read
6959 // past the end of the object.
6960 // Find the minimum of remaining characters to move and preloaded characters
6961 // and write those as bytes.
6962 __ add(scratch5, scratch5, Operand(4), SetCC);
6963 __ b(eq, &done);
6964 __ cmp(scratch4, Operand(scratch5, LSL, 3), ne);
6965 // Move minimum of bytes read and bytes left to copy to scratch5.
6966 __ mov(scratch5, Operand(scratch4, LSR, 3), LeaveCC, lt);
6967 // Between one and three (value in scratch5) characters already read into
6968 // scratch ready to write.
6969 __ cmp(scratch5, Operand(2));
6970 __ strb(scratch1, MemOperand(dest, 1, PostIndex));
6971 __ mov(scratch1, Operand(scratch1, LSR, 8), LeaveCC, ge);
6972 __ strb(scratch1, MemOperand(dest, 1, PostIndex), ge);
6973 __ mov(scratch1, Operand(scratch1, LSR, 8), LeaveCC, gt);
6974 __ strb(scratch1, MemOperand(dest, 1, PostIndex), gt);
6975 // Copy any remaining bytes.
6976 __ b(&byte_loop);
6977
6978 // Simple loop.
6979 // Copy words from src to dst, until less than four bytes left.
6980 // Both src and dest are word aligned.
6981 __ bind(&simple_loop);
6982 {
6983 Label loop;
6984 __ bind(&loop);
6985 __ ldr(scratch1, MemOperand(src, 4, PostIndex));
6986 __ sub(scratch3, limit, Operand(dest));
6987 __ str(scratch1, MemOperand(dest, 4, PostIndex));
6988 // Compare to 8, not 4, because we do the subtraction before increasing
6989 // dest.
6990 __ cmp(scratch3, Operand(8));
6991 __ b(ge, &loop);
6992 }
6993
6994 // Copy bytes from src to dst until dst hits limit.
6995 __ bind(&byte_loop);
6996 __ cmp(dest, Operand(limit));
6997 __ ldrb(scratch1, MemOperand(src, 1, PostIndex), lt);
6998 __ b(ge, &done);
6999 __ strb(scratch1, MemOperand(dest, 1, PostIndex));
7000 __ b(&byte_loop);
7001
7002 __ bind(&done);
7003 }
7004
7005
7006 void SubStringStub::Generate(MacroAssembler* masm) {
7007 Label runtime;
7008
7009 // Stack frame on entry.
7010 // lr: return address
7011 // sp[0]: to
7012 // sp[4]: from
7013 // sp[8]: string
7014
7015 // This stub is called from the native-call %_SubString(...), so
7016 // nothing can be assumed about the arguments. It is tested that:
7017 // "string" is a sequential string,
7018 // both "from" and "to" are smis, and
7019 // 0 <= from <= to <= string.length.
7020 // If any of these assumptions fail, we call the runtime system.
7021
7022 static const int kToOffset = 0 * kPointerSize;
7023 static const int kFromOffset = 1 * kPointerSize;
7024 static const int kStringOffset = 2 * kPointerSize;
7025
7026
7027 // Check bounds and smi-ness.
7028 __ ldr(r7, MemOperand(sp, kToOffset));
7029 __ ldr(r6, MemOperand(sp, kFromOffset));
7030 ASSERT_EQ(0, kSmiTag);
7031 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
7032 // I.e., arithmetic shift right by one un-smi-tags.
7033 __ mov(r2, Operand(r7, ASR, 1), SetCC);
7034 __ mov(r3, Operand(r6, ASR, 1), SetCC, cc);
7035 // If either r7 or r6 had the smi tag bit set, then carry is set now.
7036 __ b(cs, &runtime); // Either "from" or "to" is not a smi.
7037 __ b(mi, &runtime); // From is negative.
7038
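The ASR-with-SetCC pair above folds both smi checks into the carry flag: shifting right by one moves the tag bit (bit 0) into carry, and the second mov is predicated on cc, so a set carry from either value survives to the b(cs) test. The predicate it computes:

  #include <cstdint>

  bool EitherNotSmi(uint32_t to, uint32_t from) {
    return ((to | from) & 1) != 0;  // tag bit set on either => not a smi
  }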
7039 __ sub(r2, r2, Operand(r3), SetCC);
7040 __ b(mi, &runtime); // Fail if from > to.
7041 // Handle sub-strings of length 2 and less in the runtime system.
7042 __ cmp(r2, Operand(2));
7043 __ b(le, &runtime);
7044
7045 // r2: length
7046 // r6: from (smi)
7047 // r7: to (smi)
7048
7049 // Make sure first argument is a sequential (or flat) string.
7050 __ ldr(r5, MemOperand(sp, kStringOffset));
7051 ASSERT_EQ(0, kSmiTag);
7052 __ tst(r5, Operand(kSmiTagMask));
7053 __ b(eq, &runtime);
7054 Condition is_string = masm->IsObjectStringType(r5, r1);
7055 __ b(NegateCondition(is_string), &runtime);
7056
7057 // r1: instance type
7058 // r2: length
7059 // r5: string
7060 // r6: from (smi)
7061 // r7: to (smi)
7062 Label seq_string;
7063 __ and_(r4, r1, Operand(kStringRepresentationMask));
7064 ASSERT(kSeqStringTag < kConsStringTag);
7065 ASSERT(kExternalStringTag > kConsStringTag);
7066 __ cmp(r4, Operand(kConsStringTag));
7067 __ b(gt, &runtime); // External strings go to runtime.
7068 __ b(lt, &seq_string); // Sequential strings are handled directly.
7069
7070 // Cons string. Try to recurse (once) on the first substring.
7071 // (This adds a little more generality than necessary to handle flattened
7072 // cons strings, but not much).
7073 __ ldr(r5, FieldMemOperand(r5, ConsString::kFirstOffset));
7074 __ ldr(r4, FieldMemOperand(r5, HeapObject::kMapOffset));
7075 __ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset));
7076 __ tst(r1, Operand(kStringRepresentationMask));
7077 ASSERT_EQ(0, kSeqStringTag);
7078 __ b(ne, &runtime); // Cons and External strings go to runtime.
7079
7080 // Definitely a sequential string.
7081 __ bind(&seq_string);
7082
7083 // r1: instance type.
7084 // r2: length
7085 // r5: string
7086 // r6: from (smi)
7087 // r7: to (smi)
7088 __ ldr(r4, FieldMemOperand(r5, String::kLengthOffset));
7089 __ cmp(r4, Operand(r7, ASR, 1));
7090 __ b(lt, &runtime); // Fail if to > length.
7091
7092 // r1: instance type.
7093 // r2: result string length.
7094 // r5: string.
7095 // r6: from offset (smi)
7096 // Check for flat ascii string.
7097 Label non_ascii_flat;
7098 __ tst(r1, Operand(kStringEncodingMask));
7099 ASSERT_EQ(0, kTwoByteStringTag);
7100 __ b(eq, &non_ascii_flat);
7101
7102 // Allocate the result.
7103 __ AllocateAsciiString(r0, r2, r3, r4, r1, &runtime);
7104
7105 // r0: result string.
7106 // r2: result string length.
7107 // r5: string.
7108 // r6: from offset (smi)
7109 // Locate first character of result.
7110 __ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
7111 // Locate 'from' character of string.
7112 __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
7113 __ add(r5, r5, Operand(r6, ASR, 1));
7114
7115 // r0: result string.
7116 // r1: first character of result string.
7117 // r2: result string length.
7118 // r5: first character of sub string to copy.
7119 ASSERT_EQ(0, SeqAsciiString::kHeaderSize & kObjectAlignmentMask);
7120 GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
7121 COPY_ASCII | DEST_ALWAYS_ALIGNED);
7122 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
7123 __ add(sp, sp, Operand(3 * kPointerSize));
7124 __ Ret();
7125
7126 __ bind(&non_ascii_flat);
7127 // r2: result string length.
7128 // r5: string.
7129 // r6: from offset (smi)
7130 // Check for flat two byte string.
7131
7132 // Allocate the result.
7133 __ AllocateTwoByteString(r0, r2, r1, r3, r4, &runtime);
7134
7135 // r0: result string.
7136 // r2: result string length.
7137 // r5: string.
7138 // Locate first character of result.
7139 __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
7140 // Locate 'from' character of string.
7141 __ add(r5, r5, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
7142 // As "from" is a smi it is 2 times the value which matches the size of a two
7143 // byte character.
7144 __ add(r5, r5, Operand(r6));
7145
7146 // r0: result string.
7147 // r1: first character of result.
7148 // r2: result length.
7149 // r5: first character of string to copy.
7150 ASSERT_EQ(0, SeqTwoByteString::kHeaderSize & kObjectAlignmentMask);
7151 GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
7152 DEST_ALWAYS_ALIGNED);
7153 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
7154 __ add(sp, sp, Operand(3 * kPointerSize));
7155 __ Ret();
7156
7157 // Just jump to runtime to create the sub string.
7158 __ bind(&runtime);
7159 __ TailCallRuntime(ExternalReference(Runtime::kSubString), 3, 1);
7160 }
6840 7161
6841 7162
6842 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, 7163 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
6843 Register left, 7164 Register left,
6844 Register right, 7165 Register right,
6845 Register scratch1, 7166 Register scratch1,
6846 Register scratch2, 7167 Register scratch2,
6847 Register scratch3, 7168 Register scratch3,
6848 Register scratch4) { 7169 Register scratch4) {
6849 Label compare_lengths; 7170 Label compare_lengths;
(...skipping 41 matching lines...)
6891 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt); 7212 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt);
6892 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt); 7213 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt);
6893 __ Ret(); 7214 __ Ret();
6894 } 7215 }
6895 7216
6896 7217
6897 void StringCompareStub::Generate(MacroAssembler* masm) { 7218 void StringCompareStub::Generate(MacroAssembler* masm) {
6898 Label runtime; 7219 Label runtime;
6899 7220
6900 // Stack frame on entry. 7221 // Stack frame on entry.
6901 // sp[0]: return address 7222 // sp[0]: right string
6902 // sp[4]: right string 7223 // sp[4]: left string
6903 // sp[8]: left string 7224 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // left
6904 7225 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // right
6905 __ ldr(r0, MemOperand(sp, 2 * kPointerSize)); // left
6906 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); // right
6907 7226
6908 Label not_same; 7227 Label not_same;
6909 __ cmp(r0, r1); 7228 __ cmp(r0, r1);
6910 __ b(ne, &not_same); 7229 __ b(ne, &not_same);
6911 ASSERT_EQ(0, EQUAL); 7230 ASSERT_EQ(0, EQUAL);
6912 ASSERT_EQ(0, kSmiTag); 7231 ASSERT_EQ(0, kSmiTag);
6913 __ mov(r0, Operand(Smi::FromInt(EQUAL))); 7232 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
6914 __ IncrementCounter(&Counters::string_compare_native, 1, r1, r2); 7233 __ IncrementCounter(&Counters::string_compare_native, 1, r1, r2);
6915 __ add(sp, sp, Operand(2 * kPointerSize)); 7234 __ add(sp, sp, Operand(2 * kPointerSize));
6916 __ Ret(); 7235 __ Ret();
6917 7236
6918 __ bind(&not_same); 7237 __ bind(&not_same);
6919 7238
6920 // Check that both objects are sequential ascii strings. 7239 // Check that both objects are sequential ascii strings.
6921 __ JumpIfNotBothSequentialAsciiStrings(r0, r1, r2, r3, &runtime); 7240 __ JumpIfNotBothSequentialAsciiStrings(r0, r1, r2, r3, &runtime);
6922 7241
6923 // Compare flat ascii strings natively. Remove arguments from stack first. 7242 // Compare flat ascii strings natively. Remove arguments from stack first.
6924 __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3); 7243 __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
6925 __ add(sp, sp, Operand(2 * kPointerSize)); 7244 __ add(sp, sp, Operand(2 * kPointerSize));
6926 GenerateCompareFlatAsciiStrings(masm, r0, r1, r2, r3, r4, r5); 7245 GenerateCompareFlatAsciiStrings(masm, r0, r1, r2, r3, r4, r5);
6927 7246
6928 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 7247 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
6929 // tagged as a small integer. 7248 // tagged as a small integer.
6930 __ bind(&runtime); 7249 __ bind(&runtime);
6931 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1); 7250 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1);
6932 } 7251 }
6933 7252
6934 7253
7254 void StringAddStub::Generate(MacroAssembler* masm) {
7255 Label string_add_runtime;
7256 // Stack on entry:
7257 // sp[0]: second argument.
7258 // sp[4]: first argument.
7259
7260 // Load the two arguments.
7261 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // First argument.
7262 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // Second argument.
7263
7264 // Make sure that both arguments are strings if not known in advance.
7265 if (string_check_) {
7266 ASSERT_EQ(0, kSmiTag);
7267 __ JumpIfEitherSmi(r0, r1, &string_add_runtime);
7268 // Load instance types.
7269 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
7270 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
7271 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
7272 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
7273 ASSERT_EQ(0, kStringTag);
7274 // If either is not a string, go to runtime.
7275 __ tst(r4, Operand(kIsNotStringMask));
7276 __ tst(r5, Operand(kIsNotStringMask), eq);
7277 __ b(ne, &string_add_runtime);
7278 }
7279
7280 // Both arguments are strings.
7281 // r0: first string
7282 // r1: second string
7283 // r4: first string instance type (if string_check_)
7284 // r5: second string instance type (if string_check_)
7285 {
7286 Label strings_not_empty;
7287 // Check if either of the strings are empty. In that case return the other.
7288 __ ldr(r2, FieldMemOperand(r0, String::kLengthOffset));
7289 __ ldr(r3, FieldMemOperand(r1, String::kLengthOffset));
7290 __ cmp(r2, Operand(0)); // Test if first string is empty.
7291 __ mov(r0, Operand(r1), LeaveCC, eq); // If first is empty, return second.
7292 __ cmp(r3, Operand(0), ne); // Else test if second string is empty.
7293 __ b(ne, &strings_not_empty); // If either string was empty, return r0.
7294
7295 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
7296 __ add(sp, sp, Operand(2 * kPointerSize));
7297 __ Ret();
7298
7299 __ bind(&strings_not_empty);
7300 }
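The block above returns early without one branch per test: the second cmp is predicated on ne (first length nonzero), and the conditional mov has already placed the second string in r0 when the first was empty. As straight-line logic (hypothetical helper, not V8 API):

  #include <cstdint>

  // Returns the string to hand back when either operand is empty,
  // or nullptr when real concatenation is needed.
  const char* StringAddShortcut(const char* first, uint32_t len1,
                                const char* second, uint32_t len2) {
    if (len1 == 0) return second;  // mov(r0, Operand(r1), LeaveCC, eq)
    if (len2 == 0) return first;   // cmp(r3, Operand(0), ne); b(ne, ...)
    return nullptr;
  }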
7301
7302 // Both strings are non-empty.
7303 // r0: first string
7304 // r1: second string
7305 // r2: length of first string
7306 // r3: length of second string
7307 // r4: first string instance type (if string_check_)
7308 // r5: second string instance type (if string_check_)
7309 // Look at the length of the result of adding the two strings.
7310 Label string_add_flat_result;
7311 // Adding two lengths can't overflow.
7312 ASSERT(String::kMaxLength * 2 > String::kMaxLength);
7313 __ add(r6, r2, Operand(r3));
 7314 // Use the runtime system when adding two one-character strings, as it
7315 // contains optimizations for this specific case using the symbol table.
7316 __ cmp(r6, Operand(2));
7317 __ b(eq, &string_add_runtime);
7318 // Check if resulting string will be flat.
7319 __ cmp(r6, Operand(String::kMinNonFlatLength));
7320 __ b(lt, &string_add_flat_result);
7321 // Handle exceptionally long strings in the runtime system.
7322 ASSERT((String::kMaxLength & 0x80000000) == 0);
7323 ASSERT(IsPowerOf2(String::kMaxLength + 1));
 7324 // kMaxLength + 1 is representable as a shifted literal, kMaxLength is not.
7325 __ cmp(r6, Operand(String::kMaxLength + 1));
7326 __ b(hs, &string_add_runtime);
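
Taken together, the three comparisons dispatch on the combined length: a length-2 result goes to the runtime for its symbol-table shortcut, short results are built flat, over-long results go to the runtime, and everything in between becomes a cons string. A sketch of that dispatch (the two threshold parameters stand in for String::kMinNonFlatLength and String::kMaxLength):

    enum AddPath { kViaRuntime, kFlatResult, kConsString };

    // Mirrors the cmp/b sequence above; cmp kMaxLength + 1 with b(hs)
    // is the unsigned form of sum > kMaxLength.
    AddPath ChooseAddPath(int sum, int kMinNonFlatLength, int kMaxLength) {
      if (sum == 2) return kViaRuntime;          // symbol-table shortcut
      if (sum < kMinNonFlatLength) return kFlatResult;
      if (sum > kMaxLength) return kViaRuntime;  // exceptionally long
      return kConsString;
    }
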
7327
 7328 // If the result is not supposed to be flat, allocate a cons string object.
 7329 // If both strings are ASCII, the result is an ASCII cons string.
7330 if (!string_check_) {
7331 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
7332 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
7333 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
7334 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
7335 }
7336 Label non_ascii, allocated;
7337 ASSERT_EQ(0, kTwoByteStringTag);
7338 __ tst(r4, Operand(kStringEncodingMask));
7339 __ tst(r5, Operand(kStringEncodingMask), ne);
7340 __ b(eq, &non_ascii);
7341
7342 // Allocate an ASCII cons string.
7343 __ AllocateAsciiConsString(r7, r6, r4, r5, &string_add_runtime);
7344 __ bind(&allocated);
7345 // Fill the fields of the cons string.
7346 __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
7347 __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
7348 __ mov(r0, Operand(r7));
7349 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
7350 __ add(sp, sp, Operand(2 * kPointerSize));
7351 __ Ret();
7352
7353 __ bind(&non_ascii);
 7354 // Allocate a two-byte cons string.
7355 __ AllocateTwoByteConsString(r7, r6, r4, r5, &string_add_runtime);
7356 __ jmp(&allocated);
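
The cons representation makes concatenation O(1): only a header with the combined length and two pointers is allocated, and the character data of both operands stays where it is. A sketch of the shape being filled in above (field names are illustrative stand-ins for the kFirstOffset/kSecondOffset stores):

    struct StringModel;  // stands in for any heap string

    // Lazy concatenation: the result records its two halves.
    struct ConsStringModel {
      int length;                  // the sum computed into r6
      const StringModel* first;   // left operand (r0)
      const StringModel* second;  // right operand (r1)
    };
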
7357
7358 // Handle creating a flat result. First check that both strings are
7359 // sequential and that they have the same encoding.
7360 // r0: first string
7361 // r1: second string
7362 // r2: length of first string
7363 // r3: length of second string
7364 // r4: first string instance type (if string_check_)
7365 // r5: second string instance type (if string_check_)
7366 // r6: sum of lengths.
7367 __ bind(&string_add_flat_result);
7368 if (!string_check_) {
7369 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
7370 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
7371 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
7372 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
7373 }
7374 // Check that both strings are sequential.
7375 ASSERT_EQ(0, kSeqStringTag);
7376 __ tst(r4, Operand(kStringRepresentationMask));
7377 __ tst(r5, Operand(kStringRepresentationMask), eq);
7378 __ b(ne, &string_add_runtime);
7379 // Now check if both strings have the same encoding (ASCII/Two-byte).
7380 // r0: first string.
7381 // r1: second string.
7382 // r2: length of first string.
7383 // r3: length of second string.
 7384 // r6: sum of lengths.
7385 Label non_ascii_string_add_flat_result;
7386 ASSERT(IsPowerOf2(kStringEncodingMask)); // Just one bit to test.
7387 __ eor(r7, r4, Operand(r5));
7388 __ tst(r7, Operand(kStringEncodingMask));
7389 __ b(ne, &string_add_runtime);
7390 // And see if it's ASCII or two-byte.
7391 __ tst(r4, Operand(kStringEncodingMask));
7392 __ b(eq, &non_ascii_string_add_flat_result);
7393
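The eor/tst pair is a compact same-encoding test: XOR leaves bits set only where the two instance types differ, so masking with the one-bit encoding flag detects a mixed-encoding pair. An equivalent C++ sketch (kStringEncodingMask passed in as an assumption mirroring V8's flag):

    // True when both instance types carry the same encoding bit.
    bool SameEncoding(unsigned type1, unsigned type2,
                      unsigned kStringEncodingMask) {
      return ((type1 ^ type2) & kStringEncodingMask) == 0;
    }
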
7394 // Both strings are sequential ASCII strings. We also know that they are
7395 // short (since the sum of the lengths is less than kMinNonFlatLength).
7396 __ AllocateAsciiString(r7, r6, r4, r5, r9, &string_add_runtime);
7397 // Locate first character of result.
7398 __ add(r6, r7, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
7399 // Locate first character of first argument.
7400 __ add(r0, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
7401 // r0: first character of first string.
7402 // r1: second string.
7403 // r2: length of first string.
7404 // r3: length of second string.
7405 // r6: first character of result.
7406 // r7: result string.
7407 GenerateCopyCharacters(masm, r6, r0, r2, r4, true);
7408
7409 // Load second argument and locate first character.
7410 __ add(r1, r1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
7411 // r1: first character of second string.
7412 // r3: length of second string.
7413 // r6: next character of result.
7414 // r7: result string.
7415 GenerateCopyCharacters(masm, r6, r1, r3, r4, true);
7416 __ mov(r0, Operand(r7));
7417 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
7418 __ add(sp, sp, Operand(2 * kPointerSize));
7419 __ Ret();
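
The flat path therefore reduces to one allocation and two back-to-back character copies, with r6 advanced past the first part between the two GenerateCopyCharacters calls. A toy C++ model of that sequence (the ownership convention here is an assumption, not V8's):

    #include <cstring>

    // One sequential buffer of the combined length, filled by two
    // consecutive copies; the caller owns the result in this model.
    char* FlatConcatAscii(const char* s1, int len1,
                          const char* s2, int len2) {
      char* result = new char[len1 + len2];
      std::memcpy(result, s1, len1);         // first copy starts at r6
      std::memcpy(result + len1, s2, len2);  // r6 now past the first part
      return result;
    }
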
7420
7421 __ bind(&non_ascii_string_add_flat_result);
 7422 // Both strings are sequential two-byte strings.
7423 // r0: first string.
7424 // r1: second string.
7425 // r2: length of first string.
7426 // r3: length of second string.
 7427 // r6: sum of the string lengths.
7428 __ AllocateTwoByteString(r7, r6, r4, r5, r9, &string_add_runtime);
7429 // r0: first string.
7430 // r1: second string.
7431 // r2: length of first string.
7432 // r3: length of second string.
7433 // r7: result string.
7434
7435 // Locate first character of result.
7436 __ add(r6, r7, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
7437 // Locate first character of first argument.
7438 __ add(r0, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
7439
7440 // r0: first character of first string.
7441 // r1: second string.
7442 // r2: length of first string.
7443 // r3: length of second string.
7444 // r6: first character of result.
7445 // r7: result string.
7446 GenerateCopyCharacters(masm, r6, r0, r2, r4, false);
7447
7448 // Locate first character of second argument.
7449 __ add(r1, r1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
7450
7451 // r1: first character of second string.
7452 // r3: length of second string.
7453 // r6: next character of result (after copy of first string).
7454 // r7: result string.
7455 GenerateCopyCharacters(masm, r6, r1, r3, r4, false);
7456
7457 __ mov(r0, Operand(r7));
7458 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
7459 __ add(sp, sp, Operand(2 * kPointerSize));
7460 __ Ret();
7461
7462 // Just jump to runtime to add the two strings.
7463 __ bind(&string_add_runtime);
7464 __ TailCallRuntime(ExternalReference(Runtime::kStringAdd), 2, 1);
7465 }
7466
7467
6935 #undef __ 7468 #undef __
6936 7469
6937 } } // namespace v8::internal 7470 } } // namespace v8::internal