Chromium Code Reviews

Side by Side Diff: src/interpreter/interpreter.cc

Issue 1413863010: [Interpreter] Add wide variants of bytecodes with feedback and constant pool indexes. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@int_fixbuiltinstacklimit
Patch Set: Rebased Created 5 years, 1 month ago
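
Note on the pattern used throughout this patch: each handler body is rewritten against width-agnostic operand accessors (BytecodeOperandIdx, BytecodeOperandReg, BytecodeOperandImm, BytecodeOperandCount replacing the old 8/16-bit-specific variants), and each new *Wide bytecode delegates to the same helper as its narrow counterpart (e.g. DoLdaConstant and DoLdaConstantWide both call DoLoadConstant). The standalone toy below sketches only that structure; all names are hypothetical and it is not V8 code. In the actual patch the operand width is resolved inside the InterpreterAssembler accessors, whereas the toy passes an explicit flag just to stay self-contained.

    // Toy interpreter fragment (hypothetical names) showing the narrow/wide
    // delegation pattern: one handler body, written against a width-agnostic
    // operand reader, shared by both bytecode variants.
    #include <cstdint>
    #include <iostream>
    #include <vector>

    struct Frame {
      std::vector<int> constant_pool;
      int accumulator = 0;
    };

    // Width-agnostic index read, analogous to BytecodeOperandIdx hiding
    // whether the operand is one or two bytes wide.
    uint32_t ReadIdx(const uint8_t* operands, bool wide) {
      if (!wide) return operands[0];
      // 16-bit operand, low byte first in this toy encoding.
      return static_cast<uint32_t>(operands[0]) |
             (static_cast<uint32_t>(operands[1]) << 8);
    }

    // Shared body, analogous to Interpreter::DoLoadConstant in the patch.
    void DoLoadConstant(Frame& frame, const uint8_t* operands, bool wide) {
      uint32_t index = ReadIdx(operands, wide);
      frame.accumulator = frame.constant_pool[index];
    }

    // Narrow and wide handlers, analogous to LdaConstant / LdaConstantWide.
    void DoLdaConstant(Frame& f, const uint8_t* ops) { DoLoadConstant(f, ops, false); }
    void DoLdaConstantWide(Frame& f, const uint8_t* ops) { DoLoadConstant(f, ops, true); }

    int main() {
      Frame frame;
      frame.constant_pool = {10, 20, 30};
      uint8_t narrow_operand[] = {2};   // 8-bit constant pool index -> 30
      uint8_t wide_operand[] = {1, 0};  // 16-bit index 1 -> 20
      DoLdaConstant(frame, narrow_operand);
      std::cout << frame.accumulator << "\n";
      DoLdaConstantWide(frame, wide_operand);
      std::cout << frame.accumulator << "\n";
      return 0;
    }
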
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/interpreter/interpreter.h" 5 #include "src/interpreter/interpreter.h"
6 6
7 #include "src/code-factory.h" 7 #include "src/code-factory.h"
8 #include "src/compiler.h" 8 #include "src/compiler.h"
9 #include "src/compiler/interpreter-assembler.h" 9 #include "src/compiler/interpreter-assembler.h"
10 #include "src/factory.h" 10 #include "src/factory.h"
(...skipping 78 matching lines...)
89 Node* zero_value = __ NumberConstant(0.0); 89 Node* zero_value = __ NumberConstant(0.0);
90 __ SetAccumulator(zero_value); 90 __ SetAccumulator(zero_value);
91 __ Dispatch(); 91 __ Dispatch();
92 } 92 }
93 93
94 94
95 // LdaSmi8 <imm8> 95 // LdaSmi8 <imm8>
96 // 96 //
97 // Load an 8-bit integer literal into the accumulator as a Smi. 97 // Load an 8-bit integer literal into the accumulator as a Smi.
98 void Interpreter::DoLdaSmi8(compiler::InterpreterAssembler* assembler) { 98 void Interpreter::DoLdaSmi8(compiler::InterpreterAssembler* assembler) {
99 Node* raw_int = __ BytecodeOperandImm8(0); 99 Node* raw_int = __ BytecodeOperandImm(0);
100 Node* smi_int = __ SmiTag(raw_int); 100 Node* smi_int = __ SmiTag(raw_int);
101 __ SetAccumulator(smi_int); 101 __ SetAccumulator(smi_int);
102 __ Dispatch(); 102 __ Dispatch();
103 } 103 }
104 104
105 105
106 void Interpreter::DoLoadConstant(compiler::InterpreterAssembler* assembler) {
107 Node* index = __ BytecodeOperandIdx(0);
108 Node* constant = __ LoadConstantPoolEntry(index);
109 __ SetAccumulator(constant);
110 __ Dispatch();
111 }
112
113
106 // LdaConstant <idx> 114 // LdaConstant <idx>
107 // 115 //
108 // Load constant literal at |idx| in the constant pool into the accumulator. 116 // Load constant literal at |idx| in the constant pool into the accumulator.
109 void Interpreter::DoLdaConstant(compiler::InterpreterAssembler* assembler) { 117 void Interpreter::DoLdaConstant(compiler::InterpreterAssembler* assembler) {
110 Node* index = __ BytecodeOperandIdx8(0); 118 DoLoadConstant(assembler);
111 Node* constant = __ LoadConstantPoolEntry(index);
112 __ SetAccumulator(constant);
113 __ Dispatch();
114 } 119 }
115 120
116 121
122 // LdaConstantWide <idx>
123 //
124 // Load constant literal at |idx| in the constant pool into the accumulator.
125 void Interpreter::DoLdaConstantWide(compiler::InterpreterAssembler* assembler) {
126 DoLoadConstant(assembler);
127 }
128
129
117 // LdaUndefined 130 // LdaUndefined
118 // 131 //
119 // Load Undefined into the accumulator. 132 // Load Undefined into the accumulator.
120 void Interpreter::DoLdaUndefined(compiler::InterpreterAssembler* assembler) { 133 void Interpreter::DoLdaUndefined(compiler::InterpreterAssembler* assembler) {
121 Node* undefined_value = 134 Node* undefined_value =
122 __ HeapConstant(isolate_->factory()->undefined_value()); 135 __ HeapConstant(isolate_->factory()->undefined_value());
123 __ SetAccumulator(undefined_value); 136 __ SetAccumulator(undefined_value);
124 __ Dispatch(); 137 __ Dispatch();
125 } 138 }
126 139
(...skipping 35 matching lines...)
162 Node* false_value = __ HeapConstant(isolate_->factory()->false_value()); 175 Node* false_value = __ HeapConstant(isolate_->factory()->false_value());
163 __ SetAccumulator(false_value); 176 __ SetAccumulator(false_value);
164 __ Dispatch(); 177 __ Dispatch();
165 } 178 }
166 179
167 180
168 // Ldar <src> 181 // Ldar <src>
169 // 182 //
170 // Load accumulator with value from register <src>. 183 // Load accumulator with value from register <src>.
171 void Interpreter::DoLdar(compiler::InterpreterAssembler* assembler) { 184 void Interpreter::DoLdar(compiler::InterpreterAssembler* assembler) {
172 Node* reg_index = __ BytecodeOperandReg8(0); 185 Node* reg_index = __ BytecodeOperandReg(0);
173 Node* value = __ LoadRegister(reg_index); 186 Node* value = __ LoadRegister(reg_index);
174 __ SetAccumulator(value); 187 __ SetAccumulator(value);
175 __ Dispatch(); 188 __ Dispatch();
176 } 189 }
177 190
178 191
179 // Star <dst> 192 // Star <dst>
180 // 193 //
181 // Store accumulator to register <dst>. 194 // Store accumulator to register <dst>.
182 void Interpreter::DoStar(compiler::InterpreterAssembler* assembler) { 195 void Interpreter::DoStar(compiler::InterpreterAssembler* assembler) {
183 Node* reg_index = __ BytecodeOperandReg8(0); 196 Node* reg_index = __ BytecodeOperandReg(0);
184 Node* accumulator = __ GetAccumulator(); 197 Node* accumulator = __ GetAccumulator();
185 __ StoreRegister(accumulator, reg_index); 198 __ StoreRegister(accumulator, reg_index);
186 __ Dispatch(); 199 __ Dispatch();
187 } 200 }
188 201
189 202
190 void Interpreter::DoLoadGlobal(Callable ic, 203 void Interpreter::DoLoadGlobal(Callable ic,
191 compiler::InterpreterAssembler* assembler) { 204 compiler::InterpreterAssembler* assembler) {
192 // Get the global object. 205 // Get the global object.
193 Node* context = __ GetContext(); 206 Node* context = __ GetContext();
194 Node* global = __ LoadContextSlot(context, Context::GLOBAL_OBJECT_INDEX); 207 Node* global = __ LoadContextSlot(context, Context::GLOBAL_OBJECT_INDEX);
195 208
196 // Load the global via the LoadIC. 209 // Load the global via the LoadIC.
197 Node* code_target = __ HeapConstant(ic.code()); 210 Node* code_target = __ HeapConstant(ic.code());
198 Node* constant_index = __ BytecodeOperandIdx8(0); 211 Node* constant_index = __ BytecodeOperandIdx(0);
199 Node* name = __ LoadConstantPoolEntry(constant_index); 212 Node* name = __ LoadConstantPoolEntry(constant_index);
200 Node* raw_slot = __ BytecodeOperandIdx8(1); 213 Node* raw_slot = __ BytecodeOperandIdx(1);
201 Node* smi_slot = __ SmiTag(raw_slot); 214 Node* smi_slot = __ SmiTag(raw_slot);
202 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 215 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
203 Node* result = __ CallIC(ic.descriptor(), code_target, global, name, smi_slot, 216 Node* result = __ CallIC(ic.descriptor(), code_target, global, name, smi_slot,
204 type_feedback_vector); 217 type_feedback_vector);
205 __ SetAccumulator(result); 218 __ SetAccumulator(result);
206 219
207 __ Dispatch(); 220 __ Dispatch();
208 } 221 }
209 222
210 223
(...skipping 12 matching lines...)
223 // 236 //
224 // Load the global with name in constant pool entry <name_index> into the 237 // Load the global with name in constant pool entry <name_index> into the
225 // accumulator using FeedBackVector slot <slot> in strict mode. 238 // accumulator using FeedBackVector slot <slot> in strict mode.
226 void Interpreter::DoLdaGlobalStrict(compiler::InterpreterAssembler* assembler) { 239 void Interpreter::DoLdaGlobalStrict(compiler::InterpreterAssembler* assembler) {
227 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, 240 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
228 STRICT, UNINITIALIZED); 241 STRICT, UNINITIALIZED);
229 DoLoadGlobal(ic, assembler); 242 DoLoadGlobal(ic, assembler);
230 } 243 }
231 244
232 245
246 // LdaGlobalSloppyWide <name_index> <slot>
247 //
248 // Load the global with name in constant pool entry <name_index> into the
249 // accumulator using FeedBackVector slot <slot> in sloppy mode.
250 void Interpreter::DoLdaGlobalSloppyWide(
251 compiler::InterpreterAssembler* assembler) {
252 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
253 SLOPPY, UNINITIALIZED);
254 DoLoadGlobal(ic, assembler);
255 }
256
257
258 // LdaGlobalStrictWide <name_index> <slot>
259 //
260 // Load the global with name in constant pool entry <name_index> into the
261 // accumulator using FeedBackVector slot <slot> in strict mode.
262 void Interpreter::DoLdaGlobalStrictWide(
263 compiler::InterpreterAssembler* assembler) {
264 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
265 STRICT, UNINITIALIZED);
266 DoLoadGlobal(ic, assembler);
267 }
268
269
233 void Interpreter::DoStoreGlobal(Callable ic, 270 void Interpreter::DoStoreGlobal(Callable ic,
234 compiler::InterpreterAssembler* assembler) { 271 compiler::InterpreterAssembler* assembler) {
235 // Get the global object. 272 // Get the global object.
236 Node* context = __ GetContext(); 273 Node* context = __ GetContext();
237 Node* global = __ LoadContextSlot(context, Context::GLOBAL_OBJECT_INDEX); 274 Node* global = __ LoadContextSlot(context, Context::GLOBAL_OBJECT_INDEX);
238 275
239 // Store the global via the StoreIC. 276 // Store the global via the StoreIC.
240 Node* code_target = __ HeapConstant(ic.code()); 277 Node* code_target = __ HeapConstant(ic.code());
241 Node* constant_index = __ BytecodeOperandIdx8(0); 278 Node* constant_index = __ BytecodeOperandIdx(0);
242 Node* name = __ LoadConstantPoolEntry(constant_index); 279 Node* name = __ LoadConstantPoolEntry(constant_index);
243 Node* value = __ GetAccumulator(); 280 Node* value = __ GetAccumulator();
244 Node* raw_slot = __ BytecodeOperandIdx8(1); 281 Node* raw_slot = __ BytecodeOperandIdx(1);
245 Node* smi_slot = __ SmiTag(raw_slot); 282 Node* smi_slot = __ SmiTag(raw_slot);
246 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 283 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
247 __ CallIC(ic.descriptor(), code_target, global, name, value, smi_slot, 284 __ CallIC(ic.descriptor(), code_target, global, name, value, smi_slot,
248 type_feedback_vector); 285 type_feedback_vector);
249 286
250 __ Dispatch(); 287 __ Dispatch();
251 } 288 }
252 289
253 290
254 // StaGlobalSloppy <name_index> <slot> 291 // StaGlobalSloppy <name_index> <slot>
(...skipping 11 matching lines...)
266 // 303 //
267 // Store the value in the accumulator into the global with name in constant pool 304 // Store the value in the accumulator into the global with name in constant pool
268 // entry <name_index> using FeedBackVector slot <slot> in strict mode. 305 // entry <name_index> using FeedBackVector slot <slot> in strict mode.
269 void Interpreter::DoStaGlobalStrict(compiler::InterpreterAssembler* assembler) { 306 void Interpreter::DoStaGlobalStrict(compiler::InterpreterAssembler* assembler) {
270 Callable ic = 307 Callable ic =
271 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); 308 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
272 DoStoreGlobal(ic, assembler); 309 DoStoreGlobal(ic, assembler);
273 } 310 }
274 311
275 312
313 // StaGlobalSloppyWide <name_index> <slot>
314 //
315 // Store the value in the accumulator into the global with name in constant pool
316 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode.
317 void Interpreter::DoStaGlobalSloppyWide(
318 compiler::InterpreterAssembler* assembler) {
319 Callable ic =
320 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
321 DoStoreGlobal(ic, assembler);
322 }
323
324
325 // StaGlobalStrictWide <name_index> <slot>
326 //
327 // Store the value in the accumulator into the global with name in constant pool
328 // entry <name_index> using FeedBackVector slot <slot> in strict mode.
329 void Interpreter::DoStaGlobalStrictWide(
330 compiler::InterpreterAssembler* assembler) {
331 Callable ic =
332 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
333 DoStoreGlobal(ic, assembler);
334 }
335
336
276 // LdaContextSlot <context> <slot_index> 337 // LdaContextSlot <context> <slot_index>
277 // 338 //
278 // Load the object in |slot_index| of |context| into the accumulator. 339 // Load the object in |slot_index| of |context| into the accumulator.
279 void Interpreter::DoLdaContextSlot(compiler::InterpreterAssembler* assembler) { 340 void Interpreter::DoLdaContextSlot(compiler::InterpreterAssembler* assembler) {
280 Node* reg_index = __ BytecodeOperandReg8(0); 341 Node* reg_index = __ BytecodeOperandReg(0);
281 Node* context = __ LoadRegister(reg_index); 342 Node* context = __ LoadRegister(reg_index);
282 Node* slot_index = __ BytecodeOperandIdx8(1); 343 Node* slot_index = __ BytecodeOperandIdx(1);
283 Node* result = __ LoadContextSlot(context, slot_index); 344 Node* result = __ LoadContextSlot(context, slot_index);
284 __ SetAccumulator(result); 345 __ SetAccumulator(result);
285 __ Dispatch(); 346 __ Dispatch();
286 } 347 }
287 348
288 349
289 // StaContextSlot <context> <slot_index> 350 // StaContextSlot <context> <slot_index>
290 // 351 //
291 // Stores the object in the accumulator into |slot_index| of |context|. 352 // Stores the object in the accumulator into |slot_index| of |context|.
292 void Interpreter::DoStaContextSlot(compiler::InterpreterAssembler* assembler) { 353 void Interpreter::DoStaContextSlot(compiler::InterpreterAssembler* assembler) {
293 Node* value = __ GetAccumulator(); 354 Node* value = __ GetAccumulator();
294 Node* reg_index = __ BytecodeOperandReg8(0); 355 Node* reg_index = __ BytecodeOperandReg(0);
295 Node* context = __ LoadRegister(reg_index); 356 Node* context = __ LoadRegister(reg_index);
296 Node* slot_index = __ BytecodeOperandIdx8(1); 357 Node* slot_index = __ BytecodeOperandIdx(1);
297 __ StoreContextSlot(context, slot_index, value); 358 __ StoreContextSlot(context, slot_index, value);
298 __ Dispatch(); 359 __ Dispatch();
299 } 360 }
300 361
301 362
302 void Interpreter::DoLoadIC(Callable ic, 363 void Interpreter::DoLoadIC(Callable ic,
303 compiler::InterpreterAssembler* assembler) { 364 compiler::InterpreterAssembler* assembler) {
304 Node* code_target = __ HeapConstant(ic.code()); 365 Node* code_target = __ HeapConstant(ic.code());
305 Node* register_index = __ BytecodeOperandReg8(0); 366 Node* register_index = __ BytecodeOperandReg(0);
306 Node* object = __ LoadRegister(register_index); 367 Node* object = __ LoadRegister(register_index);
307 Node* constant_index = __ BytecodeOperandIdx8(1); 368 Node* constant_index = __ BytecodeOperandIdx(1);
308 Node* name = __ LoadConstantPoolEntry(constant_index); 369 Node* name = __ LoadConstantPoolEntry(constant_index);
309 Node* raw_slot = __ BytecodeOperandIdx8(2); 370 Node* raw_slot = __ BytecodeOperandIdx(2);
310 Node* smi_slot = __ SmiTag(raw_slot); 371 Node* smi_slot = __ SmiTag(raw_slot);
311 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 372 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
312 Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot, 373 Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot,
313 type_feedback_vector); 374 type_feedback_vector);
314 __ SetAccumulator(result); 375 __ SetAccumulator(result);
315 __ Dispatch(); 376 __ Dispatch();
316 } 377 }
317 378
318 379
319 // LoadICSloppy <object> <name_index> <slot> 380 // LoadICSloppy <object> <name_index> <slot>
(...skipping 11 matching lines...)
331 // 392 //
332 // Calls the strict mode LoadIC at FeedBackVector slot <slot> for <object> and 393 // Calls the strict mode LoadIC at FeedBackVector slot <slot> for <object> and
333 // the name at constant pool entry <name_index>. 394 // the name at constant pool entry <name_index>.
334 void Interpreter::DoLoadICStrict(compiler::InterpreterAssembler* assembler) { 395 void Interpreter::DoLoadICStrict(compiler::InterpreterAssembler* assembler) {
335 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, 396 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
336 STRICT, UNINITIALIZED); 397 STRICT, UNINITIALIZED);
337 DoLoadIC(ic, assembler); 398 DoLoadIC(ic, assembler);
338 } 399 }
339 400
340 401
402 // LoadICSloppyWide <object> <name_index> <slot>
403 //
404 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and
405 // the name at constant pool entry <name_index>.
406 void Interpreter::DoLoadICSloppyWide(
407 compiler::InterpreterAssembler* assembler) {
408 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
409 SLOPPY, UNINITIALIZED);
410 DoLoadIC(ic, assembler);
411 }
412
413
414 // LoadICStrictWide <object> <name_index> <slot>
415 //
416 // Calls the strict mode LoadIC at FeedBackVector slot <slot> for <object> and
417 // the name at constant pool entry <name_index>.
418 void Interpreter::DoLoadICStrictWide(
419 compiler::InterpreterAssembler* assembler) {
420 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
421 STRICT, UNINITIALIZED);
422 DoLoadIC(ic, assembler);
423 }
424
425
341 void Interpreter::DoKeyedLoadIC(Callable ic, 426 void Interpreter::DoKeyedLoadIC(Callable ic,
342 compiler::InterpreterAssembler* assembler) { 427 compiler::InterpreterAssembler* assembler) {
343 Node* code_target = __ HeapConstant(ic.code()); 428 Node* code_target = __ HeapConstant(ic.code());
344 Node* reg_index = __ BytecodeOperandReg8(0); 429 Node* reg_index = __ BytecodeOperandReg(0);
345 Node* object = __ LoadRegister(reg_index); 430 Node* object = __ LoadRegister(reg_index);
346 Node* name = __ GetAccumulator(); 431 Node* name = __ GetAccumulator();
347 Node* raw_slot = __ BytecodeOperandIdx8(1); 432 Node* raw_slot = __ BytecodeOperandIdx(1);
348 Node* smi_slot = __ SmiTag(raw_slot); 433 Node* smi_slot = __ SmiTag(raw_slot);
349 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 434 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
350 Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot, 435 Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot,
351 type_feedback_vector); 436 type_feedback_vector);
352 __ SetAccumulator(result); 437 __ SetAccumulator(result);
353 __ Dispatch(); 438 __ Dispatch();
354 } 439 }
355 440
356 441
357 // KeyedLoadICSloppy <object> <slot> 442 // KeyedLoadICSloppy <object> <slot>
(...skipping 13 matching lines...)
371 // Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object> 456 // Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object>
372 // and the key in the accumulator. 457 // and the key in the accumulator.
373 void Interpreter::DoKeyedLoadICStrict( 458 void Interpreter::DoKeyedLoadICStrict(
374 compiler::InterpreterAssembler* assembler) { 459 compiler::InterpreterAssembler* assembler) {
375 Callable ic = 460 Callable ic =
376 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); 461 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
377 DoKeyedLoadIC(ic, assembler); 462 DoKeyedLoadIC(ic, assembler);
378 } 463 }
379 464
380 465
466 // KeyedLoadICSloppyWide <object> <slot>
467 //
468 // Calls the sloppy mode KeyedLoadIC at FeedBackVector slot <slot> for <object>
469 // and the key in the accumulator.
470 void Interpreter::DoKeyedLoadICSloppyWide(
471 compiler::InterpreterAssembler* assembler) {
472 Callable ic =
473 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
474 DoKeyedLoadIC(ic, assembler);
475 }
476
477
478 // KeyedLoadICStrictWide <object> <slot>
479 //
480 // Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object>
481 // and the key in the accumulator.
482 void Interpreter::DoKeyedLoadICStrictWide(
483 compiler::InterpreterAssembler* assembler) {
484 Callable ic =
485 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
486 DoKeyedLoadIC(ic, assembler);
487 }
488
489
381 void Interpreter::DoStoreIC(Callable ic, 490 void Interpreter::DoStoreIC(Callable ic,
382 compiler::InterpreterAssembler* assembler) { 491 compiler::InterpreterAssembler* assembler) {
383 Node* code_target = __ HeapConstant(ic.code()); 492 Node* code_target = __ HeapConstant(ic.code());
384 Node* object_reg_index = __ BytecodeOperandReg8(0); 493 Node* object_reg_index = __ BytecodeOperandReg(0);
385 Node* object = __ LoadRegister(object_reg_index); 494 Node* object = __ LoadRegister(object_reg_index);
386 Node* constant_index = __ BytecodeOperandIdx8(1); 495 Node* constant_index = __ BytecodeOperandIdx(1);
387 Node* name = __ LoadConstantPoolEntry(constant_index); 496 Node* name = __ LoadConstantPoolEntry(constant_index);
388 Node* value = __ GetAccumulator(); 497 Node* value = __ GetAccumulator();
389 Node* raw_slot = __ BytecodeOperandIdx8(2); 498 Node* raw_slot = __ BytecodeOperandIdx(2);
390 Node* smi_slot = __ SmiTag(raw_slot); 499 Node* smi_slot = __ SmiTag(raw_slot);
391 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 500 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
392 __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot, 501 __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot,
393 type_feedback_vector); 502 type_feedback_vector);
394 __ Dispatch(); 503 __ Dispatch();
395 } 504 }
396 505
397 506
398 // StoreICSloppy <object> <name_index> <slot> 507 // StoreICSloppy <object> <name_index> <slot>
399 // 508 //
(...skipping 12 matching lines...)
412 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and 521 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and
413 // the name in constant pool entry <name_index> with the value in the 522 // the name in constant pool entry <name_index> with the value in the
414 // accumulator. 523 // accumulator.
415 void Interpreter::DoStoreICStrict(compiler::InterpreterAssembler* assembler) { 524 void Interpreter::DoStoreICStrict(compiler::InterpreterAssembler* assembler) {
416 Callable ic = 525 Callable ic =
417 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); 526 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
418 DoStoreIC(ic, assembler); 527 DoStoreIC(ic, assembler);
419 } 528 }
420 529
421 530
531 // StoreICSloppyWide <object> <name_index> <slot>
532 //
533 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and
534 // the name in constant pool entry <name_index> with the value in the
535 // accumulator.
536 void Interpreter::DoStoreICSloppyWide(
537 compiler::InterpreterAssembler* assembler) {
538 Callable ic =
539 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
540 DoStoreIC(ic, assembler);
541 }
542
543
544 // StoreICStrictWide <object> <name_index> <slot>
545 //
546 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and
547 // the name in constant pool entry <name_index> with the value in the
548 // accumulator.
549 void Interpreter::DoStoreICStrictWide(
550 compiler::InterpreterAssembler* assembler) {
551 Callable ic =
552 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
553 DoStoreIC(ic, assembler);
554 }
555
556
422 void Interpreter::DoKeyedStoreIC(Callable ic, 557 void Interpreter::DoKeyedStoreIC(Callable ic,
423 compiler::InterpreterAssembler* assembler) { 558 compiler::InterpreterAssembler* assembler) {
424 Node* code_target = __ HeapConstant(ic.code()); 559 Node* code_target = __ HeapConstant(ic.code());
425 Node* object_reg_index = __ BytecodeOperandReg8(0); 560 Node* object_reg_index = __ BytecodeOperandReg(0);
426 Node* object = __ LoadRegister(object_reg_index); 561 Node* object = __ LoadRegister(object_reg_index);
427 Node* name_reg_index = __ BytecodeOperandReg8(1); 562 Node* name_reg_index = __ BytecodeOperandReg(1);
428 Node* name = __ LoadRegister(name_reg_index); 563 Node* name = __ LoadRegister(name_reg_index);
429 Node* value = __ GetAccumulator(); 564 Node* value = __ GetAccumulator();
430 Node* raw_slot = __ BytecodeOperandIdx8(2); 565 Node* raw_slot = __ BytecodeOperandIdx(2);
431 Node* smi_slot = __ SmiTag(raw_slot); 566 Node* smi_slot = __ SmiTag(raw_slot);
432 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 567 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
433 __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot, 568 __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot,
434 type_feedback_vector); 569 type_feedback_vector);
435 __ Dispatch(); 570 __ Dispatch();
436 } 571 }
437 572
438 573
439 // KeyedStoreICSloppy <object> <key> <slot> 574 // KeyedStoreICSloppy <object> <key> <slot>
440 // 575 //
(...skipping 12 matching lines...)
453 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object> 588 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object>
454 // and the key <key> with the value in the accumulator. 589 // and the key <key> with the value in the accumulator.
455 void Interpreter::DoKeyedStoreICStrict( 590 void Interpreter::DoKeyedStoreICStrict(
456 compiler::InterpreterAssembler* assembler) { 591 compiler::InterpreterAssembler* assembler) {
457 Callable ic = 592 Callable ic =
458 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); 593 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
459 DoKeyedStoreIC(ic, assembler); 594 DoKeyedStoreIC(ic, assembler);
460 } 595 }
461 596
462 597
598 // KeyedStoreICSloppyWide <object> <key> <slot>
599 //
600 // Calls the sloppy mode KeyedStoreIC at FeedBackVector slot <slot> for <object>
601 // and the key <key> with the value in the accumulator.
602 void Interpreter::DoKeyedStoreICSloppyWide(
603 compiler::InterpreterAssembler* assembler) {
604 Callable ic =
605 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
606 DoKeyedStoreIC(ic, assembler);
607 }
608
609
610 // KeyedStoreICStrictWide <object> <key> <slot>
611 //
612 // Calls the strict mode KeyedStoreIC at FeedBackVector slot <slot> for <object>
613 // and the key <key> with the value in the accumulator.
614 void Interpreter::DoKeyedStoreICStrictWide(
615 compiler::InterpreterAssembler* assembler) {
616 Callable ic =
617 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
618 DoKeyedStoreIC(ic, assembler);
619 }
620
621
463 // PushContext <context> 622 // PushContext <context>
464 // 623 //
465 // Pushes the accumulator as the current context, and saves it in <context> 624 // Pushes the accumulator as the current context, and saves it in <context>
466 void Interpreter::DoPushContext(compiler::InterpreterAssembler* assembler) { 625 void Interpreter::DoPushContext(compiler::InterpreterAssembler* assembler) {
467 Node* reg_index = __ BytecodeOperandReg8(0); 626 Node* reg_index = __ BytecodeOperandReg(0);
468 Node* context = __ GetAccumulator(); 627 Node* context = __ GetAccumulator();
469 __ SetContext(context); 628 __ SetContext(context);
470 __ StoreRegister(context, reg_index); 629 __ StoreRegister(context, reg_index);
471 __ Dispatch(); 630 __ Dispatch();
472 } 631 }
473 632
474 633
475 // PopContext <context> 634 // PopContext <context>
476 // 635 //
477 // Pops the current context and sets <context> as the new context. 636 // Pops the current context and sets <context> as the new context.
478 void Interpreter::DoPopContext(compiler::InterpreterAssembler* assembler) { 637 void Interpreter::DoPopContext(compiler::InterpreterAssembler* assembler) {
479 Node* reg_index = __ BytecodeOperandReg8(0); 638 Node* reg_index = __ BytecodeOperandReg(0);
480 Node* context = __ LoadRegister(reg_index); 639 Node* context = __ LoadRegister(reg_index);
481 __ SetContext(context); 640 __ SetContext(context);
482 __ Dispatch(); 641 __ Dispatch();
483 } 642 }
484 643
485 644
486 void Interpreter::DoBinaryOp(Runtime::FunctionId function_id, 645 void Interpreter::DoBinaryOp(Runtime::FunctionId function_id,
487 compiler::InterpreterAssembler* assembler) { 646 compiler::InterpreterAssembler* assembler) {
488 // TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized 647 // TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized
489 // operations, instead of calling builtins directly. 648 // operations, instead of calling builtins directly.
490 Node* reg_index = __ BytecodeOperandReg8(0); 649 Node* reg_index = __ BytecodeOperandReg(0);
491 Node* lhs = __ LoadRegister(reg_index); 650 Node* lhs = __ LoadRegister(reg_index);
492 Node* rhs = __ GetAccumulator(); 651 Node* rhs = __ GetAccumulator();
493 Node* result = __ CallRuntime(function_id, lhs, rhs); 652 Node* result = __ CallRuntime(function_id, lhs, rhs);
494 __ SetAccumulator(result); 653 __ SetAccumulator(result);
495 __ Dispatch(); 654 __ Dispatch();
496 } 655 }
497 656
498 657
499 // Add <src> 658 // Add <src>
500 // 659 //
(...skipping 138 matching lines...)
639 void Interpreter::DoTypeOf(compiler::InterpreterAssembler* assembler) { 798 void Interpreter::DoTypeOf(compiler::InterpreterAssembler* assembler) {
640 Node* accumulator = __ GetAccumulator(); 799 Node* accumulator = __ GetAccumulator();
641 Node* result = __ CallRuntime(Runtime::kInterpreterTypeOf, accumulator); 800 Node* result = __ CallRuntime(Runtime::kInterpreterTypeOf, accumulator);
642 __ SetAccumulator(result); 801 __ SetAccumulator(result);
643 __ Dispatch(); 802 __ Dispatch();
644 } 803 }
645 804
646 805
647 void Interpreter::DoDelete(Runtime::FunctionId function_id, 806 void Interpreter::DoDelete(Runtime::FunctionId function_id,
648 compiler::InterpreterAssembler* assembler) { 807 compiler::InterpreterAssembler* assembler) {
649 Node* reg_index = __ BytecodeOperandReg8(0); 808 Node* reg_index = __ BytecodeOperandReg(0);
650 Node* object = __ LoadRegister(reg_index); 809 Node* object = __ LoadRegister(reg_index);
651 Node* key = __ GetAccumulator(); 810 Node* key = __ GetAccumulator();
652 Node* result = __ CallRuntime(function_id, object, key); 811 Node* result = __ CallRuntime(function_id, object, key);
653 __ SetAccumulator(result); 812 __ SetAccumulator(result);
654 __ Dispatch(); 813 __ Dispatch();
655 } 814 }
656 815
657 816
658 // DeletePropertyStrict 817 // DeletePropertyStrict
659 // 818 //
(...skipping 13 matching lines...) Expand all
673 compiler::InterpreterAssembler* assembler) { 832 compiler::InterpreterAssembler* assembler) {
674 DoDelete(Runtime::kDeleteProperty_Sloppy, assembler); 833 DoDelete(Runtime::kDeleteProperty_Sloppy, assembler);
675 } 834 }
676 835
677 836
678 // Call <callable> <receiver> <arg_count> 837 // Call <callable> <receiver> <arg_count>
679 // 838 //
680 // Call a JSfunction or Callable in |callable| with the |receiver| and 839 // Call a JSfunction or Callable in |callable| with the |receiver| and
681 // |arg_count| arguments in subsequent registers. 840 // |arg_count| arguments in subsequent registers.
682 void Interpreter::DoCall(compiler::InterpreterAssembler* assembler) { 841 void Interpreter::DoCall(compiler::InterpreterAssembler* assembler) {
683 Node* function_reg = __ BytecodeOperandReg8(0); 842 Node* function_reg = __ BytecodeOperandReg(0);
684 Node* function = __ LoadRegister(function_reg); 843 Node* function = __ LoadRegister(function_reg);
685 Node* receiver_reg = __ BytecodeOperandReg8(1); 844 Node* receiver_reg = __ BytecodeOperandReg(1);
686 Node* first_arg = __ RegisterLocation(receiver_reg); 845 Node* first_arg = __ RegisterLocation(receiver_reg);
687 Node* args_count = __ BytecodeOperandCount8(2); 846 Node* args_count = __ BytecodeOperandCount(2);
688 Node* result = __ CallJS(function, first_arg, args_count); 847 Node* result = __ CallJS(function, first_arg, args_count);
689 __ SetAccumulator(result); 848 __ SetAccumulator(result);
690 __ Dispatch(); 849 __ Dispatch();
691 } 850 }
692 851
693 852
694 // CallRuntime <function_id> <first_arg> <arg_count> 853 // CallRuntime <function_id> <first_arg> <arg_count>
695 // 854 //
696 // Call the runtime function |function_id| with the first argument in 855 // Call the runtime function |function_id| with the first argument in
697 // register |first_arg| and |arg_count| arguments in subsequent 856 // register |first_arg| and |arg_count| arguments in subsequent
698 // registers. 857 // registers.
699 void Interpreter::DoCallRuntime(compiler::InterpreterAssembler* assembler) { 858 void Interpreter::DoCallRuntime(compiler::InterpreterAssembler* assembler) {
700 Node* function_id = __ BytecodeOperandIdx16(0); 859 Node* function_id = __ BytecodeOperandIdx(0);
701 Node* first_arg_reg = __ BytecodeOperandReg8(1); 860 Node* first_arg_reg = __ BytecodeOperandReg(1);
702 Node* first_arg = __ RegisterLocation(first_arg_reg); 861 Node* first_arg = __ RegisterLocation(first_arg_reg);
703 Node* args_count = __ BytecodeOperandCount8(2); 862 Node* args_count = __ BytecodeOperandCount(2);
704 Node* result = __ CallRuntime(function_id, first_arg, args_count); 863 Node* result = __ CallRuntime(function_id, first_arg, args_count);
705 __ SetAccumulator(result); 864 __ SetAccumulator(result);
706 __ Dispatch(); 865 __ Dispatch();
707 } 866 }
708 867
709 868
710 // New <constructor> <arg_count> 869 // New <constructor> <arg_count>
711 // 870 //
712 // Call operator new with |constructor| and the first argument in 871 // Call operator new with |constructor| and the first argument in
713 // register |first_arg| and |arg_count| arguments in subsequent 872 // register |first_arg| and |arg_count| arguments in subsequent
714 // registers. 873 // registers.
715 void Interpreter::DoNew(compiler::InterpreterAssembler* assembler) { 874 void Interpreter::DoNew(compiler::InterpreterAssembler* assembler) {
716 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); 875 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_);
717 Node* constructor_index = __ BytecodeOperandReg8(0); 876 Node* constructor_reg = __ BytecodeOperandReg(0);
718 Node* constructor = __ LoadRegister(constructor_index); 877 Node* constructor = __ LoadRegister(constructor_reg);
719 Node* first_arg_reg = __ BytecodeOperandReg8(1); 878 Node* first_arg_reg = __ BytecodeOperandReg(1);
720 Node* first_arg = __ RegisterLocation(first_arg_reg); 879 Node* first_arg = __ RegisterLocation(first_arg_reg);
721 Node* args_count = __ BytecodeOperandCount8(2); 880 Node* args_count = __ BytecodeOperandCount(2);
722 Node* result = 881 Node* result =
723 __ CallConstruct(constructor, constructor, first_arg, args_count); 882 __ CallConstruct(constructor, constructor, first_arg, args_count);
724 __ SetAccumulator(result); 883 __ SetAccumulator(result);
725 __ Dispatch(); 884 __ Dispatch();
726 } 885 }
727 886
728 887
729 // TestEqual <src> 888 // TestEqual <src>
730 // 889 //
731 // Test if the value in the <src> register equals the accumulator. 890 // Test if the value in the <src> register equals the accumulator.
(...skipping 123 matching lines...)
855 Node* result = __ CallRuntime(Runtime::kToObject, accumulator); 1014 Node* result = __ CallRuntime(Runtime::kToObject, accumulator);
856 __ SetAccumulator(result); 1015 __ SetAccumulator(result);
857 __ Dispatch(); 1016 __ Dispatch();
858 } 1017 }
859 1018
860 1019
861 // Jump <imm8> 1020 // Jump <imm8>
862 // 1021 //
863 // Jump by number of bytes represented by the immediate operand |imm8|. 1022 // Jump by number of bytes represented by the immediate operand |imm8|.
864 void Interpreter::DoJump(compiler::InterpreterAssembler* assembler) { 1023 void Interpreter::DoJump(compiler::InterpreterAssembler* assembler) {
865 Node* relative_jump = __ BytecodeOperandImm8(0); 1024 Node* relative_jump = __ BytecodeOperandImm(0);
866 __ Jump(relative_jump); 1025 __ Jump(relative_jump);
867 } 1026 }
868 1027
869 1028
870 // JumpConstant <idx> 1029 // JumpConstant <idx>
871 // 1030 //
872 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool. 1031 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool.
873 void Interpreter::DoJumpConstant(compiler::InterpreterAssembler* assembler) { 1032 void Interpreter::DoJumpConstant(compiler::InterpreterAssembler* assembler) {
874 Node* index = __ BytecodeOperandIdx8(0); 1033 Node* index = __ BytecodeOperandIdx(0);
875 Node* constant = __ LoadConstantPoolEntry(index); 1034 Node* constant = __ LoadConstantPoolEntry(index);
876 Node* relative_jump = __ SmiUntag(constant); 1035 Node* relative_jump = __ SmiUntag(constant);
877 __ Jump(relative_jump); 1036 __ Jump(relative_jump);
878 } 1037 }
879 1038
880 1039
881 // JumpIfTrue <imm8> 1040 // JumpIfTrue <imm8>
882 // 1041 //
883 // Jump by number of bytes represented by an immediate operand if the 1042 // Jump by number of bytes represented by an immediate operand if the
884 // accumulator contains true. 1043 // accumulator contains true.
885 void Interpreter::DoJumpIfTrue(compiler::InterpreterAssembler* assembler) { 1044 void Interpreter::DoJumpIfTrue(compiler::InterpreterAssembler* assembler) {
886 Node* accumulator = __ GetAccumulator(); 1045 Node* accumulator = __ GetAccumulator();
887 Node* relative_jump = __ BytecodeOperandImm8(0); 1046 Node* relative_jump = __ BytecodeOperandImm(0);
888 Node* true_value = __ BooleanConstant(true); 1047 Node* true_value = __ BooleanConstant(true);
889 __ JumpIfWordEqual(accumulator, true_value, relative_jump); 1048 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
890 } 1049 }
891 1050
892 1051
893 // JumpIfTrueConstant <idx> 1052 // JumpIfTrueConstant <idx>
894 // 1053 //
895 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool 1054 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
896 // if the accumulator contains true. 1055 // if the accumulator contains true.
897 void Interpreter::DoJumpIfTrueConstant( 1056 void Interpreter::DoJumpIfTrueConstant(
898 compiler::InterpreterAssembler* assembler) { 1057 compiler::InterpreterAssembler* assembler) {
899 Node* accumulator = __ GetAccumulator(); 1058 Node* accumulator = __ GetAccumulator();
900 Node* index = __ BytecodeOperandIdx8(0); 1059 Node* index = __ BytecodeOperandIdx(0);
901 Node* constant = __ LoadConstantPoolEntry(index); 1060 Node* constant = __ LoadConstantPoolEntry(index);
902 Node* relative_jump = __ SmiUntag(constant); 1061 Node* relative_jump = __ SmiUntag(constant);
903 Node* true_value = __ BooleanConstant(true); 1062 Node* true_value = __ BooleanConstant(true);
904 __ JumpIfWordEqual(accumulator, true_value, relative_jump); 1063 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
905 } 1064 }
906 1065
907 1066
908 // JumpIfFalse <imm8> 1067 // JumpIfFalse <imm8>
909 // 1068 //
910 // Jump by number of bytes represented by an immediate operand if the 1069 // Jump by number of bytes represented by an immediate operand if the
911 // accumulator contains false. 1070 // accumulator contains false.
912 void Interpreter::DoJumpIfFalse(compiler::InterpreterAssembler* assembler) { 1071 void Interpreter::DoJumpIfFalse(compiler::InterpreterAssembler* assembler) {
913 Node* accumulator = __ GetAccumulator(); 1072 Node* accumulator = __ GetAccumulator();
914 Node* relative_jump = __ BytecodeOperandImm8(0); 1073 Node* relative_jump = __ BytecodeOperandImm(0);
915 Node* false_value = __ BooleanConstant(false); 1074 Node* false_value = __ BooleanConstant(false);
916 __ JumpIfWordEqual(accumulator, false_value, relative_jump); 1075 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
917 } 1076 }
918 1077
919 1078
920 // JumpIfFalseConstant <idx> 1079 // JumpIfFalseConstant <idx>
921 // 1080 //
922 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool 1081 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
923 // if the accumulator contains false. 1082 // if the accumulator contains false.
924 void Interpreter::DoJumpIfFalseConstant( 1083 void Interpreter::DoJumpIfFalseConstant(
925 compiler::InterpreterAssembler* assembler) { 1084 compiler::InterpreterAssembler* assembler) {
926 Node* accumulator = __ GetAccumulator(); 1085 Node* accumulator = __ GetAccumulator();
927 Node* index = __ BytecodeOperandIdx8(0); 1086 Node* index = __ BytecodeOperandIdx(0);
928 Node* constant = __ LoadConstantPoolEntry(index); 1087 Node* constant = __ LoadConstantPoolEntry(index);
929 Node* relative_jump = __ SmiUntag(constant); 1088 Node* relative_jump = __ SmiUntag(constant);
930 Node* false_value = __ BooleanConstant(false); 1089 Node* false_value = __ BooleanConstant(false);
931 __ JumpIfWordEqual(accumulator, false_value, relative_jump); 1090 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
932 } 1091 }
933 1092
934 1093
935 // JumpIfToBooleanTrue <imm8> 1094 // JumpIfToBooleanTrue <imm8>
936 // 1095 //
937 // Jump by number of bytes represented by an immediate operand if the object 1096 // Jump by number of bytes represented by an immediate operand if the object
938 // referenced by the accumulator is true when the object is cast to boolean. 1097 // referenced by the accumulator is true when the object is cast to boolean.
939 void Interpreter::DoJumpIfToBooleanTrue( 1098 void Interpreter::DoJumpIfToBooleanTrue(
940 compiler::InterpreterAssembler* assembler) { 1099 compiler::InterpreterAssembler* assembler) {
941 Node* accumulator = __ GetAccumulator(); 1100 Node* accumulator = __ GetAccumulator();
942 Node* relative_jump = __ BytecodeOperandImm8(0); 1101 Node* relative_jump = __ BytecodeOperandImm(0);
943 Node* to_boolean_value = 1102 Node* to_boolean_value =
944 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); 1103 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
945 Node* true_value = __ BooleanConstant(true); 1104 Node* true_value = __ BooleanConstant(true);
946 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); 1105 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
947 } 1106 }
948 1107
949 1108
950 // JumpIfToBooleanTrueConstant <idx> 1109 // JumpIfToBooleanTrueConstant <idx>
951 // 1110 //
952 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool 1111 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
953 // if the object referenced by the accumulator is true when the object is cast 1112 // if the object referenced by the accumulator is true when the object is cast
954 // to boolean. 1113 // to boolean.
955 void Interpreter::DoJumpIfToBooleanTrueConstant( 1114 void Interpreter::DoJumpIfToBooleanTrueConstant(
956 compiler::InterpreterAssembler* assembler) { 1115 compiler::InterpreterAssembler* assembler) {
957 Node* accumulator = __ GetAccumulator(); 1116 Node* accumulator = __ GetAccumulator();
958 Node* to_boolean_value = 1117 Node* to_boolean_value =
959 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); 1118 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
960 Node* index = __ BytecodeOperandIdx8(0); 1119 Node* index = __ BytecodeOperandIdx(0);
961 Node* constant = __ LoadConstantPoolEntry(index); 1120 Node* constant = __ LoadConstantPoolEntry(index);
962 Node* relative_jump = __ SmiUntag(constant); 1121 Node* relative_jump = __ SmiUntag(constant);
963 Node* true_value = __ BooleanConstant(true); 1122 Node* true_value = __ BooleanConstant(true);
964 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); 1123 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
965 } 1124 }
966 1125
967 1126
968 // JumpIfToBooleanFalse <imm8> 1127 // JumpIfToBooleanFalse <imm8>
969 // 1128 //
970 // Jump by number of bytes represented by an immediate operand if the object 1129 // Jump by number of bytes represented by an immediate operand if the object
971 // referenced by the accumulator is false when the object is cast to boolean. 1130 // referenced by the accumulator is false when the object is cast to boolean.
972 void Interpreter::DoJumpIfToBooleanFalse( 1131 void Interpreter::DoJumpIfToBooleanFalse(
973 compiler::InterpreterAssembler* assembler) { 1132 compiler::InterpreterAssembler* assembler) {
974 Node* accumulator = __ GetAccumulator(); 1133 Node* accumulator = __ GetAccumulator();
975 Node* relative_jump = __ BytecodeOperandImm8(0); 1134 Node* relative_jump = __ BytecodeOperandImm(0);
976 Node* to_boolean_value = 1135 Node* to_boolean_value =
977 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); 1136 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
978 Node* false_value = __ BooleanConstant(false); 1137 Node* false_value = __ BooleanConstant(false);
979 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); 1138 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
980 } 1139 }
981 1140
982 1141
983 // JumpIfToBooleanFalseConstant <idx> 1142 // JumpIfToBooleanFalseConstant <idx>
984 // 1143 //
985 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool 1144 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
986 // if the object referenced by the accumulator is false when the object is cast 1145 // if the object referenced by the accumulator is false when the object is cast
987 // to boolean. 1146 // to boolean.
988 void Interpreter::DoJumpIfToBooleanFalseConstant( 1147 void Interpreter::DoJumpIfToBooleanFalseConstant(
989 compiler::InterpreterAssembler* assembler) { 1148 compiler::InterpreterAssembler* assembler) {
990 Node* accumulator = __ GetAccumulator(); 1149 Node* accumulator = __ GetAccumulator();
991 Node* to_boolean_value = 1150 Node* to_boolean_value =
992 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); 1151 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
993 Node* index = __ BytecodeOperandIdx8(0); 1152 Node* index = __ BytecodeOperandIdx(0);
994 Node* constant = __ LoadConstantPoolEntry(index); 1153 Node* constant = __ LoadConstantPoolEntry(index);
995 Node* relative_jump = __ SmiUntag(constant); 1154 Node* relative_jump = __ SmiUntag(constant);
996 Node* false_value = __ BooleanConstant(false); 1155 Node* false_value = __ BooleanConstant(false);
997 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); 1156 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
998 } 1157 }
999 1158
1000 1159
1001 // JumpIfNull <imm8> 1160 // JumpIfNull <imm8>
1002 // 1161 //
1003 // Jump by number of bytes represented by an immediate operand if the object 1162 // Jump by number of bytes represented by an immediate operand if the object
1004 // referenced by the accumulator is the null constant. 1163 // referenced by the accumulator is the null constant.
1005 void Interpreter::DoJumpIfNull(compiler::InterpreterAssembler* assembler) { 1164 void Interpreter::DoJumpIfNull(compiler::InterpreterAssembler* assembler) {
1006 Node* accumulator = __ GetAccumulator(); 1165 Node* accumulator = __ GetAccumulator();
1007 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); 1166 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1008 Node* relative_jump = __ BytecodeOperandImm8(0); 1167 Node* relative_jump = __ BytecodeOperandImm(0);
1009 __ JumpIfWordEqual(accumulator, null_value, relative_jump); 1168 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1010 } 1169 }
1011 1170
1012 1171
1013 // JumpIfNullConstant <idx> 1172 // JumpIfNullConstant <idx>
1014 // 1173 //
1015 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool 1174 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1016 // if the object referenced by the accumulator is the null constant. 1175 // if the object referenced by the accumulator is the null constant.
1017 void Interpreter::DoJumpIfNullConstant( 1176 void Interpreter::DoJumpIfNullConstant(
1018 compiler::InterpreterAssembler* assembler) { 1177 compiler::InterpreterAssembler* assembler) {
1019 Node* accumulator = __ GetAccumulator(); 1178 Node* accumulator = __ GetAccumulator();
1020 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); 1179 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1021 Node* index = __ BytecodeOperandIdx8(0); 1180 Node* index = __ BytecodeOperandIdx(0);
1022 Node* constant = __ LoadConstantPoolEntry(index); 1181 Node* constant = __ LoadConstantPoolEntry(index);
1023 Node* relative_jump = __ SmiUntag(constant); 1182 Node* relative_jump = __ SmiUntag(constant);
1024 __ JumpIfWordEqual(accumulator, null_value, relative_jump); 1183 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1025 } 1184 }
1026 1185
1027 1186
1028 // JumpIfUndefined <imm8> 1187 // JumpIfUndefined <imm8>
1029 // 1188 //
1030 // Jump by number of bytes represented by an immediate operand if the object 1189 // Jump by number of bytes represented by an immediate operand if the object
1031 // referenced by the accumulator is the undefined constant. 1190 // referenced by the accumulator is the undefined constant.
1032 void Interpreter::DoJumpIfUndefined(compiler::InterpreterAssembler* assembler) { 1191 void Interpreter::DoJumpIfUndefined(compiler::InterpreterAssembler* assembler) {
1033 Node* accumulator = __ GetAccumulator(); 1192 Node* accumulator = __ GetAccumulator();
1034 Node* undefined_value = 1193 Node* undefined_value =
1035 __ HeapConstant(isolate_->factory()->undefined_value()); 1194 __ HeapConstant(isolate_->factory()->undefined_value());
1036 Node* relative_jump = __ BytecodeOperandImm8(0); 1195 Node* relative_jump = __ BytecodeOperandImm(0);
1037 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); 1196 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1038 } 1197 }
1039 1198
1040 1199
1041 // JumpIfUndefinedConstant <idx> 1200 // JumpIfUndefinedConstant <idx>
1042 // 1201 //
1043 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool 1202 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1044 // if the object referenced by the accumulator is the undefined constant. 1203 // if the object referenced by the accumulator is the undefined constant.
1045 void Interpreter::DoJumpIfUndefinedConstant( 1204 void Interpreter::DoJumpIfUndefinedConstant(
1046 compiler::InterpreterAssembler* assembler) { 1205 compiler::InterpreterAssembler* assembler) {
1047 Node* accumulator = __ GetAccumulator(); 1206 Node* accumulator = __ GetAccumulator();
1048 Node* undefined_value = 1207 Node* undefined_value =
1049 __ HeapConstant(isolate_->factory()->undefined_value()); 1208 __ HeapConstant(isolate_->factory()->undefined_value());
1050 Node* index = __ BytecodeOperandIdx8(0); 1209 Node* index = __ BytecodeOperandIdx(0);
1051 Node* constant = __ LoadConstantPoolEntry(index); 1210 Node* constant = __ LoadConstantPoolEntry(index);
1052 Node* relative_jump = __ SmiUntag(constant); 1211 Node* relative_jump = __ SmiUntag(constant);
1053 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); 1212 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1054 } 1213 }
1055 1214
1056 1215
1057 // CreateRegExpLiteral <idx> <flags_reg> 1216 // CreateRegExpLiteral <idx> <flags_reg>
1058 // 1217 //
1059 // Creates a regular expression literal for literal index <idx> with flags held 1218 // Creates a regular expression literal for literal index <idx> with flags held
1060 // in <flags_reg> and the pattern in the accumulator. 1219 // in <flags_reg> and the pattern in the accumulator.
1061 void Interpreter::DoCreateRegExpLiteral( 1220 void Interpreter::DoCreateRegExpLiteral(
1062 compiler::InterpreterAssembler* assembler) { 1221 compiler::InterpreterAssembler* assembler) {
1063 Node* pattern = __ GetAccumulator(); 1222 Node* pattern = __ GetAccumulator();
1064 Node* literal_index_raw = __ BytecodeOperandIdx8(0); 1223 Node* literal_index_raw = __ BytecodeOperandIdx(0);
1065 Node* literal_index = __ SmiTag(literal_index_raw); 1224 Node* literal_index = __ SmiTag(literal_index_raw);
1066 Node* flags_reg = __ BytecodeOperandReg8(1); 1225 Node* flags_reg = __ BytecodeOperandReg(1);
1067 Node* flags = __ LoadRegister(flags_reg); 1226 Node* flags = __ LoadRegister(flags_reg);
1068 Node* closure = __ LoadRegister(Register::function_closure()); 1227 Node* closure = __ LoadRegister(Register::function_closure());
1069 Node* literals_array = 1228 Node* literals_array =
1070 __ LoadObjectField(closure, JSFunction::kLiteralsOffset); 1229 __ LoadObjectField(closure, JSFunction::kLiteralsOffset);
1071 Node* result = __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 1230 Node* result = __ CallRuntime(Runtime::kMaterializeRegExpLiteral,
1072 literals_array, literal_index, pattern, flags); 1231 literals_array, literal_index, pattern, flags);
1073 __ SetAccumulator(result); 1232 __ SetAccumulator(result);
1074 __ Dispatch(); 1233 __ Dispatch();
1075 } 1234 }
1076 1235
1077 1236
1078 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, 1237 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id,
1079 compiler::InterpreterAssembler* assembler) { 1238 compiler::InterpreterAssembler* assembler) {
1080 Node* constant_elements = __ GetAccumulator(); 1239 Node* constant_elements = __ GetAccumulator();
1081 Node* literal_index_raw = __ BytecodeOperandIdx8(0); 1240 Node* literal_index_raw = __ BytecodeOperandIdx(0);
1082 Node* literal_index = __ SmiTag(literal_index_raw); 1241 Node* literal_index = __ SmiTag(literal_index_raw);
1083 Node* flags_raw = __ BytecodeOperandImm8(1); 1242 Node* flags_raw = __ BytecodeOperandImm(1);
1084 Node* flags = __ SmiTag(flags_raw); 1243 Node* flags = __ SmiTag(flags_raw);
1085 Node* closure = __ LoadRegister(Register::function_closure()); 1244 Node* closure = __ LoadRegister(Register::function_closure());
1086 Node* literals_array = 1245 Node* literals_array =
1087 __ LoadObjectField(closure, JSFunction::kLiteralsOffset); 1246 __ LoadObjectField(closure, JSFunction::kLiteralsOffset);
1088 Node* result = __ CallRuntime(function_id, literals_array, literal_index, 1247 Node* result = __ CallRuntime(function_id, literals_array, literal_index,
1089 constant_elements, flags); 1248 constant_elements, flags);
1090 __ SetAccumulator(result); 1249 __ SetAccumulator(result);
1091 __ Dispatch(); 1250 __ Dispatch();
1092 } 1251 }
1093 1252
(...skipping 19 matching lines...)
1113 1272
1114 1273
1115 // CreateClosure <tenured> 1274 // CreateClosure <tenured>
1116 // 1275 //
1117 // Creates a new closure for SharedFunctionInfo in the accumulator with the 1276 // Creates a new closure for SharedFunctionInfo in the accumulator with the
1118 // PretenureFlag <tenured>. 1277 // PretenureFlag <tenured>.
1119 void Interpreter::DoCreateClosure(compiler::InterpreterAssembler* assembler) { 1278 void Interpreter::DoCreateClosure(compiler::InterpreterAssembler* assembler) {
1120 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of 1279 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of
1121 // calling into the runtime. 1280 // calling into the runtime.
1122 Node* shared = __ GetAccumulator(); 1281 Node* shared = __ GetAccumulator();
1123 Node* tenured_raw = __ BytecodeOperandImm8(0); 1282 Node* tenured_raw = __ BytecodeOperandImm(0);
1124 Node* tenured = __ SmiTag(tenured_raw); 1283 Node* tenured = __ SmiTag(tenured_raw);
1125 Node* result = 1284 Node* result =
1126 __ CallRuntime(Runtime::kInterpreterNewClosure, shared, tenured); 1285 __ CallRuntime(Runtime::kInterpreterNewClosure, shared, tenured);
1127 __ SetAccumulator(result); 1286 __ SetAccumulator(result);
1128 __ Dispatch(); 1287 __ Dispatch();
1129 } 1288 }
1130 1289
1131 1290
1132 // CreateMappedArguments 1291 // CreateMappedArguments
1133 // 1292 //
(...skipping 36 matching lines...)
1170 void Interpreter::DoReturn(compiler::InterpreterAssembler* assembler) { 1329 void Interpreter::DoReturn(compiler::InterpreterAssembler* assembler) {
1171 __ Return(); 1330 __ Return();
1172 } 1331 }
1173 1332
1174 1333
1175 // ForInPrepare <receiver> 1334 // ForInPrepare <receiver>
1176 // 1335 //
1177 // Returns state for for..in loop execution based on the |receiver| and 1336 // Returns state for for..in loop execution based on the |receiver| and
1178 // the property names in the accumulator. 1337 // the property names in the accumulator.
1179 void Interpreter::DoForInPrepare(compiler::InterpreterAssembler* assembler) { 1338 void Interpreter::DoForInPrepare(compiler::InterpreterAssembler* assembler) {
1180 Node* receiver_reg = __ BytecodeOperandReg8(0); 1339 Node* receiver_reg = __ BytecodeOperandReg(0);
1181 Node* receiver = __ LoadRegister(receiver_reg); 1340 Node* receiver = __ LoadRegister(receiver_reg);
1182 Node* property_names = __ GetAccumulator(); 1341 Node* property_names = __ GetAccumulator();
1183 Node* result = __ CallRuntime(Runtime::kInterpreterForInPrepare, receiver, 1342 Node* result = __ CallRuntime(Runtime::kInterpreterForInPrepare, receiver,
1184 property_names); 1343 property_names);
1185 __ SetAccumulator(result); 1344 __ SetAccumulator(result);
1186 __ Dispatch(); 1345 __ Dispatch();
1187 } 1346 }
1188 1347
1189 1348
1190 // ForInNext <for_in_state> <index> 1349 // ForInNext <for_in_state> <index>
1191 // 1350 //
1192 // Returns the next key in a for..in loop. The state associated with the 1351 // Returns the next key in a for..in loop. The state associated with the
1193 // iteration is contained in |for_in_state| and |index| is the current 1352 // iteration is contained in |for_in_state| and |index| is the current
1194 // zero-based iteration count. 1353 // zero-based iteration count.
1195 void Interpreter::DoForInNext(compiler::InterpreterAssembler* assembler) { 1354 void Interpreter::DoForInNext(compiler::InterpreterAssembler* assembler) {
1196 Node* for_in_state_reg = __ BytecodeOperandReg8(0); 1355 Node* for_in_state_reg = __ BytecodeOperandReg(0);
1197 Node* for_in_state = __ LoadRegister(for_in_state_reg); 1356 Node* for_in_state = __ LoadRegister(for_in_state_reg);
1198 Node* receiver = __ LoadFixedArrayElement(for_in_state, 0); 1357 Node* receiver = __ LoadFixedArrayElement(for_in_state, 0);
1199 Node* cache_array = __ LoadFixedArrayElement(for_in_state, 1); 1358 Node* cache_array = __ LoadFixedArrayElement(for_in_state, 1);
1200 Node* cache_type = __ LoadFixedArrayElement(for_in_state, 2); 1359 Node* cache_type = __ LoadFixedArrayElement(for_in_state, 2);
1201 Node* index_reg = __ BytecodeOperandReg8(1); 1360 Node* index_reg = __ BytecodeOperandReg(1);
1202 Node* index = __ LoadRegister(index_reg); 1361 Node* index = __ LoadRegister(index_reg);
1203 Node* result = __ CallRuntime(Runtime::kForInNext, receiver, cache_array, 1362 Node* result = __ CallRuntime(Runtime::kForInNext, receiver, cache_array,
1204 cache_type, index); 1363 cache_type, index);
1205 __ SetAccumulator(result); 1364 __ SetAccumulator(result);
1206 __ Dispatch(); 1365 __ Dispatch();
1207 } 1366 }
1208 1367
1209 1368
1210 // ForInDone <for_in_state> 1369 // ForInDone <for_in_state>
1211 // 1370 //
1212 // Returns the next key in a for..in loop. The accumulator contains the current 1371 // Returns the next key in a for..in loop. The accumulator contains the current
1213 // zero-based iteration count and |for_in_state| is the state returned by an 1372 // zero-based iteration count and |for_in_state| is the state returned by an
1214 // earlier invocation of ForInPrepare. 1373 // earlier invocation of ForInPrepare.
1215 void Interpreter::DoForInDone(compiler::InterpreterAssembler* assembler) { 1374 void Interpreter::DoForInDone(compiler::InterpreterAssembler* assembler) {
1216 Node* index = __ GetAccumulator(); 1375 Node* index = __ GetAccumulator();
1217 Node* for_in_state_reg = __ BytecodeOperandReg8(0); 1376 Node* for_in_state_reg = __ BytecodeOperandReg(0);
1218 Node* for_in_state = __ LoadRegister(for_in_state_reg); 1377 Node* for_in_state = __ LoadRegister(for_in_state_reg);
1219 Node* cache_length = __ LoadFixedArrayElement(for_in_state, 3); 1378 Node* cache_length = __ LoadFixedArrayElement(for_in_state, 3);
1220 Node* result = __ CallRuntime(Runtime::kForInDone, index, cache_length); 1379 Node* result = __ CallRuntime(Runtime::kForInDone, index, cache_length);
1221 __ SetAccumulator(result); 1380 __ SetAccumulator(result);
1222 __ Dispatch(); 1381 __ Dispatch();
1223 } 1382 }
1224 1383
1225 1384
1226 } // namespace interpreter 1385 } // namespace interpreter
1227 } // namespace internal 1386 } // namespace internal
1228 } // namespace v8 1387 } // namespace v8