Chromium Code Reviews

Unified Diff: src/ia32/code-stubs-ia32.cc

Issue 196133017: Experimental parser: merge r19949 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 152 matching lines...)
 void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { edx };
   descriptor->register_param_count_ = 1;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ = NULL;
 }


+void StringLengthStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { edx, ecx };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
+void KeyedStringLengthStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { edx, ecx };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
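Note: the two descriptors added here mirror the KeyedLoadFieldStub wiring above — they only declare which registers carry the stub's inputs (edx for the receiver, ecx for the name) and leave the deoptimization handler unset, so a miss tail-calls a builtin instead of deoptimizing. A minimal standalone sketch of that pattern (the types below are illustrative stand-ins, not V8's real API):

#include <cstdio>

// Illustrative stand-ins for V8's Register and descriptor types.
enum Register { edx, ecx, eax };

struct CodeStubInterfaceDescriptor {
  int register_param_count_ = 0;
  const Register* register_params_ = nullptr;
  void* deoptimization_handler_ = nullptr;  // nullptr: a miss goes to a builtin
};

// Same shape as the new StringLengthStub descriptor: two register inputs.
void InitializeStringLengthDescriptor(CodeStubInterfaceDescriptor* d) {
  static const Register registers[] = { edx, ecx };  // receiver, name
  d->register_param_count_ = 2;
  d->register_params_ = registers;
  d->deoptimization_handler_ = nullptr;
}

int main() {
  CodeStubInterfaceDescriptor d;
  InitializeStringLengthDescriptor(&d);
  std::printf("register params: %d\n", d.register_param_count_);
  return 0;
}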
 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { edx, ecx, eax };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
 }

(...skipping 856 matching lines...)
     __ j(not_equal, &miss);
   }

   StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
   __ bind(&miss);
   StubCompiler::TailCallBuiltin(
       masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
 }


-void StringLengthStub::Generate(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- ecx    : name
-  //  -- edx    : receiver
-  //  -- esp[0] : return address
-  // -----------------------------------
-  Label miss;
-
-  if (kind() == Code::KEYED_LOAD_IC) {
-    __ cmp(ecx, Immediate(masm->isolate()->factory()->length_string()));
-    __ j(not_equal, &miss);
-  }
-
-  StubCompiler::GenerateLoadStringLength(masm, edx, eax, ebx, &miss);
-  __ bind(&miss);
-  StubCompiler::TailCallBuiltin(
-      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
-}
-
-
 void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax    : value
   //  -- ecx    : name
   //  -- edx    : receiver
   //  -- esp[0] : return address
   // -----------------------------------
   //
   // This accepts as a receiver anything JSArray::SetElementsLength accepts
   // (currently anything except for external arrays which means anything with
(...skipping 104 matching lines...)
   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   // by calling the runtime system.
   __ bind(&slow);
   __ pop(ebx);  // Return address.
   __ push(edx);
   __ push(ebx);
   __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
 }


-void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
   // esp[0] : return address
   // esp[4] : number of parameters
   // esp[8] : receiver displacement
   // esp[12] : function

   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
   __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
   __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
   __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   __ j(not_equal, &runtime, Label::kNear);

   // Patch the arguments.length and the parameters pointer.
   __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ mov(Operand(esp, 1 * kPointerSize), ecx);
   __ lea(edx, Operand(edx, ecx, times_2,
                       StandardFrameConstants::kCallerSPOffset));
   __ mov(Operand(esp, 2 * kPointerSize), edx);

   __ bind(&runtime);
   __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
 }


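Note: GenerateNewSloppySlow only patches the stack before falling through to the runtime: if the caller went through an arguments adaptor frame, the real argument count lives in that frame, and the parameters pointer is recomputed from the adaptor's frame pointer. The times_2 scale works because the count is Smi-tagged (already shifted left by one bit), so x2 yields x kPointerSize on ia32. A rough standalone sketch of that arithmetic, with illustrative constants standing in for the real frame offsets:

#include <cstdint>
#include <cstdio>

// Illustrative values only; the real offsets come from StandardFrameConstants.
const int kPointerSize = 4;                    // ia32
const int kCallerSPOffset = 2 * kPointerSize;  // assumed for this sketch

// On ia32 a Smi is the integer value shifted left by one bit.
intptr_t SmiTag(int value) { return static_cast<intptr_t>(value) << 1; }

int main() {
  intptr_t adaptor_fp = 0x1000;   // pretend frame pointer of the adaptor frame
  intptr_t smi_argc = SmiTag(3);  // three arguments, Smi-tagged

  // Mirrors: lea edx, [edx + ecx*2 + kCallerSPOffset]
  // (smi * 2 == argc * kPointerSize on ia32).
  intptr_t parameters = adaptor_fp + smi_argc * 2 + kCallerSPOffset;

  std::printf("parameters pointer = fp + %d bytes\n",
              static_cast<int>(parameters - adaptor_fp));
  return 0;
}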
-void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   Isolate* isolate = masm->isolate();

   // esp[0] : return address
   // esp[4] : number of parameters (tagged)
   // esp[8] : receiver displacement
   // esp[12] : function

   // ebx = parameter count (tagged)
   __ mov(ebx, Operand(esp, 1 * kPointerSize));

(...skipping 39 matching lines...)
   Label no_parameter_map;
   __ test(ebx, ebx);
   __ j(zero, &no_parameter_map, Label::kNear);
   __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
   __ bind(&no_parameter_map);

   // 2. Backing store.
   __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

   // 3. Arguments object.
-  __ add(ebx, Immediate(Heap::kArgumentsObjectSize));
+  __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));

   // Do the allocation of all three objects in one go.
   __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);

   // eax = address of new object(s) (tagged)
   // ecx = argument count (tagged)
   // esp[0] = mapped parameter count (tagged)
   // esp[8] = parameter count (tagged)
   // esp[12] = address of receiver argument
   // Get the arguments boilerplate from the current native context into edi.
   Label has_mapped_parameters, copy;
   __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
   __ mov(ebx, Operand(esp, 0 * kPointerSize));
   __ test(ebx, ebx);
   __ j(not_zero, &has_mapped_parameters, Label::kNear);
   __ mov(edi, Operand(edi,
-      Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX)));
+      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX)));
   __ jmp(&copy, Label::kNear);

   __ bind(&has_mapped_parameters);
   __ mov(edi, Operand(edi,
       Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
   __ bind(&copy);

   // eax = address of new object (tagged)
   // ebx = mapped parameter count (tagged)
   // ecx = argument count (tagged)
(...skipping 16 matching lines...)
 
   // Use the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          ecx);

   // Set up the elements pointer in the allocated arguments object.
   // If we allocated a parameter map, edi will point there, otherwise to the
   // backing store.
-  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
+  __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
   __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

   // eax = address of new object (tagged)
   // ebx = mapped parameter count (tagged)
   // ecx = argument count (tagged)
   // edi = address of parameter map or backing store (tagged)
   // esp[0] = mapped parameter count (tagged)
   // esp[8] = parameter count (tagged)
   // esp[12] = address of receiver argument
   // Free a register.
   __ push(eax);

   // Initialize parameter map. If there are no mapped arguments, we're done.
   Label skip_parameter_map;
   __ test(ebx, ebx);
   __ j(zero, &skip_parameter_map);

   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
-      Immediate(isolate->factory()->non_strict_arguments_elements_map()));
+      Immediate(isolate->factory()->sloppy_arguments_elements_map()));
   __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
   __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
   __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
   __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

   // Copy the parameter slots and the holes in the arguments.
   // We need to fill in mapped_parameter_count slots. They index the context,
   // where parameters are stored in reverse order, at
   //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
(...skipping 105 matching lines...)
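Note: the renamed kSloppyArgumentsObjectSize feeds the same three-part allocation as before — an optional parameter map, the backing-store FixedArray, and the arguments object itself, all allocated in one go. The size arithmetic again rides on the Smi encoding (tagged count x 2 equals count x kPointerSize on ia32). A small standalone model of that computation, with assumed header sizes, not V8's real constants:

#include <cstdio>

// Assumed sizes for illustration; the real ones are Heap/FixedArray constants.
const int kPointerSize = 4;                               // ia32
const int kFixedArrayHeaderSize = 2 * kPointerSize;       // map + length
const int kParameterMapHeaderSize = kFixedArrayHeaderSize + 2 * kPointerSize;
const int kSloppyArgumentsObjectSize = 5 * kPointerSize;  // assumed

int SmiTag(int value) { return value << 1; }  // ia32 Smi encoding

// Mirrors the lea/add sequence above: counts arrive Smi-tagged.
int AllocationSize(int mapped_count, int argument_count) {
  int size = 0;
  if (mapped_count > 0)  // 1. Parameter map (only if something is mapped).
    size = SmiTag(mapped_count) * 2 + kParameterMapHeaderSize;
  size += SmiTag(argument_count) * 2 + kFixedArrayHeaderSize;  // 2. Backing store.
  size += kSloppyArgumentsObjectSize;                          // 3. Arguments object.
  return size;
}

int main() {
  std::printf("bytes to allocate: %d\n", AllocationSize(2, 3));
  return 0;
}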
   __ mov(Operand(esp, 2 * kPointerSize), edx);

   // Try the new space allocation. Start out with computing the size of
   // the arguments object and the elements array.
   Label add_arguments_object;
   __ bind(&try_allocate);
   __ test(ecx, ecx);
   __ j(zero, &add_arguments_object, Label::kNear);
   __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
   __ bind(&add_arguments_object);
-  __ add(ecx, Immediate(Heap::kArgumentsObjectSizeStrict));
+  __ add(ecx, Immediate(Heap::kStrictArgumentsObjectSize));

   // Do the allocation of both objects in one go.
   __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

   // Get the arguments boilerplate from the current native context.
   __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
   const int offset =
-      Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
+      Context::SlotOffset(Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX);
   __ mov(edi, Operand(edi, offset));

   // Copy the JS object part.
   for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
     __ mov(ebx, FieldOperand(edi, i));
     __ mov(FieldOperand(eax, i), ebx);
   }

   // Get the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   __ mov(ecx, Operand(esp, 1 * kPointerSize));
   __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          ecx);

   // If there are no actual arguments, we're done.
   Label done;
   __ test(ecx, ecx);
   __ j(zero, &done, Label::kNear);

   // Get the parameters pointer from the stack.
   __ mov(edx, Operand(esp, 2 * kPointerSize));

   // Set up the elements pointer in the allocated arguments object and
   // initialize the header in the elements fixed array.
-  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
+  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
   __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
          Immediate(isolate->factory()->fixed_array_map()));

   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
   // Untag the length for the loop below.
   __ SmiUntag(ecx);

   // Copy the fixed array slots.
   Label loop;
(...skipping 892 matching lines...)
   __ pop(edx);
   __ pop(ebx);
   __ pop(edi);

   __ bind(&done);
 }


 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // ebx : feedback vector
-  // edx : (only if ebx is not undefined) slot in feedback vector (Smi)
+  // edx : (only if ebx is not the megamorphic symbol) slot in feedback
+  //       vector (Smi)
   // edi : the function to call
   Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;

   if (NeedsChecks()) {
     // Check that the function really is a JavaScript function.
     __ JumpIfSmi(edi, &non_function);

     // Goto slow case if we do not have a function.
     __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
(...skipping 37 matching lines...)
   }

   __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());

   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
     if (RecordCallTarget()) {
       // If there is a call target cache, mark it megamorphic in the
       // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (undefined) so no write barrier is needed.
+      // object (megamorphic symbol) so no write barrier is needed.
       __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize),
              Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
     }
     // Check for function proxy.
     __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
     __ j(not_equal, &non_function);
     __ pop(ecx);
     __ push(edi);  // put proxy as additional argument under return address
     __ push(ecx);
(...skipping 27 matching lines...)
     }
     __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
     __ jmp(&cont);
   }
 }


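Note: the substantive change in this stub is that the "megamorphic" marker written into the feedback slot is now the dedicated TypeFeedbackInfo::MegamorphicSentinel rather than undefined; because the sentinel is an immortal, immovable object, the store still needs no write barrier. A minimal standalone model of the slot-state idea (not V8's actual representation):

#include <cstdio>

// Hypothetical model of a single call-site feedback slot.
struct FeedbackSlot {
  const void* value = nullptr;  // nullptr == uninitialized
};

// Stand-in for the immortal, immovable megamorphic sentinel object.
static const int kMegamorphicSentinel = 0;

void RecordCallTarget(FeedbackSlot* slot, const void* target) {
  if (slot->value == nullptr) {
    slot->value = target;                 // monomorphic: remember the target
  } else if (slot->value != target) {
    slot->value = &kMegamorphicSentinel;  // more than one target seen
  }
}

int main() {
  FeedbackSlot slot;
  int f = 0, g = 0;
  RecordCallTarget(&slot, &f);
  RecordCallTarget(&slot, &g);
  std::printf("megamorphic: %d\n", slot.value == &kMegamorphicSentinel);
  return 0;
}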
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // eax : number of arguments
   // ebx : feedback vector
-  // edx : (only if ebx is not undefined) slot in feedback vector (Smi)
+  // edx : (only if ebx is not the megamorphic symbol) slot in feedback
+  //       vector (Smi)
   // edi : constructor function
   Label slow, non_function_call;

   // Check that function is not a smi.
   __ JumpIfSmi(edi, &non_function_call);
   // Check that function is a JSFunction.
   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   __ j(not_equal, &slow);

   if (RecordCallTarget()) {
(...skipping 2141 matching lines...)
                          1 << MemoryChunk::SCAN_ON_SCAVENGE,
                          not_zero,
                          &dont_need_remembered_set);

     // First notify the incremental marker if necessary, then update the
     // remembered set.
     CheckNeedsToInformIncrementalMarker(
         masm,
         kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
         mode);
-    InformIncrementalMarker(masm, mode);
+    InformIncrementalMarker(masm);
     regs_.Restore(masm);
     __ RememberedSetHelper(object_,
                            address_,
                            value_,
                            save_fp_regs_mode_,
                            MacroAssembler::kReturnAtEnd);

     __ bind(&dont_need_remembered_set);
   }

   CheckNeedsToInformIncrementalMarker(
       masm,
       kReturnOnNoNeedToInformIncrementalMarker,
       mode);
-  InformIncrementalMarker(masm, mode);
+  InformIncrementalMarker(masm);
   regs_.Restore(masm);
   __ ret(0);
 }


-void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
   int argument_count = 3;
   __ PrepareCallCFunction(argument_count, regs_.scratch0());
   __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
   __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
   __ mov(Operand(esp, 2 * kPointerSize),
          Immediate(ExternalReference::isolate_address(masm->isolate())));

   AllowExternalCallThatCantCauseGC scope(masm);
-  if (mode == INCREMENTAL_COMPACTION) {
-    __ CallCFunction(
-        ExternalReference::incremental_evacuation_record_write_function(
-            masm->isolate()),
-        argument_count);
-  } else {
-    ASSERT(mode == INCREMENTAL);
-    __ CallCFunction(
-        ExternalReference::incremental_marking_record_write_function(
-            masm->isolate()),
-        argument_count);
-  }
+  __ CallCFunction(
+      ExternalReference::incremental_marking_record_write_function(
+          masm->isolate()),
+      argument_count);
+
   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
 }

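Note: with both the INCREMENTAL and INCREMENTAL_COMPACTION paths now funnelled through incremental_marking_record_write_function, InformIncrementalMarker no longer needs the Mode argument; the stub simply passes (object, slot address, isolate) to one C entry point. A conceptual standalone model of what such an entry point receives — illustrative only, not V8's actual function or behaviour:

#include <cstdio>
#include <vector>

// Conceptual model only: the shape of the record-write C entry point.
struct Isolate {
  std::vector<void**> remembered_slots;
  bool marking_is_active = true;
};

// Stand-in for the single record-write entry point: the plain incremental and
// the compaction case go through the same function, so the caller no longer
// branches on a Mode argument.
void IncrementalMarkingRecordWrite(void* object, void** slot, Isolate* isolate) {
  (void)object;  // unused in this toy model
  if (isolate->marking_is_active) isolate->remembered_slots.push_back(slot);
}

int main() {
  Isolate isolate;
  void* host = nullptr;
  void* field = nullptr;
  IncrementalMarkingRecordWrite(host, &field, &isolate);
  std::printf("recorded %zu slot(s)\n", isolate.remembered_slots.size());
  return 0;
}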
 
 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
     MacroAssembler* masm,
     OnNoNeedToInformIncrementalMarker on_no_need,
     Mode mode) {
   Label object_is_black, need_incremental, need_incremental_pop_object;

(...skipping 398 matching lines...)
     CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
   } else {
     UNREACHABLE();
   }
 }


 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax : argc (only if argument_count_ == ANY)
-  //  -- ebx : feedback vector (fixed array or undefined)
+  //  -- ebx : feedback vector (fixed array or megamorphic symbol)
   //  -- edx : slot index (if ebx is fixed array)
   //  -- edi : constructor
   //  -- esp[0] : return address
   //  -- esp[4] : last argument
   // -----------------------------------
-  Handle<Object> undefined_sentinel(
-      masm->isolate()->heap()->undefined_value(),
-      masm->isolate());
+  Handle<Object> megamorphic_sentinel =
+      TypeFeedbackInfo::MegamorphicSentinel(masm->isolate());

   if (FLAG_debug_code) {
     // The array construct code is only set for the global and natives
     // builtin Array functions which always have maps.

     // Initial map for the builtin Array function should be a map.
     __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
     __ test(ecx, Immediate(kSmiTagMask));
     __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
     __ CmpObjectType(ecx, MAP_TYPE, ecx);
     __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

-    // We should either have undefined in ebx or a valid fixed array.
+    // We should either have the megamorphic symbol in ebx or a valid
+    // fixed array.
     Label okay_here;
     Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
-    __ cmp(ebx, Immediate(undefined_sentinel));
+    __ cmp(ebx, Immediate(megamorphic_sentinel));
     __ j(equal, &okay_here);
     __ cmp(FieldOperand(ebx, 0), Immediate(fixed_array_map));
     __ Assert(equal, kExpectedFixedArrayInRegisterEbx);

-    // edx should be a smi if we don't have undefined in ebx.
+    // edx should be a smi if we don't have the megamorphic symbol in ebx.
     __ AssertSmi(edx);

     __ bind(&okay_here);
   }

   Label no_info;
-  // If the feedback vector is undefined, or contains anything other than an
-  // AllocationSite, call an array constructor that doesn't use AllocationSites.
-  __ cmp(ebx, Immediate(undefined_sentinel));
+  // If the feedback vector is the megamorphic sentinel, or contains anything
+  // other than an AllocationSite, call an array constructor that doesn't use
+  // AllocationSites.
+  __ cmp(ebx, Immediate(megamorphic_sentinel));
   __ j(equal, &no_info);
   __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                            FixedArray::kHeaderSize));
   __ cmp(FieldOperand(ebx, 0), Immediate(
       masm->isolate()->factory()->allocation_site_map()));
   __ j(not_equal, &no_info);

   // Only look at the lower 16 bits of the transition info.
   __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
   __ SmiUntag(edx);
(...skipping 250 matching lines...)
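Note: when the feedback slot does hold an AllocationSite, the stub reads its transition info and, as the comment says, only looks at the lower 16 bits — that part encodes the ElementsKind the array constructor should specialize for. A tiny sketch of that masking, assuming a hypothetical bit layout:

#include <cstdio>

// Hypothetical encoding for this sketch: low 16 bits of the (untagged)
// transition info hold the ElementsKind; higher bits hold other state.
int ElementsKindFromTransitionInfo(int transition_info) {
  return transition_info & 0xFFFF;  // "only look at the lower 16 bits"
}

int main() {
  int transition_info = (1 << 16) | 3;  // pretend: flags = 1, kind = 3
  std::printf("elements kind = %d\n",
              ElementsKindFromTransitionInfo(transition_info));
  return 0;
}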
                            Operand(ebp, 7 * kPointerSize),
                            NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_IA32