Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 7497063: Simplify and optimize ToBoolean handling. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 4 months ago
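
Context for the diff below: ToBooleanStub is generated per call site and specialized on the set of argument types that site has seen so far. When the stub meets a value outside that set, it jumps to the patch label and GenerateTypeTransition replaces the stub with one specialized for the widened set; this change also drops the separate INTERNAL_OBJECT type visible in the old code. A rough sketch of such a type set, with the interface modeled on the types_ calls used below (the actual declaration is elsewhere in V8 and may differ):

    #include <stdint.h>

    // Hedged sketch, for orientation only; not the actual V8 declaration.
    enum Type {
      UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING, HEAP_NUMBER
    };

    class Types {
     public:
      explicit Types(uint8_t bits = 0) : bits_(bits) {}
      bool IsEmpty() const { return bits_ == 0; }
      bool Contains(Type type) const { return (bits_ & (1 << type)) != 0; }
      void Add(Type type) { bits_ |= (1 << type); }
      uint8_t ToByte() const { return bits_; }
      // Oddballs and Smis are recognized by value or tag bits alone; only
      // the object, string, and number checks need the argument's map.
      bool NeedsMap() const {
        return Contains(SPEC_OBJECT) || Contains(STRING) ||
               Contains(HEAP_NUMBER);
      }
      // Assumption: only spec objects can carry the undetectable map bit.
      bool CanBeUndetectable() const { return Contains(SPEC_OBJECT); }

     private:
      uint8_t bits_;
    };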
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 224 matching lines...)
 void ToBooleanStub::Generate(MacroAssembler* masm) {
   Label patch;
   const Register argument = rax;
   const Register map = rdx;

   if (!types_.IsEmpty()) {
     __ movq(argument, Operand(rsp, 1 * kPointerSize));
   }

   // undefined -> false
-  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
+  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

   // Boolean -> its value
-  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
-  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
+  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
+  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

   // 'null' -> false.
-  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
+  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

   if (types_.Contains(SMI)) {
     // Smis: 0 -> false, all other -> true
     Label not_smi;
     __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
     // argument contains the correct return value already
     if (!tos_.is(argument)) {
       __ movq(tos_, argument);
     }
     __ ret(1 * kPointerSize);
     __ bind(&not_smi);
   } else if (types_.NeedsMap()) {
     // If we need a map later and have a Smi -> patch.
     __ JumpIfSmi(argument, &patch, Label::kNear);
   }

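A note on the Smi fast path above: the stub's contract is that a zero word in tos_ means false and any nonzero word means true, so the raw argument can be returned unchanged. A minimal sketch of why, assuming V8's x64 Smi encoding (32-bit value shifted into the upper half of the word, tag bit clear):

    #include <stdint.h>

    // Smi(0) is the all-zero word and every other Smi is nonzero, so the
    // argument word already satisfies the stub's return convention. Heap
    // object pointers (reused the same way in the spec-object path below)
    // are never zero either.
    static inline intptr_t SmiFromInt(int32_t value) {
      return static_cast<intptr_t>(value) << 32;  // kSmiShift == 32 on x64
    }

    static inline bool ToBooleanOfSmiWord(intptr_t smi_word) {
      return smi_word != 0;  // matches "tos_ != 0 means true"
    }
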
   if (types_.NeedsMap()) {
     __ movq(map, FieldOperand(argument, HeapObject::kMapOffset));

-    // Everything with a map could be undetectable, so check this now.
-    __ testb(FieldOperand(map, Map::kBitFieldOffset),
-             Immediate(1 << Map::kIsUndetectable));
-    // Undetectable -> false.
-    Label not_undetectable;
-    __ j(zero, &not_undetectable, Label::kNear);
-    __ Set(tos_, 0);
-    __ ret(1 * kPointerSize);
-    __ bind(&not_undetectable);
+    if (types_.CanBeUndetectable()) {
+      __ testb(FieldOperand(map, Map::kBitFieldOffset),
+               Immediate(1 << Map::kIsUndetectable));
+      // Undetectable -> false.
+      Label not_undetectable;
+      __ j(zero, &not_undetectable, Label::kNear);
+      __ Set(tos_, 0);
+      __ ret(1 * kPointerSize);
+      __ bind(&not_undetectable);
+    }
   }

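For reference, undetectable objects (the classic example is document.all) must convert to false, and undetectability is a bit in the object's map. A sketch of the test the testb above performs, with the bit position a placeholder assumption (the real constant is Map::kIsUndetectable in objects.h):

    #include <stdint.h>

    static const int kIsUndetectableBit = 4;  // placeholder, not V8's value

    // Mirrors: testb(FieldOperand(map, Map::kBitFieldOffset), ...)
    static inline bool IsUndetectable(uint8_t map_bit_field) {
      return (map_bit_field & (1 << kIsUndetectableBit)) != 0;
    }
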
   if (types_.Contains(SPEC_OBJECT)) {
     // spec object -> true.
     Label not_js_object;
     __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
     __ j(below, &not_js_object, Label::kNear);
-    __ Set(tos_, 1);
+    // argument contains the correct return value already.
+    if (!tos_.is(argument)) {
+      __ Set(tos_, 1);
+    }
     __ ret(1 * kPointerSize);
     __ bind(&not_js_object);
-  } else if (types_.Contains(INTERNAL_OBJECT)) {
-    // We've seen a spec object for the first time -> patch.
-    __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
-    __ j(above_equal, &patch, Label::kNear);
   }

   if (types_.Contains(STRING)) {
     // String value -> false iff empty.
     Label not_string;
     __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
     __ j(above_equal, &not_string, Label::kNear);
     __ movq(tos_, FieldOperand(argument, String::kLengthOffset));
     __ ret(1 * kPointerSize);  // the string length is OK as the return value
     __ bind(&not_string);
-  } else if (types_.Contains(INTERNAL_OBJECT)) {
-    // We've seen a string for the first time -> patch
-    __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
-    __ j(below, &patch, Label::kNear);
   }

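The string path can return the loaded length field directly because String::kLengthOffset holds the length as a Smi: an empty string yields the zero word (falsy), any non-empty string a nonzero word (truthy). A sketch under that assumption:

    #include <stdint.h>

    // The movq above loads the Smi-encoded length, so the stub's "zero
    // means false" convention holds without further work.
    static inline bool ToBooleanOfStringLengthWord(intptr_t length_as_smi) {
      return length_as_smi != 0;  // false iff the string is empty
    }
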
   if (types_.Contains(HEAP_NUMBER)) {
     // heap number -> false iff +0, -0, or NaN.
     Label not_heap_number, false_result;
     __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
     __ j(not_equal, &not_heap_number, Label::kNear);
     __ xorps(xmm0, xmm0);
     __ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset));
     __ j(zero, &false_result, Label::kNear);
-    __ Set(tos_, 1);
+    // argument contains the correct return value already.
+    if (!tos_.is(argument)) {
+      __ Set(tos_, 1);
+    }
     __ ret(1 * kPointerSize);
     __ bind(&false_result);
     __ Set(tos_, 0);
     __ ret(1 * kPointerSize);
     __ bind(&not_heap_number);
-  } else if (types_.Contains(INTERNAL_OBJECT)) {
-    // We've seen a heap number for the first time -> patch
-    __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
-    __ j(equal, &patch, Label::kNear);
   }

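The heap-number path gets all three falsy cases from one comparison: ucomisd against zero sets ZF both when the operands compare equal (+0 and -0, since -0.0 == 0.0) and when the comparison is unordered (NaN), so the single j(zero, &false_result) covers +0, -0, and NaN. A small host-side check of the same truth table:

    #include <cmath>
    #include <cstdio>

    // Mirrors the stub's rule: a heap number is false iff +0, -0, or NaN.
    static bool ToBooleanOfDouble(double value) {
      return !(value == 0.0 || std::isnan(value));  // -0.0 == 0.0 is true
    }

    int main() {
      const double samples[] = {0.0, -0.0, std::nan(""), 1.5, -2.0};
      for (double v : samples) {
        std::printf("%g -> %s\n", v, ToBooleanOfDouble(v) ? "true" : "false");
      }
      return 0;
    }
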
-  if (types_.Contains(INTERNAL_OBJECT)) {
-    // internal objects -> true
-    __ Set(tos_, 1);
-    __ ret(1 * kPointerSize);
-  }
-
-  if (!types_.IsAll()) {
-    __ bind(&patch);
-    GenerateTypeTransition(masm);
-  }
+  __ bind(&patch);
+  GenerateTypeTransition(masm);
 }


 void ToBooleanStub::CheckOddball(MacroAssembler* masm,
                                  Type type,
                                  Heap::RootListIndex value,
-                                 bool result,
-                                 Label* patch) {
+                                 bool result) {
   const Register argument = rax;
   if (types_.Contains(type)) {
     // If we see an expected oddball, return its ToBoolean value in tos_.
     Label different_value;
     __ CompareRoot(argument, value);
     __ j(not_equal, &different_value, Label::kNear);
-    __ Set(tos_, result ? 1 : 0);
+    if (!result) {
+      // If we have to return zero, there is no way around clearing tos_.
+      __ Set(tos_, 0);
+    } else if (!tos_.is(argument)) {
+      // If we have to return non-zero, we can re-use the argument if it is
+      // the same register as the result, because we never see Smi-zero here.
+      __ Set(tos_, 1);
+    }
     __ ret(1 * kPointerSize);
     __ bind(&different_value);
-  } else if (types_.Contains(INTERNAL_OBJECT)) {
-    // If we see an unexpected oddball and handle internal objects, we must
-    // patch because the code for internal objects doesn't handle it
-    // explicitly.
-    __ CompareRoot(argument, value);
-    __ j(equal, patch);
   }
 }


 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
   __ pop(rcx);  // Get return address, operand is now on top of stack.
   __ Push(Smi::FromInt(tos_.code()));
   __ Push(Smi::FromInt(types_.ToByte()));
   __ push(rcx);  // Push return address.
   // Patch the caller to an appropriate specialized stub and return the
(...skipping 4993 matching lines...)
   __ Drop(1);
   __ ret(2 * kPointerSize);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
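
A closing note on the transition path: GenerateTypeTransition pushes the result register's code and the current type byte as Smis beneath the return address, then (in the code elided above) calls into the runtime, which records the newly seen type and patches the call site to a stub specialized for the widened set. A hedged pseudo-implementation of that runtime half; every name here is hypothetical, not actual V8 API:

    #include <stdint.h>

    enum Type {
      UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING, HEAP_NUMBER
    };

    Type TypeOf(intptr_t object);                            // hypothetical
    void InstallToBooleanStub(int tos_code, uint8_t types);  // hypothetical
    bool SlowToBoolean(intptr_t object);                     // hypothetical

    // The two Smis pushed by GenerateTypeTransition arrive as arguments,
    // along with the operand still sitting on the caller's stack.
    bool HandleToBooleanMiss(int tos_register_code,
                             uint8_t old_types_byte,
                             intptr_t operand) {
      uint8_t widened = old_types_byte | (1 << TypeOf(operand));
      InstallToBooleanStub(tos_register_code, widened);  // patch call site
      return SlowToBoolean(operand);  // compute this result in the runtime
    }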
