| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 231 matching lines...) | |
| 242 Label patch; | 242 Label patch; |
| 243 Factory* factory = masm->isolate()->factory(); | 243 Factory* factory = masm->isolate()->factory(); |
| 244 const Register argument = eax; | 244 const Register argument = eax; |
| 245 const Register map = edx; | 245 const Register map = edx; |
| 246 | 246 |
| 247 if (!types_.IsEmpty()) { | 247 if (!types_.IsEmpty()) { |
| 248 __ mov(argument, Operand(esp, 1 * kPointerSize)); | 248 __ mov(argument, Operand(esp, 1 * kPointerSize)); |
| 249 } | 249 } |
| 250 | 250 |
| 251 // undefined -> false | 251 // undefined -> false |
| 252 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch); | 252 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); |
| 253 | 253 |
| 254 // Boolean -> its value | 254 // Boolean -> its value |
| 255 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch); | 255 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false); |
| 256 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch); | 256 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true); |
| 257 | 257 |
| 258 // 'null' -> false. | 258 // 'null' -> false. |
| 259 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch); | 259 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false); |
| 260 | 260 |
| 261 if (types_.Contains(SMI)) { | 261 if (types_.Contains(SMI)) { |
| 262 // Smis: 0 -> false, all other -> true | 262 // Smis: 0 -> false, all other -> true |
| 263 Label not_smi; | 263 Label not_smi; |
| 264 __ JumpIfNotSmi(argument, ¬_smi, Label::kNear); | 264 __ JumpIfNotSmi(argument, ¬_smi, Label::kNear); |
| 265 // argument contains the correct return value already | 265 // argument contains the correct return value already. |
| 266 if (!tos_.is(argument)) { | 266 if (!tos_.is(argument)) { |
| 267 __ mov(tos_, argument); | 267 __ mov(tos_, argument); |
| 268 } | 268 } |
| 269 __ ret(1 * kPointerSize); | 269 __ ret(1 * kPointerSize); |
| 270 __ bind(¬_smi); | 270 __ bind(¬_smi); |
| 271 } else if (types_.NeedsMap()) { | 271 } else if (types_.NeedsMap()) { |
| 272 // If we need a map later and have a Smi -> patch. | 272 // If we need a map later and have a Smi -> patch. |
| 273 __ JumpIfSmi(argument, &patch, Label::kNear); | 273 __ JumpIfSmi(argument, &patch, Label::kNear); |
| 274 } | 274 } |
| 275 | 275 |
| 276 if (types_.NeedsMap()) { | 276 if (types_.NeedsMap()) { |
| 277 __ mov(map, FieldOperand(argument, HeapObject::kMapOffset)); | 277 __ mov(map, FieldOperand(argument, HeapObject::kMapOffset)); |
| 278 | 278 |
| 279 // Everything with a map could be undetectable, so check this now. | 279 if (types_.CanBeUndetectable()) { |
| 280 __ test_b(FieldOperand(map, Map::kBitFieldOffset), | 280 __ test_b(FieldOperand(map, Map::kBitFieldOffset), |
| 281 1 << Map::kIsUndetectable); | 281 1 << Map::kIsUndetectable); |
| 282 // Undetectable -> false. | 282 // Undetectable -> false. |
| 283 Label not_undetectable; | 283 Label not_undetectable; |
| 284 __ j(zero, ¬_undetectable, Label::kNear); | 284 __ j(zero, ¬_undetectable, Label::kNear); |
| 285 __ Set(tos_, Immediate(0)); | 285 __ Set(tos_, Immediate(0)); |
| 286 __ ret(1 * kPointerSize); | 286 __ ret(1 * kPointerSize); |
| 287 __ bind(¬_undetectable); | 287 __ bind(¬_undetectable); |
| 288 } |
| 288 } | 289 } |
| 289 | 290 |
| 290 if (types_.Contains(SPEC_OBJECT)) { | 291 if (types_.Contains(SPEC_OBJECT)) { |
| 291 // spec object -> true. | 292 // spec object -> true. |
| 292 Label not_js_object; | 293 Label not_js_object; |
| 293 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); | 294 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
| 294 __ j(below, ¬_js_object, Label::kNear); | 295 __ j(below, ¬_js_object, Label::kNear); |
| 295 __ Set(tos_, Immediate(1)); | 296 // argument contains the correct return value already. |
| | 297 if (!tos_.is(argument)) { |
| | 298 __ Set(tos_, Immediate(1)); |
| | 299 } |
| 296 __ ret(1 * kPointerSize); | 300 __ ret(1 * kPointerSize); |
| 297 __ bind(¬_js_object); | 301 __ bind(¬_js_object); |
| 298 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 299 // We've seen a spec object for the first time -> patch. | |
| 300 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); | |
| 301 __ j(above_equal, &patch, Label::kNear); | |
| 302 } | 302 } |
| 303 | 303 |
| 304 if (types_.Contains(STRING)) { | 304 if (types_.Contains(STRING)) { |
| 305 // String value -> false iff empty. | 305 // String value -> false iff empty. |
| 306 Label not_string; | 306 Label not_string; |
| 307 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); | 307 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); |
| 308 __ j(above_equal, ¬_string, Label::kNear); | 308 __ j(above_equal, ¬_string, Label::kNear); |
| 309 __ mov(tos_, FieldOperand(argument, String::kLengthOffset)); | 309 __ mov(tos_, FieldOperand(argument, String::kLengthOffset)); |
| 310 __ ret(1 * kPointerSize); // the string length is OK as the return value | 310 __ ret(1 * kPointerSize); // the string length is OK as the return value |
| 311 __ bind(¬_string); | 311 __ bind(¬_string); |
| 312 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 313 // We've seen a string for the first time -> patch | |
| 314 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); | |
| 315 __ j(below, &patch, Label::kNear); | |
| 316 } | 312 } |
| 317 | 313 |
| 318 if (types_.Contains(HEAP_NUMBER)) { | 314 if (types_.Contains(HEAP_NUMBER)) { |
| 319 // heap number -> false iff +0, -0, or NaN. | 315 // heap number -> false iff +0, -0, or NaN. |
| 320 Label not_heap_number, false_result; | 316 Label not_heap_number, false_result; |
| 321 __ cmp(map, factory->heap_number_map()); | 317 __ cmp(map, factory->heap_number_map()); |
| 322 __ j(not_equal, ¬_heap_number, Label::kNear); | 318 __ j(not_equal, ¬_heap_number, Label::kNear); |
| 323 __ fldz(); | 319 __ fldz(); |
| 324 __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset)); | 320 __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset)); |
| 325 __ FCmp(); | 321 __ FCmp(); |
| 326 __ j(zero, &false_result, Label::kNear); | 322 __ j(zero, &false_result, Label::kNear); |
| 327 __ Set(tos_, Immediate(1)); | 323 // argument contains the correct return value already. |
| | 324 if (!tos_.is(argument)) { |
| | 325 __ Set(tos_, Immediate(1)); |
| | 326 } |
| 328 __ ret(1 * kPointerSize); | 327 __ ret(1 * kPointerSize); |
| 329 __ bind(&false_result); | 328 __ bind(&false_result); |
| 330 __ Set(tos_, Immediate(0)); | 329 __ Set(tos_, Immediate(0)); |
| 331 __ ret(1 * kPointerSize); | 330 __ ret(1 * kPointerSize); |
| 332 __ bind(¬_heap_number); | 331 __ bind(¬_heap_number); |
| 333 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 334 // We've seen a heap number for the first time -> patch | |
| 335 __ cmp(map, factory->heap_number_map()); | |
| 336 __ j(equal, &patch, Label::kNear); | |
| 337 } | 332 } |
| 338 | 333 |
| 339 if (types_.Contains(INTERNAL_OBJECT)) { | 334 __ bind(&patch); |
| 340 // internal objects -> true | 335 GenerateTypeTransition(masm); |
| 341 __ Set(tos_, Immediate(1)); | |
| 342 __ ret(1 * kPointerSize); | |
| 343 } | |
| 344 | |
| 345 if (!types_.IsAll()) { | |
| 346 __ bind(&patch); | |
| 347 GenerateTypeTransition(masm); | |
| 348 } | |
| 349 } | 336 } |
| 350 | 337 |
| 351 | 338 |
| 352 void ToBooleanStub::CheckOddball(MacroAssembler* masm, | 339 void ToBooleanStub::CheckOddball(MacroAssembler* masm, |
| 353 Type type, | 340 Type type, |
| 354 Heap::RootListIndex value, | 341 Heap::RootListIndex value, |
| 355 bool result, | 342 bool result) { |
| 356 Label* patch) { | |
| 357 const Register argument = eax; | 343 const Register argument = eax; |
| 358 if (types_.Contains(type)) { | 344 if (types_.Contains(type)) { |
| 359 // If we see an expected oddball, return its ToBoolean value in tos_. | 345 // If we see an expected oddball, return its ToBoolean value in tos_. |
| 360 Label different_value; | 346 Label different_value; |
| 361 __ CompareRoot(argument, value); | 347 __ CompareRoot(argument, value); |
| 362 __ j(not_equal, &different_value, Label::kNear); | 348 __ j(not_equal, &different_value, Label::kNear); |
| 363 __ Set(tos_, Immediate(result ? 1 : 0)); | 349 if (!result) { |
| | 350 // If we have to return zero, there is no way around clearing tos_. |
| | 351 __ Set(tos_, Immediate(0)); |
| | 352 } else if (!tos_.is(argument)) { |
| | 353 // If we have to return non-zero, we can re-use the argument if it is the |
| | 354 // same register as the result, because we never see Smi-zero here. |
| | 355 __ Set(tos_, Immediate(1)); |
| | 356 } |
| 364 __ ret(1 * kPointerSize); | 357 __ ret(1 * kPointerSize); |
| 365 __ bind(&different_value); | 358 __ bind(&different_value); |
| 366 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 367 // If we see an unexpected oddball and handle internal objects, we must | |
| 368 // patch because the code for internal objects doesn't handle it explicitly. | |
| 369 __ CompareRoot(argument, value); | |
| 370 __ j(equal, patch); | |
| 371 } | 359 } |
| 372 } | 360 } |
| 373 | 361 |
| 374 | 362 |
| 375 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { | 363 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 376 __ pop(ecx); // Get return address, operand is now on top of stack. | 364 __ pop(ecx); // Get return address, operand is now on top of stack. |
| 377 __ push(Immediate(Smi::FromInt(tos_.code()))); | 365 __ push(Immediate(Smi::FromInt(tos_.code()))); |
| 378 __ push(Immediate(Smi::FromInt(types_.ToByte()))); | 366 __ push(Immediate(Smi::FromInt(types_.ToByte()))); |
| 379 __ push(ecx); // Push return address. | 367 __ push(ecx); // Push return address. |
| 380 // Patch the caller to an appropriate specialized stub and return the | 368 // Patch the caller to an appropriate specialized stub and return the |
| (...skipping 5996 matching lines...) | |
| 6377 __ Drop(1); | 6365 __ Drop(1); |
| 6378 __ ret(2 * kPointerSize); | 6366 __ ret(2 * kPointerSize); |
| 6379 } | 6367 } |
| 6380 | 6368 |
| 6381 | 6369 |
| 6382 #undef __ | 6370 #undef __ |
| 6383 | 6371 |
| 6384 } } // namespace v8::internal | 6372 } } // namespace v8::internal |
| 6385 | 6373 |
| 6386 #endif // V8_TARGET_ARCH_IA32 | 6374 #endif // V8_TARGET_ARCH_IA32 |
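
For context, the checks emitted above implement ECMAScript ToBoolean, specialized to the value types the stub has already observed (undefined, null, booleans, Smis, strings, heap numbers, spec objects; anything unexpected jumps to &patch). Below is a minimal plain-C++ sketch of that truth table, mirroring the diff's own comments; the `JSValue` variant type is hypothetical, since the real stub dispatches on heap-object maps rather than a tagged struct.

```cpp
// Sketch only: a hypothetical tagged value standing in for V8's heap objects.
#include <cmath>
#include <string>

struct JSValue {
  enum Kind { kUndefined, kNull, kBoolean, kSmi,
              kString, kHeapNumber, kSpecObject } kind;
  bool boolean_value = false;   // used when kind == kBoolean
  int smi_value = 0;            // used when kind == kSmi
  std::string string_value;     // used when kind == kString
  double number_value = 0.0;    // used when kind == kHeapNumber
  bool undetectable = false;    // the Map::kIsUndetectable bit in the stub
};

bool ToBoolean(const JSValue& v) {
  if (v.undetectable) return false;                     // undetectable -> false
  switch (v.kind) {
    case JSValue::kUndefined:  return false;            // undefined -> false
    case JSValue::kNull:       return false;            // 'null' -> false
    case JSValue::kBoolean:    return v.boolean_value;  // Boolean -> its value
    case JSValue::kSmi:        return v.smi_value != 0; // 0 -> false, else true
    case JSValue::kString:     return !v.string_value.empty(); // false iff empty
    case JSValue::kHeapNumber:                          // false iff +0, -0, NaN
      return v.number_value != 0.0 && !std::isnan(v.number_value);
    case JSValue::kSpecObject: return true;             // spec object -> true
  }
  return false;
}
```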
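When an unhandled type is seen, GenerateTypeTransition pushes tos_.code() and types_.ToByte() as Smis and calls into the runtime, which repatches the call site with a stub specialized for the enlarged type set. The following is a rough sketch of the bit-set round-trip that ToByte() implies; the class layout and the NeedsMap() rule are assumptions for illustration, not V8's actual Types implementation.

```cpp
#include <cstdint>

// Hypothetical mirror of ToBooleanStub::Types: one bit per observed type.
class Types {
 public:
  enum Type { UNDEFINED, BOOLEAN, NULL_TYPE, SMI,
              SPEC_OBJECT, STRING, HEAP_NUMBER };
  void Add(Type t) { bits_ |= 1u << t; }
  bool Contains(Type t) const { return (bits_ >> t) & 1u; }
  bool IsEmpty() const { return bits_ == 0; }
  uint8_t ToByte() const { return bits_; }  // what the stub pushes as a Smi
  static Types FromByte(uint8_t b) { Types t; t.bits_ = b; return t; }
  // Assumed rule: a map load is needed once any non-oddball heap-object
  // check (spec object, string, heap number) is in the set.
  bool NeedsMap() const {
    return Contains(SPEC_OBJECT) || Contains(STRING) || Contains(HEAP_NUMBER);
  }
 private:
  uint8_t bits_ = 0;
};
```

On the runtime side, one would rebuild the set with FromByte(), Add() the newly observed type, and compile a new stub keyed on the resulting byte, which is what makes the one-way "patch" transitions in the generated code terminate.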