OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 241 matching lines...)
252 CheckOddball(masm, UNDEFINED, factory->undefined_value(), false, &patch); | 252 CheckOddball(masm, UNDEFINED, factory->undefined_value(), false, &patch); |
253 | 253 |
254 // Boolean -> its value | 254 // Boolean -> its value |
255 CheckOddball(masm, BOOLEAN, factory->false_value(), false, &patch); | 255 CheckOddball(masm, BOOLEAN, factory->false_value(), false, &patch); |
256 CheckOddball(masm, BOOLEAN, factory->true_value(), true, &patch); | 256 CheckOddball(masm, BOOLEAN, factory->true_value(), true, &patch); |
257 | 257 |
258 // 'null' -> false. | 258 // 'null' -> false. |
259 CheckOddball(masm, NULL_TYPE, factory->null_value(), false, &patch); | 259 CheckOddball(masm, NULL_TYPE, factory->null_value(), false, &patch); |
260 | 260 |
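
The block above is the oddball fast path: each singleton value recorded in types_ is compared against the argument and mapped to a fixed boolean (undefined and null to false, a boolean to its own value). A standalone C++ sketch of that mapping, using hypothetical names rather than anything from the stub itself:

// Sketch of the oddball -> boolean mapping exercised by CheckOddball above.
// Oddball and OddballToBoolean are illustrative names, not V8 identifiers.
enum Oddball { kUndefined, kNull, kFalse, kTrue };

bool OddballToBoolean(Oddball value) {
  switch (value) {
    case kUndefined: return false;  // undefined -> false
    case kNull:      return false;  // 'null' -> false
    case kFalse:     return false;  // Boolean -> its value
    case kTrue:      return true;
  }
  return false;  // unreachable for valid input
}
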
261 bool need_map = | 261 bool need_map = |
262 types_.Contains(UNDETECTABLE) | | |
263 types_.Contains(SPEC_OBJECT) | | 262 types_.Contains(SPEC_OBJECT) | |
264 types_.Contains(STRING) | | 263 types_.Contains(STRING) | |
265 types_.Contains(HEAP_NUMBER) | | 264 types_.Contains(HEAP_NUMBER) | |
266 types_.Contains(INTERNAL_OBJECT); | 265 types_.Contains(INTERNAL_OBJECT); |
267 | 266 |
268 if (types_.Contains(SMI)) { | 267 if (types_.Contains(SMI)) { |
269 // Smis: 0 -> false, all other -> true | 268 // Smis: 0 -> false, all other -> true |
270 Label not_smi; | 269 Label not_smi; |
271 __ JumpIfNotSmi(argument, &not_smi, Label::kNear); | 270 __ JumpIfNotSmi(argument, &not_smi, Label::kNear); |
272 // argument contains the correct return value already | 271 // argument contains the correct return value already |
273 if (!tos_.is(argument)) { | 272 if (!tos_.is(argument)) { |
274 __ mov(tos_, argument); | 273 __ mov(tos_, argument); |
275 } | 274 } |
276 __ ret(1 * kPointerSize); | 275 __ ret(1 * kPointerSize); |
277 __ bind(&not_smi); | 276 __ bind(&not_smi); |
278 } else if (need_map) { | 277 } else if (need_map) { |
279 // If we need a map later and have a Smi -> patch. | 278 // If we need a map later and have a Smi -> patch. |
280 __ JumpIfSmi(argument, &patch, Label::kNear); | 279 __ JumpIfSmi(argument, &patch, Label::kNear); |
281 } | 280 } |
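
The Smi case above works because of ia32 Smi tagging: a small integer is stored as its value shifted left by one with a zero tag bit, so the tagged word is zero exactly when the untagged value is zero, and the argument can be returned as the boolean result without untagging. A minimal standalone sketch of that invariant (not V8 code):

#include <cassert>
#include <cstdint>

// Tagged Smi = value << 1 (tag bit 0), so zero-ness survives tagging and
// the tagged word itself can serve as the true/false result.
int32_t TagSmi(int32_t value) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
}
bool SmiToBoolean(int32_t tagged) { return tagged != 0; }

int main() {
  assert(!SmiToBoolean(TagSmi(0)));   // 0 -> false
  assert(SmiToBoolean(TagSmi(42)));   // any other Smi -> true
  assert(SmiToBoolean(TagSmi(-1)));
  return 0;
}
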
282 | 281 |
283 if (need_map) { | 282 if (need_map) { |
284 __ mov(map, FieldOperand(argument, HeapObject::kMapOffset)); | 283 __ mov(map, FieldOperand(argument, HeapObject::kMapOffset)); |
285 | 284 |
286 // Everything with a map could be undetectable, so check this now. | 285 // Everything with a map could be undetectable, so check this now. |
287 __ test_b(FieldOperand(map, Map::kBitFieldOffset), | 286 __ test_b(FieldOperand(map, Map::kBitFieldOffset), |
288 1 << Map::kIsUndetectable); | 287 1 << Map::kIsUndetectable); |
289 if (types_.Contains(UNDETECTABLE)) { | 288 // Undetectable -> false. |
290 // Undetectable -> false. | 289 Label not_undetectable; |
291 Label not_undetectable; | 290 __ j(zero, &not_undetectable, Label::kNear); |
292 __ j(zero, &not_undetectable, Label::kNear); | 291 __ Set(tos_, Immediate(0)); |
293 __ Set(tos_, Immediate(0)); | 292 __ ret(1 * kPointerSize); |
294 __ ret(1 * kPointerSize); | 293 __ bind(&not_undetectable); |
295 __ bind(&not_undetectable); |
296 } else { | |
297 // We've seen an undetectable value for the first time -> patch. | |
298 __ j(not_zero, &patch, Label::kNear); | |
299 } | |
300 } | 294 } |
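
The NEW side above drops the types_.Contains(UNDETECTABLE) guard: once a map has been loaded, any object whose map has the undetectable bit set (the classic example is document.all) converts to false unconditionally, instead of falling back to a patch when UNDETECTABLE had not been recorded yet. A high-level sketch of the resulting map-based decision, with hypothetical types standing in for the real ones; the spec-object check that follows and the string/number cases in the elided lines are only hinted at:

#include <optional>

// Illustrative stand-ins only, not V8's HeapObject/Map layout.
struct Map {
  bool is_undetectable;
  int instance_type;
};
struct HeapObject {
  const Map* map;
};

// Returns a value when this sketch can decide; std::nullopt means the later
// checks (strings, heap numbers, internal objects) would take over.
std::optional<bool> MapBasedToBoolean(const HeapObject& obj,
                                      int first_spec_object_type) {
  if (obj.map->is_undetectable) return false;  // undetectable -> false, always
  if (obj.map->instance_type >= first_spec_object_type) return true;  // spec object -> true
  return std::nullopt;
}
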
301 | 295 |
302 if (types_.Contains(SPEC_OBJECT)) { | 296 if (types_.Contains(SPEC_OBJECT)) { |
303 // spec object -> true. | 297 // spec object -> true. |
304 Label not_js_object; | 298 Label not_js_object; |
305 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); | 299 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
306 __ j(below, &not_js_object, Label::kNear); | 300 __ j(below, &not_js_object, Label::kNear); |
307 __ Set(tos_, Immediate(1)); | 301 __ Set(tos_, Immediate(1)); |
308 __ ret(1 * kPointerSize); | 302 __ ret(1 * kPointerSize); |
309 __ bind(&not_js_object); | 303 __ bind(&not_js_object); |
(...skipping 68 matching lines...)
378 // patch because the code for internal objects doesn't handle it explicitly. | 372 // patch because the code for internal objects doesn't handle it explicitly. |
379 __ cmp(argument, value); | 373 __ cmp(argument, value); |
380 __ j(equal, patch); | 374 __ j(equal, patch); |
381 } | 375 } |
382 } | 376 } |
383 | 377 |
384 | 378 |
385 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { | 379 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { |
386 __ pop(ecx); // Get return address, operand is now on top of stack. | 380 __ pop(ecx); // Get return address, operand is now on top of stack. |
387 __ push(Immediate(Smi::FromInt(tos_.code()))); | 381 __ push(Immediate(Smi::FromInt(tos_.code()))); |
388 __ push(Immediate(Smi::FromInt(types_.ToInt()))); | 382 __ push(Immediate(Smi::FromInt(types_.ToByte()))); |
389 __ push(ecx); // Push return address. | 383 __ push(ecx); // Push return address. |
390 // Patch the caller to an appropriate specialized stub and return the | 384 // Patch the caller to an appropriate specialized stub and return the |
391 // operation result to the caller of the stub. | 385 // operation result to the caller of the stub. |
392 __ TailCallExternalReference( | 386 __ TailCallExternalReference( |
393 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()), | 387 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()), |
394 3, | 388 3, |
395 1); | 389 1); |
396 } | 390 } |
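
The only functional change in GenerateTypeTransition is types_.ToInt() becoming types_.ToByte(): the set of value kinds seen so far is a small bit set that fits in one byte, which is then boxed as a Smi and passed to the ToBoolean_Patch runtime entry together with the result register. A minimal sketch of such a byte-backed set, as an assumption about the idea rather than V8's actual Types implementation:

#include <cstdint>

// Hypothetical byte-backed type set; the enumerator names merely echo the
// cases handled above and are not tied to V8's numbering.
enum Type : uint8_t {
  UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT,
  STRING, HEAP_NUMBER, INTERNAL_OBJECT  // eight members -> one byte suffices
};

class TypeSet {
 public:
  void Add(Type t) { bits_ = static_cast<uint8_t>(bits_ | (1u << t)); }
  bool Contains(Type t) const { return (bits_ >> t) & 1u; }
  uint8_t ToByte() const { return bits_; }  // whole set encoded in one byte
 private:
  uint8_t bits_ = 0;
};
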
397 | 391 |
398 | 392 |
(...skipping 5988 matching lines...)
6387 __ Drop(1); | 6381 __ Drop(1); |
6388 __ ret(2 * kPointerSize); | 6382 __ ret(2 * kPointerSize); |
6389 } | 6383 } |
6390 | 6384 |
6391 | 6385 |
6392 #undef __ | 6386 #undef __ |
6393 | 6387 |
6394 } } // namespace v8::internal | 6388 } } // namespace v8::internal |
6395 | 6389 |
6396 #endif // V8_TARGET_ARCH_IA32 | 6390 #endif // V8_TARGET_ARCH_IA32 |