| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 231 matching lines...) |
| 242 Label patch; | 242 Label patch; |
| 243 Factory* factory = masm->isolate()->factory(); | 243 Factory* factory = masm->isolate()->factory(); |
| 244 const Register argument = eax; | 244 const Register argument = eax; |
| 245 const Register map = edx; | 245 const Register map = edx; |
| 246 | 246 |
| 247 if (!types_.IsEmpty()) { | 247 if (!types_.IsEmpty()) { |
| 248 __ mov(argument, Operand(esp, 1 * kPointerSize)); | 248 __ mov(argument, Operand(esp, 1 * kPointerSize)); |
| 249 } | 249 } |
| 250 | 250 |
| 251 // undefined -> false | 251 // undefined -> false |
| 252 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch); | 252 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); |
| 253 | 253 |
| 254 // Boolean -> its value | 254 // Boolean -> its value |
| 255 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch); | 255 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false); |
| 256 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch); | 256 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true); |
| 257 | 257 |
| 258 // 'null' -> false. | 258 // 'null' -> false. |
| 259 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch); | 259 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false); |
| 260 | 260 |
| 261 if (types_.Contains(SMI)) { | 261 if (types_.Contains(SMI)) { |
| 262 // Smis: 0 -> false, all other -> true | 262 // Smis: 0 -> false, all other -> true |
| 263 Label not_smi; | 263 Label not_smi; |
| 264 __ JumpIfNotSmi(argument, &not_smi, Label::kNear); | 264 __ JumpIfNotSmi(argument, &not_smi, Label::kNear); |
| 265 // argument contains the correct return value already | 265 // argument contains the correct return value already. |
| 266 if (!tos_.is(argument)) { | 266 if (!tos_.is(argument)) { |
| 267 __ mov(tos_, argument); | 267 __ mov(tos_, argument); |
| 268 } | 268 } |
| 269 __ ret(1 * kPointerSize); | 269 __ ret(1 * kPointerSize); |
| 270 __ bind(&not_smi); | 270 __ bind(&not_smi); |
| 271 } else if (types_.NeedsMap()) { | 271 } else if (types_.NeedsMap()) { |
| 272 // If we need a map later and have a Smi -> patch. | 272 // If we need a map later and have a Smi -> patch. |
| 273 __ JumpIfSmi(argument, &patch, Label::kNear); | 273 __ JumpIfSmi(argument, &patch, Label::kNear); |
| 274 } | 274 } |
| 275 | 275 |
| 276 if (types_.NeedsMap()) { | 276 if (types_.NeedsMap()) { |
| 277 __ mov(map, FieldOperand(argument, HeapObject::kMapOffset)); | 277 __ mov(map, FieldOperand(argument, HeapObject::kMapOffset)); |
| 278 | 278 |
| 279 // Everything with a map could be undetectable, so check this now. | 279 if (types_.CanBeUndetectable()) { |
| 280 __ test_b(FieldOperand(map, Map::kBitFieldOffset), | 280 __ test_b(FieldOperand(map, Map::kBitFieldOffset), |
| 281 1 << Map::kIsUndetectable); | 281 1 << Map::kIsUndetectable); |
| 282 // Undetectable -> false. | 282 // Undetectable -> false. |
| 283 Label not_undetectable; | 283 Label not_undetectable; |
| 284 __ j(zero, &not_undetectable, Label::kNear); | 284 __ j(zero, &not_undetectable, Label::kNear); |
| 285 __ Set(tos_, Immediate(0)); | 285 __ Set(tos_, Immediate(0)); |
| 286 __ ret(1 * kPointerSize); | 286 __ ret(1 * kPointerSize); |
| 287 __ bind(&not_undetectable); | 287 __ bind(&not_undetectable); |
| 288 } |
| 288 } | 289 } |
| 289 | 290 |
| 290 if (types_.Contains(SPEC_OBJECT)) { | 291 if (types_.Contains(SPEC_OBJECT)) { |
| 291 // spec object -> true. | 292 // spec object -> true. |
| 292 Label not_js_object; | 293 Label not_js_object; |
| 293 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); | 294 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
| 294 __ j(below, &not_js_object, Label::kNear); | 295 __ j(below, &not_js_object, Label::kNear); |
| 295 __ Set(tos_, Immediate(1)); | 296 // argument contains the correct return value already. |
| 297 if (!tos_.is(argument)) { |
| 298 __ Set(tos_, Immediate(1)); |
| 299 } |
| 296 __ ret(1 * kPointerSize); | 300 __ ret(1 * kPointerSize); |
| 297 __ bind(&not_js_object); | 301 __ bind(&not_js_object); |
| 298 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 299 // We've seen a spec object for the first time -> patch. | |
| 300 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); | |
| 301 __ j(above_equal, &patch, Label::kNear); | |
| 302 } | 302 } |
| 303 | 303 |
| 304 if (types_.Contains(STRING)) { | 304 if (types_.Contains(STRING)) { |
| 305 // String value -> false iff empty. | 305 // String value -> false iff empty. |
| 306 Label not_string; | 306 Label not_string; |
| 307 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); | 307 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); |
| 308 __ j(above_equal, &not_string, Label::kNear); | 308 __ j(above_equal, &not_string, Label::kNear); |
| 309 __ mov(tos_, FieldOperand(argument, String::kLengthOffset)); | 309 __ mov(tos_, FieldOperand(argument, String::kLengthOffset)); |
| 310 __ ret(1 * kPointerSize); // the string length is OK as the return value | 310 __ ret(1 * kPointerSize); // the string length is OK as the return value |
| 311 __ bind(&not_string); | 311 __ bind(&not_string); |
| 312 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 313 // We've seen a string for the first time -> patch | |
| 314 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); | |
| 315 __ j(below, &patch, Label::kNear); | |
| 316 } | 312 } |
| 317 | 313 |
| 318 if (types_.Contains(HEAP_NUMBER)) { | 314 if (types_.Contains(HEAP_NUMBER)) { |
| 319 // heap number -> false iff +0, -0, or NaN. | 315 // heap number -> false iff +0, -0, or NaN. |
| 320 Label not_heap_number, false_result; | 316 Label not_heap_number, false_result; |
| 321 __ cmp(map, factory->heap_number_map()); | 317 __ cmp(map, factory->heap_number_map()); |
| 322 __ j(not_equal, &not_heap_number, Label::kNear); | 318 __ j(not_equal, &not_heap_number, Label::kNear); |
| 323 __ fldz(); | 319 __ fldz(); |
| 324 __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset)); | 320 __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset)); |
| 325 __ FCmp(); | 321 __ FCmp(); |
| 326 __ j(zero, &false_result, Label::kNear); | 322 __ j(zero, &false_result, Label::kNear); |
| 327 __ Set(tos_, Immediate(1)); | 323 // argument contains the correct return value already. |
| 324 if (!tos_.is(argument)) { |
| 325 __ Set(tos_, Immediate(1)); |
| 326 } |
| 328 __ ret(1 * kPointerSize); | 327 __ ret(1 * kPointerSize); |
| 329 __ bind(&false_result); | 328 __ bind(&false_result); |
| 330 __ Set(tos_, Immediate(0)); | 329 __ Set(tos_, Immediate(0)); |
| 331 __ ret(1 * kPointerSize); | 330 __ ret(1 * kPointerSize); |
| 332 __ bind(&not_heap_number); | 331 __ bind(&not_heap_number); |
| 333 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 334 // We've seen a heap number for the first time -> patch | |
| 335 __ cmp(map, factory->heap_number_map()); | |
| 336 __ j(equal, &patch, Label::kNear); | |
| 337 } | 332 } |
| 338 | 333 |
| 339 if (types_.Contains(INTERNAL_OBJECT)) { | 334 __ bind(&patch); |
| 340 // internal objects -> true | 335 GenerateTypeTransition(masm); |
| 341 __ Set(tos_, Immediate(1)); | |
| 342 __ ret(1 * kPointerSize); | |
| 343 } | |
| 344 | |
| 345 if (!types_.IsAll()) { | |
| 346 __ bind(&patch); | |
| 347 GenerateTypeTransition(masm); | |
| 348 } | |
| 349 } | 336 } |
| 350 | 337 |
| 351 | 338 |
| 352 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 339 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 353 // We don't allow a GC during a store buffer overflow so there is no need to | 340 // We don't allow a GC during a store buffer overflow so there is no need to |
| 354 // store the registers in any particular way, but we do have to store and | 341 // store the registers in any particular way, but we do have to store and |
| 355 // restore them. | 342 // restore them. |
| 356 __ pushad(); | 343 __ pushad(); |
| 357 if (save_doubles_ == kSaveFPRegs) { | 344 if (save_doubles_ == kSaveFPRegs) { |
| 358 CpuFeatures::Scope scope(SSE2); | 345 CpuFeatures::Scope scope(SSE2); |
| (...skipping 19 matching lines...) |
| 378 __ add(Operand(esp), Immediate(kDoubleSize * XMMRegister::kNumRegisters)); | 365 __ add(Operand(esp), Immediate(kDoubleSize * XMMRegister::kNumRegisters)); |
| 379 } | 366 } |
| 380 __ popad(); | 367 __ popad(); |
| 381 __ ret(0); | 368 __ ret(0); |
| 382 } | 369 } |
| 383 | 370 |
| 384 | 371 |
| 385 void ToBooleanStub::CheckOddball(MacroAssembler* masm, | 372 void ToBooleanStub::CheckOddball(MacroAssembler* masm, |
| 386 Type type, | 373 Type type, |
| 387 Heap::RootListIndex value, | 374 Heap::RootListIndex value, |
| 388 bool result, | 375 bool result) { |
| 389 Label* patch) { | |
| 390 const Register argument = eax; | 376 const Register argument = eax; |
| 391 if (types_.Contains(type)) { | 377 if (types_.Contains(type)) { |
| 392 // If we see an expected oddball, return its ToBoolean value tos_. | 378 // If we see an expected oddball, return its ToBoolean value tos_. |
| 393 Label different_value; | 379 Label different_value; |
| 394 __ CompareRoot(argument, value); | 380 __ CompareRoot(argument, value); |
| 395 __ j(not_equal, &different_value, Label::kNear); | 381 __ j(not_equal, &different_value, Label::kNear); |
| 396 __ Set(tos_, Immediate(result ? 1 : 0)); | 382 if (!result) { |
| 383 // If we have to return zero, there is no way around clearing tos_. |
| 384 __ Set(tos_, Immediate(0)); |
| 385 } else if (!tos_.is(argument)) { |
| 386 // If we have to return non-zero, we can re-use the argument if it is the |
| 387 // same register as the result, because we never see Smi-zero here. |
| 388 __ Set(tos_, Immediate(1)); |
| 389 } |
| 397 __ ret(1 * kPointerSize); | 390 __ ret(1 * kPointerSize); |
| 398 __ bind(&different_value); | 391 __ bind(&different_value); |
| 399 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 400 // If we see an unexpected oddball and handle internal objects, we must | |
| 401 // patch because the code for internal objects doesn't handle it explicitly. | |
| 402 __ CompareRoot(argument, value); | |
| 403 __ j(equal, patch); | |
| 404 } | 392 } |
| 405 } | 393 } |
| 406 | 394 |
| 407 | 395 |
| 408 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { | 396 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 409 __ pop(ecx); // Get return address, operand is now on top of stack. | 397 __ pop(ecx); // Get return address, operand is now on top of stack. |
| 410 __ push(Immediate(Smi::FromInt(tos_.code()))); | 398 __ push(Immediate(Smi::FromInt(tos_.code()))); |
| 411 __ push(Immediate(Smi::FromInt(types_.ToByte()))); | 399 __ push(Immediate(Smi::FromInt(types_.ToByte()))); |
| 412 __ push(ecx); // Push return address. | 400 __ push(ecx); // Push return address. |
| 413 // Patch the caller to an appropriate specialized stub and return the | 401 // Patch the caller to an appropriate specialized stub and return the |
| (...skipping 6185 matching lines...) |
| 6599 | 6587 |
| 6600 // Fall through when we need to inform the incremental marker. | 6588 // Fall through when we need to inform the incremental marker. |
| 6601 } | 6589 } |
| 6602 | 6590 |
| 6603 | 6591 |
| 6604 #undef __ | 6592 #undef __ |
| 6605 | 6593 |
| 6606 } } // namespace v8::internal | 6594 } } // namespace v8::internal |
| 6607 | 6595 |
| 6608 #endif // V8_TARGET_ARCH_IA32 | 6596 #endif // V8_TARGET_ARCH_IA32 |
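Not part of the CL above, just an aid for review: a minimal C++ sketch of the ToBoolean truth table that the specialized ToBooleanStub::Generate encodes. The TaggedValue struct and its fields are hypothetical stand-ins for V8's real tagged values, not actual V8 API.

```cpp
// Illustrative sketch only (hypothetical types, not V8 API).
#include <cmath>
#include <string>

struct TaggedValue {
  enum Kind { kUndefined, kNull, kBoolean, kSmi, kString, kHeapNumber, kObject };
  Kind kind = kUndefined;
  bool boolean_value = false;
  int smi_value = 0;
  std::string string_value;
  double number_value = 0.0;
  bool undetectable = false;  // objects marked undetectable behave like undefined
};

bool ToBoolean(const TaggedValue& v) {
  switch (v.kind) {
    case TaggedValue::kUndefined:
    case TaggedValue::kNull:       return false;                    // undefined/null -> false
    case TaggedValue::kBoolean:    return v.boolean_value;          // Boolean -> its value
    case TaggedValue::kSmi:        return v.smi_value != 0;         // Smi: 0 -> false
    case TaggedValue::kString:     return !v.string_value.empty();  // "" -> false
    case TaggedValue::kHeapNumber:                                  // +0, -0, NaN -> false
      return v.number_value != 0.0 && !std::isnan(v.number_value);
    case TaggedValue::kObject:     return !v.undetectable;          // spec object -> true
  }
  return true;  // unreachable; keeps the compiler happy about falling off the switch
}
```

The generated stub differs from this full table only in that it checks just the kinds recorded in types_ and jumps to the patch label (GenerateTypeTransition) for anything it was not specialized for.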
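Also illustrative rather than authoritative: the register-reuse reasoning behind the new CheckOddball body, which skips the write into tos_ when the result register already aliases the argument and the wanted result is true, because the oddball in the argument register is a heap pointer and never Smi zero. The helper below is a hypothetical model of that decision, not code from the CL.

```cpp
// Hypothetical model of the CheckOddball write decision (not part of the CL).
#include <optional>

// Returns the immediate that would be written into tos_, or nullopt when the
// stub can skip the write because the register already holds a non-zero value.
std::optional<int> OddballResultWrite(bool result, bool tos_is_argument) {
  if (!result) return 0;           // returning false: tos_ must be cleared
  if (!tos_is_argument) return 1;  // returning true into a distinct register
  return std::nullopt;             // true and tos_ == argument: already non-zero
}
```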